@tailor-platform/sdk 1.25.1 → 1.25.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89) hide show
  1. package/CHANGELOG.md +36 -0
  2. package/dist/{application-iRp2OYMz.mjs → application-91Th6tm6.mjs} +127 -128
  3. package/dist/application-91Th6tm6.mjs.map +1 -0
  4. package/dist/application-DegTCDd8.mjs +9 -0
  5. package/dist/{brand-BOaOlsiP.mjs → brand-GZnI4eYb.mjs} +1 -1
  6. package/dist/{brand-BOaOlsiP.mjs.map → brand-GZnI4eYb.mjs.map} +1 -1
  7. package/dist/chunk-Cz-A8uMR.mjs +3 -0
  8. package/dist/cli/index.d.mts +2 -3
  9. package/dist/cli/index.mjs +172 -216
  10. package/dist/cli/index.mjs.map +1 -1
  11. package/dist/cli/lib.d.mts +706 -1108
  12. package/dist/cli/lib.mjs +28 -16
  13. package/dist/cli/lib.mjs.map +1 -1
  14. package/dist/cli/skills.mjs +2 -1
  15. package/dist/cli/skills.mjs.map +1 -1
  16. package/dist/configure/index.d.mts +5 -5
  17. package/dist/configure/index.mjs +4 -3
  18. package/dist/configure/index.mjs.map +1 -1
  19. package/dist/{enum-constants-BxdLbhsW.mjs → enum-constants-6uK0VI_s.mjs} +1 -1
  20. package/dist/{enum-constants-BxdLbhsW.mjs.map → enum-constants-6uK0VI_s.mjs.map} +1 -1
  21. package/dist/{env-jndw86T4.d.mts → env-uBeVwE9B.d.mts} +4 -7
  22. package/dist/{file-utils-C2r3AVbI.mjs → file-utils-2T9w20FP.mjs} +1 -1
  23. package/dist/{file-utils-C2r3AVbI.mjs.map → file-utils-2T9w20FP.mjs.map} +1 -1
  24. package/dist/{index-Do7zo7z-.d.mts → index-BD-K97-C.d.mts} +2 -2
  25. package/dist/{index-BuWllBxZ.d.mts → index-Bu12qy3m.d.mts} +25 -22
  26. package/dist/{index-VZq4IAEK.d.mts → index-CT53egux.d.mts} +2 -2
  27. package/dist/{index-DZRZdh71.d.mts → index-D1J5SfyK.d.mts} +2 -2
  28. package/dist/{index-DoxGF8-i.d.mts → index-cZilKprY.d.mts} +2 -2
  29. package/dist/{interceptor-DVy32eIG.mjs → interceptor-BPiIBTk_.mjs} +2 -1
  30. package/dist/{interceptor-DVy32eIG.mjs.map → interceptor-BPiIBTk_.mjs.map} +1 -1
  31. package/dist/{job-BQDunsd7.mjs → job-DdfW7vH3.mjs} +3 -3
  32. package/dist/{job-BQDunsd7.mjs.map → job-DdfW7vH3.mjs.map} +1 -1
  33. package/dist/kysely/index.d.mts +3 -4
  34. package/dist/kysely/index.mjs +1 -0
  35. package/dist/kysely/index.mjs.map +1 -1
  36. package/dist/{kysely-type-DzLBuVp6.mjs → kysely-type-cMNbsQ6k.mjs} +1 -1
  37. package/dist/{kysely-type-DzLBuVp6.mjs.map → kysely-type-cMNbsQ6k.mjs.map} +1 -1
  38. package/dist/package-json-Bj76LPsV.mjs +4 -0
  39. package/dist/{package-json-DnbGCOkg.mjs → package-json-CVUv8Y9T.mjs} +1 -1
  40. package/dist/{package-json-DnbGCOkg.mjs.map → package-json-CVUv8Y9T.mjs.map} +1 -1
  41. package/dist/plugin/builtin/enum-constants/index.d.mts +1 -2
  42. package/dist/plugin/builtin/enum-constants/index.mjs +2 -1
  43. package/dist/plugin/builtin/file-utils/index.d.mts +1 -2
  44. package/dist/plugin/builtin/file-utils/index.mjs +2 -1
  45. package/dist/plugin/builtin/kysely-type/index.d.mts +1 -2
  46. package/dist/plugin/builtin/kysely-type/index.mjs +2 -1
  47. package/dist/plugin/builtin/seed/index.d.mts +1 -2
  48. package/dist/plugin/builtin/seed/index.mjs +2 -1
  49. package/dist/plugin/index.d.mts +2 -3
  50. package/dist/plugin/index.mjs +5 -4
  51. package/dist/plugin/index.mjs.map +1 -1
  52. package/dist/{plugin-3sT6Tcq0.d.mts → plugin-zY5wvV82.d.mts} +117 -225
  53. package/dist/{query-D3UyoG68.mjs → query-BpppEOzu.mjs} +502 -541
  54. package/dist/query-BpppEOzu.mjs.map +1 -0
  55. package/dist/{schema-Fbfeq9gi.mjs → schema-BePzTFBV.mjs} +9 -9
  56. package/dist/schema-BePzTFBV.mjs.map +1 -0
  57. package/dist/seed/index.d.mts +1 -4
  58. package/dist/seed/index.mjs +1 -0
  59. package/dist/seed/index.mjs.map +1 -1
  60. package/dist/{seed-DkKAheSe.mjs → seed-CCVRLibh.mjs} +24 -10
  61. package/dist/seed-CCVRLibh.mjs.map +1 -0
  62. package/dist/{telemetry-d_lgTL33.mjs → telemetry-0w8OupuQ.mjs} +2 -2
  63. package/dist/{telemetry-d_lgTL33.mjs.map → telemetry-0w8OupuQ.mjs.map} +1 -1
  64. package/dist/telemetry-DDQZRqHK.mjs +4 -0
  65. package/dist/utils/test/index.d.mts +2 -3
  66. package/dist/utils/test/index.mjs +3 -2
  67. package/dist/utils/test/index.mjs.map +1 -1
  68. package/dist/{app-config-QzNOFnEy.d.mts → workflow.generated-v1LXRuB6.d.mts} +19 -22
  69. package/docs/cli/application.md +73 -33
  70. package/docs/cli/auth.md +56 -24
  71. package/docs/cli/completion.md +6 -0
  72. package/docs/cli/executor.md +73 -36
  73. package/docs/cli/function.md +30 -14
  74. package/docs/cli/secret.md +93 -41
  75. package/docs/cli/staticwebsite.md +38 -17
  76. package/docs/cli/tailordb.md +106 -48
  77. package/docs/cli/user.md +74 -10
  78. package/docs/cli/workflow.md +70 -39
  79. package/docs/cli/workspace.md +166 -60
  80. package/docs/cli-reference.md +22 -12
  81. package/docs/services/workflow.md +26 -0
  82. package/package.json +7 -6
  83. package/dist/application-B4ORumjE.mjs +0 -8
  84. package/dist/application-iRp2OYMz.mjs.map +0 -1
  85. package/dist/package-json-BKA36WTo.mjs +0 -3
  86. package/dist/query-D3UyoG68.mjs.map +0 -1
  87. package/dist/schema-Fbfeq9gi.mjs.map +0 -1
  88. package/dist/seed-DkKAheSe.mjs.map +0 -1
  89. package/dist/telemetry-J6dpByo2.mjs +0 -3
@@ -1 +0,0 @@
1
- {"version":3,"file":"query-D3UyoG68.mjs","names":["fs","fs","create","trn","idpConfigs","create","planServices","trn","client","create","planServices","trn","idpConfig","attr","fs","create","create","trn","executors","create","planServices","trn","secrets","create","create","trn","fs","fs","waitForExecution","resolve","arg","trn","fs","trn","create","create","dryRun","buildOnly","config","application","workflowBuildResult","functionRegistry","tailorDB","staticWebsite","idp","auth","pipeline","app","executor","workflow","secretManager","nameArgs","getCommand","sleep","resolve","formatTime","colorizeStatus","getCommand","resolve","formatTime","arg","attempts","setTimeout","job","listCommand","processResolver","watch","fs","resolve","relativePath","db","pluginExecutorFiles","generate","listCommand","getCommand","listCommand","loadOptions","removeCommand","appInfo","fs","fsPromises","defineApplication","listCommand","loadOptions","loadOptions","listCommand","loadOptions","loadOptions","listCommand","loadOptions","loadOptions","loadOptions","loadOptions","loadOptions","fs","CLIError","query","query","result","sqlQuery","parseSql"],"sources":["../src/cli/shared/errors.ts","../src/cli/shared/args.ts","../src/cli/commands/api.ts","../src/cli/cache/types.ts","../src/cli/cache/store.ts","../src/cli/cache/manager.ts","../src/cli/shared/type-generator.ts","../src/types/plugin-generation.ts","../src/plugin/manager.ts","../src/cli/commands/apply/change-set.ts","../src/cli/commands/apply/label.ts","../src/cli/commands/apply/application.ts","../src/cli/commands/apply/idp.ts","../src/cli/commands/apply/auth.ts","../src/cli/commands/apply/confirm.ts","../src/cli/shared/runtime-args.ts","../src/cli/commands/apply/function-registry.ts","../src/cli/commands/apply/executor.ts","../src/cli/commands/apply/resolver.ts","../src/cli/commands/apply/secrets-state.ts","../src/cli/commands/apply/secret-manager.ts","../src/cli/commands/apply/staticwebsite.ts","../src/cli/commands/tailordb/migrat
e/config.ts","../src/cli/commands/tailordb/migrate/diff-calculator.ts","../src/cli/commands/tailordb/migrate/snapshot.ts","../src/cli/commands/tailordb/migrate/bundler.ts","../src/cli/commands/tailordb/migrate/types.ts","../src/cli/shared/script-executor.ts","../src/cli/commands/apply/tailordb/migration.ts","../src/cli/commands/apply/tailordb/index.ts","../src/cli/commands/apply/workflow.ts","../src/cli/commands/apply/apply.ts","../src/cli/commands/executor/status.ts","../src/cli/commands/executor/transform.ts","../src/cli/commands/executor/get.ts","../src/cli/shared/format.ts","../src/cli/shared/function-execution.ts","../src/cli/commands/workflow/args.ts","../src/cli/commands/workflow/status.ts","../src/cli/commands/workflow/transform.ts","../src/cli/commands/workflow/executions.ts","../src/cli/commands/workflow/get.ts","../src/cli/commands/workflow/start.ts","../src/cli/commands/executor/jobs.ts","../src/cli/commands/executor/list.ts","../src/cli/commands/executor/trigger.ts","../src/cli/commands/executor/webhook.ts","../src/cli/commands/generate/types.ts","../src/cli/commands/generate/watch/index.ts","../src/cli/commands/generate/service.ts","../src/cli/commands/machineuser/list.ts","../src/cli/commands/machineuser/token.ts","../src/cli/commands/oauth2client/transform.ts","../src/cli/commands/oauth2client/get.ts","../src/cli/commands/oauth2client/list.ts","../src/cli/commands/remove.ts","../src/cli/commands/show.ts","../src/cli/shared/beta.ts","../src/cli/shared/editor.ts","../src/cli/commands/tailordb/migrate/db-types-generator.ts","../src/cli/commands/tailordb/migrate/template-generator.ts","../src/cli/commands/tailordb/migrate/generate.ts","../src/cli/shared/config.ts","../src/cli/shared/tailordb-namespace.ts","../src/cli/commands/tailordb/truncate.ts","../src/cli/commands/workflow/list.ts","../src/cli/commands/workflow/resume.ts","../src/cli/commands/workspace/app/transform.ts","../src/cli/commands/workspace/app/health.ts","../src/cli/commands/workspace/app/
list.ts","../src/cli/commands/workspace/transform.ts","../src/cli/commands/workspace/create.ts","../src/cli/commands/workspace/delete.ts","../src/cli/commands/workspace/get.ts","../src/cli/commands/workspace/list.ts","../src/cli/commands/workspace/restore.ts","../src/cli/commands/workspace/user/transform.ts","../src/cli/commands/workspace/user/invite.ts","../src/cli/commands/workspace/user/list.ts","../src/cli/commands/workspace/user/remove.ts","../src/cli/commands/workspace/user/update.ts","../src/cli/bundler/query/query-bundler.ts","../src/cli/query/errors.ts","../src/cli/query/graphql-repl.ts","../src/cli/query/sql-repl.ts","../src/cli/query/sql-type-extractor.ts","../src/cli/query/type-field-order.ts","../src/cli/query/index.ts"],"sourcesContent":["import chalk from \"chalk\";\n\n/**\n * Options for creating a CLI error\n */\nexport interface CLIErrorOptions {\n message: string;\n details?: string;\n suggestion?: string;\n command?: string;\n code?: string;\n}\n\n/**\n * CLI error interface with formatted output\n */\nexport interface CLIError extends Error {\n readonly code?: string;\n readonly details?: string;\n readonly suggestion?: string;\n readonly command?: string;\n format(): string;\n}\n\ntype CLIErrorInternal = Error & {\n code?: string;\n details?: string;\n suggestion?: string;\n command?: string;\n format(): string;\n};\n\n/**\n * Format CLI error for output\n * @param error - CLIError instance to format\n * @returns Formatted error message\n */\nfunction formatError(error: CLIError): string {\n const parts: string[] = [\n chalk.red(`Error${error.code ? 
` [${error.code}]` : \"\"}: ${error.message}`),\n ];\n\n if (error.details) {\n parts.push(`\\n ${chalk.gray(\"Details:\")} ${error.details}`);\n }\n\n if (error.suggestion) {\n parts.push(`\\n ${chalk.cyan(\"Suggestion:\")} ${error.suggestion}`);\n }\n\n if (error.command) {\n parts.push(\n `\\n ${chalk.gray(\"Help:\")} Run \\`tailor-sdk ${error.command} --help\\` for usage information.`,\n );\n }\n\n return parts.join(\"\");\n}\n\n/**\n * Create a CLI error with formatted output\n * @param options - Options to construct a CLIError\n * @returns Constructed CLIError instance\n */\nfunction createCLIError(options: CLIErrorOptions): CLIError {\n const error = new Error(options.message) as CLIErrorInternal;\n error.name = \"CLIError\";\n error.code = options.code;\n error.details = options.details;\n error.suggestion = options.suggestion;\n error.command = options.command;\n error.format = () => formatError(error);\n return error;\n}\n\n/**\n * Type guard to check if an error is a CLIError\n * @param error - Error to check\n * @returns True if the error is a CLIError\n */\nexport function isCLIError(error: unknown): error is CLIError {\n return error instanceof Error && error.name === \"CLIError\";\n}\n\n// Re-export createCLIError as CLIError for backward compatibility\nexport { createCLIError as CLIError };\n","import * as fs from \"node:fs\";\nimport { parseEnv } from \"node:util\";\nimport * as path from \"pathe\";\nimport { arg } from \"politty\";\nimport { z } from \"zod\";\nimport { isCLIError } from \"./errors\";\nimport { logger } from \"./logger\";\n\ntype ArgsShape = Record<string, z.ZodType>;\n\n// ============================================================================\n// Validators\n// ============================================================================\n\nconst durationUnits = [\"ms\", \"s\", \"m\"] as const;\ntype DurationUnit = (typeof durationUnits)[number];\n\nconst unitToMs: Record<DurationUnit, number> = {\n ms: 1,\n s: 1000,\n m: 60 
* 1000,\n};\n\nconst durationPattern = /^(\\d+)(ms|s|m)$/;\n\n/**\n * Schema for duration string validation (e.g., \"3s\", \"500ms\", \"1m\")\n * Only validates format; use parseDuration() to convert to milliseconds\n */\nexport const durationArg = z\n .string()\n .refine((val) => durationPattern.test(val), {\n message: \"Invalid duration format. Expected format: '3s', '500ms', '1m'\",\n })\n .refine(\n (val) => {\n const match = val.match(durationPattern)!;\n return parseInt(match[1], 10) > 0;\n },\n { message: \"Duration must be greater than 0\" },\n );\n\n/**\n * Parse a validated duration string into milliseconds\n * @param duration - Duration string (e.g., \"3s\", \"500ms\", \"1m\")\n * @returns Duration in milliseconds\n */\nexport function parseDuration(duration: string): number {\n const match = duration.match(durationPattern)!;\n const value = parseInt(match[1], 10);\n const unit = match[2] as DurationUnit;\n return value * unitToMs[unit];\n}\n\n/**\n * Schema for positive integer validation (from string input)\n * Transforms the string to a number\n */\nexport const positiveIntArg = z.coerce.number().int().positive();\n\n// ============================================================================\n// Env File Helpers\n// ============================================================================\n\ntype EnvFileArg = string | string[] | undefined;\n\n/**\n * Load env files from parsed arguments.\n * Processes --env-file first, then --env-file-if-exists.\n *\n * Follows Node.js --env-file behavior:\n * - Variables already set in the environment are NOT overwritten\n * - Variables from later files override those from earlier files\n * @param envFiles - Required env file path(s) that must exist\n * @param envFilesIfExists - Optional env file path(s) that are loaded if they exist\n */\nexport function loadEnvFiles(envFiles: EnvFileArg, envFilesIfExists: EnvFileArg): void {\n // Snapshot of originally set environment variables (before loading any files)\n 
const originalEnvKeys = new Set(Object.keys(process.env));\n\n const load = (files: EnvFileArg, required: boolean) => {\n for (const file of [files ?? []].flat()) {\n const envPath = path.resolve(process.cwd(), file);\n if (!fs.existsSync(envPath)) {\n if (required) {\n throw new Error(`Environment file not found: ${envPath}`);\n }\n continue;\n }\n const content = fs.readFileSync(envPath, \"utf-8\");\n const parsed = parseEnv(content);\n for (const [key, value] of Object.entries(parsed)) {\n // Skip if the variable was originally set in the environment\n if (originalEnvKeys.has(key)) {\n continue;\n }\n // Allow overwriting between env files\n process.env[key] = value;\n }\n }\n };\n\n load(envFiles, true);\n load(envFilesIfExists, false);\n}\n\n// ============================================================================\n// Argument Definitions\n// ============================================================================\n\n/**\n * Common arguments for all CLI commands\n *\n * NOTE: --env-file and --env-file-if-exists collide with Node.js flags due to a bug\n * (https://github.com/nodejs/node/issues/54232). 
Node.js parses these even after the\n * script path, causing warnings (twice due to tsx loader).\n */\nexport const commonArgs = {\n \"env-file\": arg(z.string().optional(), {\n alias: \"e\",\n description: \"Path to the environment file (error if not found)\",\n completion: { type: \"file\", matcher: [\".env.*\", \".env\"] },\n }),\n \"env-file-if-exists\": arg(z.string().optional(), {\n description: \"Path to the environment file (ignored if not found)\",\n completion: { type: \"file\", matcher: [\".env.*\", \".env\"] },\n }),\n verbose: arg(z.boolean().default(false), {\n description: \"Enable verbose logging\",\n }),\n} satisfies ArgsShape;\n\n/**\n * Arguments for commands that require workspace context\n */\nexport const workspaceArgs = {\n \"workspace-id\": arg(z.string().optional(), {\n alias: \"w\",\n description: \"Workspace ID\",\n completion: { type: \"none\" },\n }),\n profile: arg(z.string().optional(), {\n alias: \"p\",\n description: \"Workspace profile\",\n completion: { type: \"none\" },\n }),\n} satisfies ArgsShape;\n\n/**\n * Shared config arg for commands that accept a config file path\n */\nexport const configArg = {\n config: arg(z.string().default(\"tailor.config.ts\"), {\n alias: \"c\",\n description: \"Path to SDK config file\",\n completion: { type: \"file\", extensions: [\"ts\"] },\n }),\n} satisfies ArgsShape;\n\n/**\n * Arguments for commands that interact with deployed resources (includes config)\n */\nexport const deploymentArgs = {\n ...workspaceArgs,\n ...configArg,\n} satisfies ArgsShape;\n\n/**\n * Arguments for commands that require confirmation\n */\nexport const confirmationArgs = {\n yes: arg(z.boolean().default(false), {\n alias: \"y\",\n description: \"Skip confirmation prompts\",\n }),\n} satisfies ArgsShape;\n\n/**\n * Arguments for JSON output\n */\nexport const jsonArgs = {\n json: arg(z.boolean().default(false), {\n alias: \"j\",\n description: \"Output as JSON\",\n }),\n} satisfies ArgsShape;\n\nexport type 
CommonArgsType = z.infer<z.ZodObject<typeof commonArgs>>;\n\n/**\n * Wrapper for command handlers that provides:\n * - Environment file loading\n * - Error handling with formatted output\n * - Exit code management\n * @template T\n * @param handler - Command handler function\n * @returns Wrapped handler\n */\nexport const withCommonArgs =\n <T extends CommonArgsType>(handler: (args: T) => Promise<void>) =>\n async (args: T) => {\n try {\n // Set JSON mode if --json flag is provided\n if (\"json\" in args && typeof args.json === \"boolean\") {\n logger.jsonMode = args.json;\n }\n\n // Load env files\n loadEnvFiles(args[\"env-file\"] as EnvFileArg, args[\"env-file-if-exists\"] as EnvFileArg);\n\n // Initialize telemetry (no-op if OTEL_EXPORTER_OTLP_ENDPOINT is not set)\n const { initTelemetry } = await import(\"@/cli/telemetry\");\n await initTelemetry();\n\n await handler(args);\n } catch (error) {\n if (isCLIError(error)) {\n logger.log(error.format());\n if (args.verbose && error.stack) {\n logger.debug(`\\nStack trace:\\n${error.stack}`);\n }\n } else if (error instanceof Error) {\n logger.error(error.message);\n if (args.verbose && error.stack) {\n logger.debug(`\\nStack trace:\\n${error.stack}`);\n }\n } else {\n logger.error(`Unknown error: ${error}`);\n }\n process.exit(1);\n } finally {\n // Flush pending traces before process exit\n const { shutdownTelemetry } = await import(\"@/cli/telemetry\");\n await shutdownTelemetry();\n }\n process.exit(0);\n };\n","import { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { platformBaseUrl, userAgent } from \"@/cli/shared/client\";\nimport { loadAccessToken } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\n\nexport interface ApiCallOptions {\n profile?: string;\n endpoint: string;\n body?: string;\n}\n\nexport interface ApiCallResult {\n status: number;\n data: 
unknown;\n}\n\n/**\n * Call Tailor Platform API endpoints directly.\n * If the endpoint doesn't contain \"/\", it defaults to `tailor.v1.OperatorService/{endpoint}`.\n * @param options - API call options (profile, endpoint, body)\n * @returns Response status and data\n */\nexport async function apiCall(options: ApiCallOptions): Promise<ApiCallResult> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n\n // Determine the endpoint path\n let endpointPath: string;\n if (options.endpoint.includes(\"/\")) {\n endpointPath = options.endpoint;\n } else {\n // Default to OperatorService if no \"/\" in endpoint\n endpointPath = `tailor.v1.OperatorService/${options.endpoint}`;\n }\n\n // Build the full URL\n const url = new URL(endpointPath, platformBaseUrl);\n\n // Make the request\n const response = await fetch(url.toString(), {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: `Bearer ${accessToken}`,\n \"User-Agent\": await userAgent(),\n },\n body: options.body ?? 
\"{}\",\n });\n\n const data = await response.json();\n\n if (!response.ok) {\n throw new Error(`API call failed (${response.status}): ${JSON.stringify(data)}`);\n }\n\n return {\n status: response.status,\n data,\n };\n}\n\nexport const apiCommand = defineCommand({\n name: \"api\",\n description: \"Call Tailor Platform API endpoints directly.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n body: arg(z.string().default(\"{}\"), {\n alias: \"b\",\n description: \"Request body as JSON\",\n }),\n endpoint: arg(z.string(), {\n positional: true,\n description:\n \"API endpoint to call (e.g., 'GetApplication' or 'tailor.v1.OperatorService/GetApplication')\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const result = await apiCall({\n profile: args.profile,\n endpoint: args.endpoint as string,\n body: args.body,\n });\n\n if (args.json) {\n logger.log(JSON.stringify(result.data, null, 2));\n } else {\n logger.log(JSON.stringify(result.data, null, 2));\n }\n }),\n});\n","import { z } from \"zod\";\n\nconst cacheOutputFileSchema = z.object({\n outputPath: z.string(),\n contentHash: z.string(),\n});\n\nconst cacheEntrySchema = z.object({\n kind: z.literal(\"bundle\"),\n inputHash: z.string(),\n dependencyPaths: z.array(z.string()),\n outputFiles: z.array(cacheOutputFileSchema),\n createdAt: z.string(),\n});\n\nconst cacheManifestSchema = z.object({\n version: z.literal(1),\n sdkVersion: z.string(),\n lockfileHash: z.string().optional(),\n entries: z.record(z.string(), cacheEntrySchema),\n});\n\ntype CacheEntry = z.infer<typeof cacheEntrySchema>;\ntype CacheManifest = z.infer<typeof cacheManifestSchema>;\n\n/**\n * Runtime configuration for the caching subsystem.\n */\ntype CacheConfig = {\n /** Directory where cache artifacts are stored. 
*/\n cacheDir: string;\n};\n\nexport { cacheManifestSchema };\nexport type { CacheConfig, CacheEntry, CacheManifest };\n","import * as fs from \"node:fs\";\nimport * as path from \"pathe\";\nimport { cacheManifestSchema } from \"./types\";\nimport type { CacheConfig, CacheEntry, CacheManifest } from \"./types\";\n\n/**\n * Public interface for cache persistence operations.\n */\ntype CacheStore = {\n /** Read manifest from disk, returning undefined if missing or invalid. */\n loadManifest(): CacheManifest | undefined;\n /** Return the current in-memory manifest, loading from disk on first access if not yet loaded. */\n getCurrentManifest(): CacheManifest | undefined;\n /** Persist manifest to disk using atomic write (temp file + rename). */\n saveManifest(manifest: CacheManifest): void;\n /** Retrieve a cache entry by key from the in-memory manifest. */\n getEntry(key: string): CacheEntry | undefined;\n /** Add or update a cache entry in the in-memory manifest. */\n setEntry(key: string, entry: CacheEntry): void;\n /** Remove a cache entry from the in-memory manifest. */\n deleteEntry(key: string): void;\n /** Copy a bundled output file into cache/bundles/. */\n storeBundleOutput(cacheKey: string, sourcePath: string): void;\n /** Copy a cached bundle back to the target path. Returns false if not found. */\n restoreBundleOutput(cacheKey: string, targetPath: string): boolean;\n /** Delete the entire cache directory. 
*/\n clean(): void;\n};\n\nconst MANIFEST_FILENAME = \"manifest.json\";\nconst BUNDLES_DIR = \"bundles\";\n\n/**\n * Create a cache store for manifest persistence and bundle output storage.\n * @param config - Cache configuration specifying the cache directory\n * @returns A CacheStore instance\n */\nfunction createCacheStore(config: CacheConfig): CacheStore {\n // Tri-state: null = not yet loaded, undefined = loaded but missing/invalid, CacheManifest = loaded\n let cachedManifest: CacheManifest | undefined | null = null;\n\n function manifestPath(): string {\n return path.join(config.cacheDir, MANIFEST_FILENAME);\n }\n\n function bundlesDir(): string {\n return path.join(config.cacheDir, BUNDLES_DIR);\n }\n\n function bundlePath(cacheKey: string): string {\n return path.join(bundlesDir(), `${cacheKey.replaceAll(\":\", \"_\")}.js`);\n }\n\n function loadManifest(): CacheManifest | undefined {\n try {\n const raw = fs.readFileSync(manifestPath(), \"utf-8\");\n const result = cacheManifestSchema.safeParse(JSON.parse(raw));\n\n if (!result.success) {\n cachedManifest = undefined;\n return undefined;\n }\n\n cachedManifest = result.data;\n return cachedManifest;\n } catch {\n // Missing file, parse error, etc.\n cachedManifest = undefined;\n return undefined;\n }\n }\n\n function getCurrentManifest(): CacheManifest | undefined {\n if (cachedManifest === null) {\n loadManifest();\n }\n return cachedManifest ?? 
undefined;\n }\n\n function ensureManifestLoaded(): CacheManifest {\n if (cachedManifest === null) {\n loadManifest();\n }\n if (cachedManifest == null) {\n cachedManifest = {\n version: 1,\n sdkVersion: \"\",\n entries: {},\n };\n }\n return cachedManifest;\n }\n\n function saveManifest(manifest: CacheManifest): void {\n fs.mkdirSync(config.cacheDir, { recursive: true });\n\n const target = manifestPath();\n const tmpFile = path.join(config.cacheDir, `.manifest.${process.pid}.tmp`);\n\n // Atomic write: write to temp file, then rename\n try {\n fs.writeFileSync(tmpFile, JSON.stringify(manifest, null, 2), \"utf-8\");\n fs.renameSync(tmpFile, target);\n } catch (e) {\n try {\n fs.rmSync(tmpFile, { force: true });\n } catch {\n // Ignore cleanup errors\n }\n throw e;\n }\n\n cachedManifest = manifest;\n }\n\n function getEntry(key: string): CacheEntry | undefined {\n const manifest = ensureManifestLoaded();\n return manifest.entries[key];\n }\n\n function setEntry(key: string, entry: CacheEntry): void {\n const manifest = ensureManifestLoaded();\n manifest.entries[key] = entry;\n }\n\n function deleteEntry(key: string): void {\n const manifest = ensureManifestLoaded();\n // eslint-disable-next-line @typescript-eslint/no-dynamic-delete -- Cache entry removal by dynamic key\n delete manifest.entries[key];\n }\n\n function storeBundleOutput(cacheKey: string, sourcePath: string): void {\n const dir = bundlesDir();\n fs.mkdirSync(dir, { recursive: true });\n fs.copyFileSync(sourcePath, bundlePath(cacheKey));\n\n const mapSource = `${sourcePath}.map`;\n const cachedMapPath = `${bundlePath(cacheKey)}.map`;\n if (fs.existsSync(mapSource)) {\n fs.copyFileSync(mapSource, cachedMapPath);\n } else {\n fs.rmSync(cachedMapPath, { force: true });\n }\n }\n\n function restoreBundleOutput(cacheKey: string, targetPath: string): boolean {\n const cached = bundlePath(cacheKey);\n const targetDir = path.dirname(targetPath);\n fs.mkdirSync(targetDir, { recursive: true });\n try {\n 
fs.copyFileSync(cached, targetPath);\n } catch (e) {\n if ((e as NodeJS.ErrnoException).code === \"ENOENT\") return false;\n throw e;\n }\n\n const cachedMap = `${cached}.map`;\n if (fs.existsSync(cachedMap)) {\n fs.copyFileSync(cachedMap, `${targetPath}.map`);\n }\n\n return true;\n }\n\n function clean(): void {\n fs.rmSync(config.cacheDir, { recursive: true, force: true });\n cachedManifest = null;\n }\n\n return {\n loadManifest,\n getCurrentManifest,\n saveManifest,\n getEntry,\n setEntry,\n deleteEntry,\n storeBundleOutput,\n restoreBundleOutput,\n clean,\n };\n}\n\nexport { createCacheStore };\nexport type { CacheStore };\n","import * as path from \"pathe\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { createBundleCache, type BundleCache } from \"./bundle-cache\";\nimport { createCacheStore } from \"./store\";\n\n/**\n * Options for creating a CacheManager.\n */\ntype CacheManagerOptions = {\n /** Whether caching is enabled. Defaults to true. */\n enabled?: boolean;\n /** Directory where cache artifacts are stored. Defaults to `<distDir>/cache`. */\n cacheDir?: string;\n /** Current SDK version for cache invalidation on upgrade. */\n sdkVersion: string;\n /** Hash of the lockfile for cache invalidation on dependency changes. */\n lockfileHash?: string;\n};\n\n/**\n * Top-level facade that orchestrates cache operations.\n */\ntype CacheManager = {\n readonly enabled: boolean;\n readonly bundleCache: BundleCache;\n /** Persist the cache manifest to disk. */\n finalize(): void;\n};\n\n/**\n * Create a CacheManager that orchestrates cache operations.\n * @param options - Configuration for the cache manager\n * @returns A CacheManager instance\n */\nfunction createCacheManager(options: CacheManagerOptions): CacheManager {\n const enabled = options.enabled ?? 
true;\n\n if (!enabled) {\n return {\n enabled: false,\n bundleCache: {\n tryRestore() {\n return false;\n },\n save() {\n // no-op\n },\n },\n finalize() {\n // no-op\n },\n };\n }\n\n const cacheDir = options.cacheDir ?? path.resolve(getDistDir(), \"cache\");\n\n const store = createCacheStore({ cacheDir });\n\n // Load existing manifest and check SDK version / lockfile hash for cache invalidation\n const existingManifest = store.loadManifest();\n if (existingManifest) {\n if (existingManifest.sdkVersion !== options.sdkVersion) {\n logger.debug(\n `Cache invalidated: SDK version changed from ${existingManifest.sdkVersion} to ${options.sdkVersion}`,\n );\n store.clean();\n } else if (existingManifest.lockfileHash !== options.lockfileHash) {\n logger.debug(\"Cache invalidated: lockfile changed\");\n store.clean();\n }\n }\n\n const bundleCache = createBundleCache(store);\n\n return {\n enabled: true,\n bundleCache,\n finalize() {\n // Use in-memory manifest to preserve entries added during the session\n const manifest = store.getCurrentManifest() ?? 
{\n version: 1 as const,\n sdkVersion: options.sdkVersion,\n lockfileHash: options.lockfileHash,\n entries: {},\n };\n manifest.sdkVersion = options.sdkVersion;\n manifest.lockfileHash = options.lockfileHash;\n store.saveManifest(manifest);\n },\n };\n}\n\nexport { createCacheManager };\nexport type { CacheManager, CacheManagerOptions };\n","import * as fs from \"node:fs\";\nimport ml from \"multiline-ts\";\nimport * as path from \"pathe\";\nimport { logger } from \"@/cli/shared/logger\";\nimport type { AppConfig } from \"@/types/app-config\";\n\nexport interface AttributeMapConfig {\n [key: string]: string;\n}\n\nexport type AttributeListConfig = readonly string[];\n\ninterface ExtractedAttributes {\n attributeMap?: AttributeMapConfig;\n attributeList?: AttributeListConfig;\n env?: Record<string, string | number | boolean>;\n}\n\ntype AttributeFieldLike = {\n type?: string;\n metadata?: {\n array?: boolean;\n allowedValues?: Array<{ value: string }>;\n };\n};\n\n/**\n * Extract attribute definitions from the app config for user-defined typing.\n * @param config - Application config to inspect\n * @returns Extracted attribute map/list and env values\n * @internal\n */\nexport function extractAttributesFromConfig(config: AppConfig): ExtractedAttributes {\n return collectAttributesFromConfig(config);\n}\n\n/**\n * Generate the contents of the user-defined type definition file.\n * @param attributeMap - Attribute map configuration\n * @param attributeList - Attribute list configuration\n * @param env - Environment configuration\n * @returns Generated type definition source\n */\nexport function generateTypeDefinition(\n attributeMap: AttributeMapConfig | undefined,\n attributeList: AttributeListConfig | undefined,\n env?: Record<string, string | number | boolean>,\n): string {\n // Generate AttributeMap interface\n // attributeMap values are type string representations (e.g., \"string\", \"boolean\", \"string[]\")\n const mapFields = attributeMap\n ? 
Object.entries(attributeMap)\n .map(([key, value]) => ` ${key}: ${value};`)\n .join(\"\\n\")\n : \"\";\n\n const mapBody =\n !attributeMap || Object.keys(attributeMap).length === 0\n ? \"{}\"\n : `{\n${mapFields}\n }`;\n\n // Generate AttributeList type as a tuple of strings based on the length\n const listType = attributeList ? `[${attributeList.map(() => \"string\").join(\", \")}]` : \"[]\";\n\n // Use interface with __tuple marker for declaration merging and tuple type support\n const listBody = `{\n __tuple?: ${listType};\n }`;\n\n // Generate Env interface\n const envFields = env\n ? Object.entries(env)\n .map(([key, value]) => {\n const valueType = typeof value === \"string\" ? `\"${value}\"` : String(value);\n return ` ${key}: ${valueType};`;\n })\n .join(\"\\n\")\n : \"\";\n\n const envBody =\n !env || Object.keys(env).length === 0\n ? \"{}\"\n : `{\n${envFields}\n }`;\n\n return ml /* ts */ `\n// This file is auto-generated by @tailor-platform/sdk\n// Do not edit this file manually\n// Regenerated automatically when running 'tailor-sdk apply' or 'tailor-sdk generate'\n\ndeclare module \"@tailor-platform/sdk\" {\n interface AttributeMap ${mapBody}\n interface AttributeList ${listBody}\n interface Env ${envBody}\n}\n\nexport {};\n\n`;\n}\n\nfunction collectAttributesFromConfig(config: AppConfig): ExtractedAttributes {\n const auth = config.auth;\n if (!auth || typeof auth !== \"object\") {\n return {};\n }\n\n const inferAttributeType = (field?: AttributeFieldLike): string => {\n const type = field?.type;\n const metadata = field?.metadata;\n\n // Default to string if no metadata\n if (!metadata) {\n return \"string\";\n }\n\n let typeStr = \"string\";\n\n if (type === \"boolean\") {\n typeStr = \"boolean\";\n } else if (type === \"enum\" && metadata.allowedValues) {\n // Generate union type from enum values\n typeStr = metadata.allowedValues.map((v) => `\"${v.value}\"`).join(\" | \");\n }\n\n // Add array suffix if needed\n if (metadata.array) {\n typeStr 
+= \"[]\";\n }\n\n return typeStr;\n };\n\n // Check if auth has userProfile with attributes/attributeList\n if (\"userProfile\" in auth) {\n const userProfile = (\n auth as {\n userProfile?: {\n type?: {\n fields?: Record<string, AttributeFieldLike>;\n };\n attributes?: Record<string, true>;\n attributeList?: AttributeListConfig;\n };\n }\n ).userProfile;\n\n const attributes = userProfile?.attributes;\n const fields = userProfile?.type?.fields;\n const attributeList = userProfile?.attributeList;\n\n // Convert attributes to AttributeMapConfig by inferring types from field metadata\n const attributeMap: AttributeMapConfig | undefined = attributes\n ? Object.keys(attributes).reduce((acc, key) => {\n acc[key] = inferAttributeType(fields?.[key]);\n return acc;\n }, {} as AttributeMapConfig)\n : undefined;\n\n return {\n attributeMap,\n attributeList,\n };\n }\n\n if (\"machineUserAttributes\" in auth) {\n const machineUserAttributes = (\n auth as {\n machineUserAttributes?: Record<string, AttributeFieldLike>;\n }\n ).machineUserAttributes;\n\n if (!machineUserAttributes) {\n return {};\n }\n\n const attributeMap = Object.entries(machineUserAttributes).reduce((acc, [key, field]) => {\n acc[key] = inferAttributeType(field);\n return acc;\n }, {} as AttributeMapConfig);\n\n return {\n attributeMap,\n };\n }\n\n return {};\n}\n\n/**\n * Resolve the output path for the generated type definition file.\n * @param configPath - Path to Tailor config file\n * @returns Absolute path to the type definition file\n */\nfunction resolveTypeDefinitionPath(configPath: string): string {\n return path.join(path.dirname(path.resolve(configPath)), \"tailor.d.ts\");\n}\n\n/**\n * Options for generating user type definitions\n */\ninterface GenerateUserTypesOptions {\n /** Application config */\n config: AppConfig;\n /** Path to Tailor config file */\n configPath: string;\n}\n\n/**\n * Generate user type definitions from the app config and write them to disk.\n * @param options - 
Generation options\n * @returns Promise that resolves when types are generated\n */\nexport async function generateUserTypes(options: GenerateUserTypesOptions): Promise<void> {\n const { config, configPath } = options;\n try {\n const { attributeMap, attributeList } = extractAttributesFromConfig(config);\n if (!attributeMap && !attributeList) {\n logger.info(\"No attributes found in configuration\", { mode: \"plain\" });\n }\n\n if (attributeMap) {\n logger.debug(`Extracted AttributeMap: ${JSON.stringify(attributeMap)}`);\n }\n if (attributeList) {\n logger.debug(`Extracted AttributeList: ${JSON.stringify(attributeList)}`);\n }\n\n const env = config.env;\n if (env) {\n logger.debug(`Extracted Env: ${JSON.stringify(env)}`);\n }\n\n // Generate type definition\n const typeDefContent = generateTypeDefinition(attributeMap, attributeList, env);\n const outputPath = resolveTypeDefinitionPath(configPath);\n\n // Write to file\n fs.mkdirSync(path.dirname(outputPath), { recursive: true });\n fs.writeFileSync(outputPath, typeDefContent);\n const relativePath = path.relative(process.cwd(), outputPath);\n logger.newline();\n logger.success(`Generated type definitions: ${relativePath}`, {\n mode: \"plain\",\n });\n } catch (error) {\n logger.error(\"Error generating types\");\n logger.error(String(error));\n // Don't throw - this should not block apply/generate\n }\n}\n","import type { IdProvider as IdProviderConfig, OAuth2ClientInput } from \"./auth.generated\";\nimport type { Executor } from \"./executor.generated\";\nimport type { DependencyKind } from \"./generator-config\";\nimport type { PluginAttachment } from \"./plugin\";\nimport type { Resolver } from \"./resolver.generated\";\nimport type { TailorDBType, TypeSourceInfoEntry } from \"./tailordb\";\n\n/**\n * A single generated file to write to disk.\n */\nexport interface GeneratedFile {\n path: string;\n content: string;\n skipIfExists?: boolean;\n executable?: boolean;\n}\n\n/**\n * Result returned by 
generation-time hooks.\n */\nexport interface GeneratorResult {\n files: GeneratedFile[];\n errors?: string[];\n}\n\n/**\n * Auth configuration available to generation-time hooks.\n */\nexport interface GeneratorAuthInput {\n name: string;\n userProfile?: {\n typeName: string;\n namespace: string;\n usernameField: string;\n };\n machineUsers?: Record<string, { attributes: Record<string, unknown> }>;\n oauth2Clients?: Record<string, OAuth2ClientInput>;\n idProvider?: IdProviderConfig;\n}\n\n/**\n * Namespace-level TailorDB data available to generation-time hooks.\n */\nexport interface TailorDBNamespaceData {\n /** Namespace name */\n namespace: string;\n /** All TailorDB types in this namespace, keyed by type name */\n types: Record<string, TailorDBType>;\n /** Source info for each type (file path, export name, plugin info) */\n sourceInfo: ReadonlyMap<string, TypeSourceInfoEntry>;\n /** Plugin attachments configured on each type via .plugin() method */\n pluginAttachments: ReadonlyMap<string, readonly PluginAttachment[]>;\n}\n\n/**\n * Namespace-level resolver data available to generation-time hooks.\n */\nexport interface ResolverNamespaceData {\n /** Namespace name */\n namespace: string;\n /** All resolvers in this namespace, keyed by resolver name */\n resolvers: Record<string, Resolver>;\n}\n\n/**\n * Context passed to plugin's onTailorDBReady hook.\n * @template PluginConfig - Plugin-level configuration type\n */\nexport interface TailorDBReadyContext<PluginConfig = unknown> {\n /** All TailorDB namespaces with their types and metadata */\n tailordb: TailorDBNamespaceData[];\n /** Auth configuration */\n auth?: GeneratorAuthInput;\n /** Base directory for generated files */\n baseDir: string;\n /** Path to tailor.config.ts */\n configPath: string;\n /** Plugin-level configuration passed via definePlugins() */\n pluginConfig: PluginConfig;\n}\n\n/**\n * Context passed to plugin's onResolverReady hook.\n * @template PluginConfig - Plugin-level configuration 
type\n */\nexport interface ResolverReadyContext<PluginConfig = unknown> {\n /** All TailorDB namespaces with their types and metadata */\n tailordb: TailorDBNamespaceData[];\n /** All resolver namespaces with their resolvers */\n resolvers: ResolverNamespaceData[];\n /** Auth configuration */\n auth?: GeneratorAuthInput;\n /** Base directory for generated files */\n baseDir: string;\n /** Path to tailor.config.ts */\n configPath: string;\n /** Plugin-level configuration passed via definePlugins() */\n pluginConfig: PluginConfig;\n}\n\n/**\n * Context passed to plugin's onExecutorReady hook.\n * @template PluginConfig - Plugin-level configuration type\n */\nexport interface ExecutorReadyContext<PluginConfig = unknown> {\n /** All TailorDB namespaces with their types and metadata */\n tailordb: TailorDBNamespaceData[];\n /** All resolver namespaces with their resolvers */\n resolvers: ResolverNamespaceData[];\n /** All executors, keyed by executor name */\n executors: Record<string, Executor>;\n /** Auth configuration */\n auth?: GeneratorAuthInput;\n /** Base directory for generated files */\n baseDir: string;\n /** Path to tailor.config.ts */\n configPath: string;\n /** Plugin-level configuration passed via definePlugins() */\n pluginConfig: PluginConfig;\n}\n\n/**\n * Derives generation-time dependency set from hook presence on a plugin.\n * @param plugin - The plugin object to inspect.\n * @param plugin.onTailorDBReady - Hook for TailorDB readiness.\n * @param plugin.onResolverReady - Hook for resolver readiness.\n * @param plugin.onExecutorReady - Hook for executor readiness.\n * @returns Set of dependency kinds required by the plugin.\n */\nexport function getPluginGenerationDependencies(plugin: {\n onTailorDBReady?: unknown;\n onResolverReady?: unknown;\n onExecutorReady?: unknown;\n}): Set<DependencyKind> {\n const deps = new Set<DependencyKind>();\n if (plugin.onTailorDBReady) {\n deps.add(\"tailordb\");\n }\n if (plugin.onResolverReady) {\n 
deps.add(\"resolver\");\n }\n if (plugin.onExecutorReady) {\n deps.add(\"executor\");\n }\n return deps;\n}\n\n/**\n * Checks if a plugin has any generation-time hooks.\n * @param plugin - The plugin object to inspect.\n * @param plugin.onTailorDBReady - Hook for TailorDB readiness.\n * @param plugin.onResolverReady - Hook for resolver readiness.\n * @param plugin.onExecutorReady - Hook for executor readiness.\n * @returns True if the plugin has at least one generation hook.\n */\nexport function hasGenerationHooks(plugin: {\n onTailorDBReady?: unknown;\n onResolverReady?: unknown;\n onExecutorReady?: unknown;\n}): boolean {\n return !!(plugin.onTailorDBReady || plugin.onResolverReady || plugin.onExecutorReady);\n}\n","import { db } from \"@/parser/service/tailordb/runtime\";\nimport { hasGenerationHooks, getPluginGenerationDependencies } from \"@/types/plugin-generation\";\nimport type { DependencyKind } from \"@/parser/generator-config\";\nimport type {\n Plugin,\n PluginGeneratedExecutor,\n PluginGeneratedType,\n PluginNamespaceProcessContext,\n PluginOutput,\n TypePluginOutput,\n} from \"@/types/plugin\";\nimport type {\n TailorAnyDBType,\n TailorTypePermission,\n TailorTypeGqlPermission,\n} from \"@/types/tailordb\";\n\n/**\n * Context for processing a single plugin attachment on a raw TailorDBType\n */\nexport interface ProcessAttachmentContext {\n type: TailorAnyDBType;\n typeConfig: unknown;\n namespace: string;\n pluginId: string;\n}\n\n/**\n * Information about a plugin-generated type (for type file generation)\n */\nexport interface PluginGeneratedTypeInfo {\n /** Plugin ID that generated this type */\n pluginId: string;\n /** Plugin import path for resolving executor files */\n pluginImportPath: string;\n /** Source type name that triggered the plugin */\n sourceTypeName: string;\n /** Kind identifier for this generated type */\n kind: string;\n /** The generated TailorDB type object */\n type: PluginGeneratedType;\n /** Namespace where this type was 
generated */\n namespace: string;\n /** Plugin config used to generate this type */\n pluginConfig?: unknown;\n}\n\n/**\n * Extended executor info with plugin import path\n */\nexport interface PluginExecutorInfoExtended extends PluginExecutorInfo {\n /** Plugin's import path for resolving executor files */\n pluginImportPath: string;\n}\n\n/**\n * Result of processing a type-attached plugin\n */\nexport type ProcessAttachmentResult =\n | { success: true; output: TypePluginOutput }\n | { success: false; error: string };\n\n/**\n * Result of processing a namespace plugin\n */\nexport type ProcessNamespaceResult =\n | { success: true; output: PluginOutput }\n | { success: false; error: string };\n\n/**\n * Information about a plugin-generated executor\n */\nexport interface PluginExecutorInfo {\n /** The executor definition */\n executor: PluginGeneratedExecutor;\n /** Plugin ID that generated this executor */\n pluginId: string;\n /** Namespace where the executor was generated */\n namespace: string;\n /** Source type name (for type-attached executors, undefined for namespace) */\n sourceTypeName?: string;\n}\n\n/**\n * Manages plugin registration and processing\n */\nexport class PluginManager {\n private plugins: Map<string, Plugin> = new Map();\n private generatedExecutors: PluginExecutorInfo[] = [];\n private generatedTypes: PluginGeneratedTypeInfo[] = [];\n private namespaceGeneratedTypeKeys: Set<string> = new Set();\n private namespaceGeneratedExecutorKeys: Set<string> = new Set();\n\n /** Generated plugin executor file paths */\n private pluginExecutorFiles: string[] = [];\n\n constructor(plugins: Plugin[] = []) {\n for (const plugin of plugins) {\n if (this.plugins.has(plugin.id)) {\n throw new Error(\n `Duplicate plugin ID \"${plugin.id}\" detected. 
Each plugin must have a unique ID.`,\n );\n }\n this.plugins.set(plugin.id, plugin);\n }\n }\n\n /**\n * Process a single plugin attachment on a raw TailorDBType.\n * This method is called during type loading before parsing.\n * @param context - Context containing the raw type, config, namespace, and plugin ID\n * @returns Result with plugin output on success, or error message on failure\n */\n async processAttachment(context: ProcessAttachmentContext): Promise<ProcessAttachmentResult> {\n const plugin = this.plugins.get(context.pluginId);\n if (!plugin) {\n return {\n success: false,\n error: `Plugin \"${context.pluginId}\" not found`,\n };\n }\n\n const typeConfigRequired = plugin.typeConfigRequired;\n const resolvedRequired =\n typeof typeConfigRequired === \"function\"\n ? typeConfigRequired(plugin.pluginConfig)\n : typeConfigRequired === true;\n if (resolvedRequired && (context.typeConfig === undefined || context.typeConfig === null)) {\n return {\n success: false,\n error: `Plugin \"${plugin.id}\" requires typeConfig, but none was provided for type \"${context.type.name}\".`,\n };\n }\n\n // Check if plugin supports type-attached processing\n if (!plugin.onTypeLoaded) {\n return {\n success: false,\n error: `Plugin \"${plugin.id}\" does not support type-attached processing (missing onTypeLoaded method). Use onNamespaceLoaded via definePlugins() instead.`,\n };\n }\n\n // Execute plugin onTypeLoaded with raw TailorDBType\n let output: TypePluginOutput;\n try {\n output = await plugin.onTypeLoaded({\n type: context.type,\n typeConfig: context.typeConfig,\n pluginConfig: plugin.pluginConfig,\n namespace: context.namespace,\n });\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error);\n return {\n success: false,\n error: `Plugin \"${plugin.id}\" threw an error while processing type \"${context.type.name}\": ${message}`,\n };\n }\n\n // Collect generated types\n if (output.types && Object.keys(output.types).length > 0) {\n // importPath is guaranteed by schema validation for plugins with definition-time hooks\n const importPath = plugin.importPath!;\n for (const [kind, type] of Object.entries(output.types)) {\n this.generatedTypes.push({\n pluginId: context.pluginId,\n pluginImportPath: importPath,\n sourceTypeName: context.type.name,\n kind,\n type,\n namespace: context.namespace,\n pluginConfig: plugin.pluginConfig,\n });\n }\n }\n\n // Collect generated executors\n if (output.executors && output.executors.length > 0) {\n for (const executor of output.executors) {\n this.generatedExecutors.push({\n executor,\n pluginId: context.pluginId,\n namespace: context.namespace,\n sourceTypeName: context.type.name,\n });\n }\n }\n\n return { success: true, output };\n }\n\n /**\n * Process namespace plugins that don't require a source type.\n * This method is called once per namespace for plugins with onNamespaceLoaded method.\n * @param namespace - The target namespace for generated types\n * @returns Array of results with plugin outputs and configs\n */\n async processNamespacePlugins(\n namespace: string,\n ): Promise<Array<{ pluginId: string; config: unknown; result: ProcessNamespaceResult }>> {\n const results: Array<{ pluginId: string; config: unknown; result: ProcessNamespaceResult }> =\n [];\n\n for (const [pluginId, plugin] of this.plugins) {\n // Skip plugins without onNamespaceLoaded method\n if (!plugin.onNamespaceLoaded) {\n continue;\n }\n\n // Use stored plugin config (from definePlugins)\n const config = plugin.pluginConfig;\n\n // Execute plugin onNamespaceLoaded\n const context: PluginNamespaceProcessContext = {\n pluginConfig: config,\n namespace,\n };\n\n let output: 
Awaited<ReturnType<NonNullable<Plugin[\"onNamespaceLoaded\"]>>>;\n try {\n output = await plugin.onNamespaceLoaded(context);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n results.push({\n pluginId,\n config,\n result: {\n success: false,\n error: `Plugin \"${plugin.id}\" threw an error during namespace processing for \"${namespace}\": ${message}`,\n },\n });\n continue;\n }\n\n // Collect generated executors (namespace - no source type)\n if (output.executors && output.executors.length > 0) {\n for (const executor of output.executors) {\n const executorKey = `${pluginId}:${executor.name}`;\n if (this.namespaceGeneratedExecutorKeys.has(executorKey)) {\n continue;\n }\n this.namespaceGeneratedExecutorKeys.add(executorKey);\n this.generatedExecutors.push({\n executor,\n pluginId,\n namespace,\n });\n }\n }\n\n // Collect generated types (namespace - no source type)\n if (output.types && Object.keys(output.types).length > 0) {\n // importPath is guaranteed by schema validation for plugins with definition-time hooks\n const importPath = plugin.importPath!;\n for (const [kind, type] of Object.entries(output.types)) {\n const typeKey = `${pluginId}:${kind}:${type.name}`;\n if (this.namespaceGeneratedTypeKeys.has(typeKey)) {\n continue;\n }\n this.namespaceGeneratedTypeKeys.add(typeKey);\n this.generatedTypes.push({\n pluginId,\n pluginImportPath: importPath,\n sourceTypeName: \"(namespace)\",\n kind,\n type,\n namespace,\n pluginConfig: plugin.pluginConfig,\n });\n }\n }\n\n results.push({\n pluginId,\n config,\n result: { success: true, output },\n });\n }\n\n return results;\n }\n\n /**\n * Get plugins that have onNamespaceLoaded method\n * @returns Array of plugin IDs that support namespace processing\n */\n getNamespacePluginIds(): string[] {\n return Array.from(this.plugins.entries())\n .filter(([, plugin]) => plugin.onNamespaceLoaded !== undefined)\n .map(([id]) => id);\n }\n\n /**\n * Get the count of registered 
plugins\n * @returns Number of registered plugins\n */\n get pluginCount(): number {\n return this.plugins.size;\n }\n\n /**\n * Get a plugin by its ID\n * @param pluginId - The plugin ID to look up\n * @returns The plugin instance, or undefined if not found\n */\n getPlugin(pluginId: string): Plugin | undefined {\n return this.plugins.get(pluginId);\n }\n\n /**\n * Get the import path for a plugin\n * @param pluginId - The plugin ID to look up\n * @returns The plugin's import path, or undefined if not found\n */\n getPluginImportPath(pluginId: string): string | undefined {\n return this.plugins.get(pluginId)?.importPath;\n }\n\n /**\n * Get all plugin-generated executors\n * @returns Array of plugin-generated executor info\n */\n getPluginGeneratedExecutors(): ReadonlyArray<PluginExecutorInfo> {\n return this.generatedExecutors;\n }\n\n /**\n * Get all plugin-generated executors with import paths\n * @returns Array of plugin-generated executor info with import paths\n */\n getPluginGeneratedExecutorsWithImportPath(): ReadonlyArray<PluginExecutorInfoExtended> {\n return this.generatedExecutors.map((info) => ({\n ...info,\n pluginImportPath: this.getPluginImportPath(info.pluginId) ?? 
\"\",\n }));\n }\n\n /**\n * Get all plugin-generated types\n * @returns Array of plugin-generated type info\n */\n getPluginGeneratedTypes(): ReadonlyArray<PluginGeneratedTypeInfo> {\n return this.generatedTypes;\n }\n\n /**\n * Get plugin-generated executors for a specific namespace\n * @param namespace - The namespace to filter by\n * @returns Array of plugin-generated executor info for the namespace\n */\n getPluginGeneratedExecutorsForNamespace(namespace: string): ReadonlyArray<PluginExecutorInfo> {\n return this.generatedExecutors.filter((info) => info.namespace === namespace);\n }\n\n /**\n * Get plugins that have any generation-time hooks.\n * @returns Array of plugins with generation hooks\n */\n getPluginsWithGenerationHooks(): Plugin[] {\n return Array.from(this.plugins.values()).filter((plugin) => hasGenerationHooks(plugin));\n }\n\n /**\n * Get the generation-time dependencies for a specific plugin.\n * @param pluginId - The plugin ID to look up\n * @returns Set of dependency kinds, or empty set if plugin not found\n */\n getPluginGenerationDependencies(pluginId: string): Set<DependencyKind> {\n const plugin = this.plugins.get(pluginId);\n if (!plugin) return new Set();\n return getPluginGenerationDependencies(plugin);\n }\n\n /**\n * Generate plugin files (types and executors) and store the executor file paths.\n * @param params - Parameters for file generation\n * @returns Generated executor file paths\n */\n generatePluginFiles(params: GeneratePluginFilesParams): string[] {\n const { outputDir, sourceTypeInfoMap, configPath, typeGenerator, executorGenerator } = params;\n\n // Generate type files\n const typeGenerationResult = typeGenerator(this.generatedTypes, outputDir);\n\n // Generate executor files\n const pluginExecutors = this.getPluginGeneratedExecutorsWithImportPath();\n this.pluginExecutorFiles = executorGenerator(\n pluginExecutors,\n outputDir,\n typeGenerationResult,\n sourceTypeInfoMap,\n configPath,\n );\n\n return 
this.pluginExecutorFiles;\n }\n\n /**\n * Extend a TailorDB type with new fields.\n * This method handles the `db.type()` call and metadata copying internally.\n * @param params - Parameters for type extension\n * @returns The extended TailorDB type\n */\n extendType(params: ExtendTypeParams): TailorAnyDBType {\n const { originalType, extendFields, pluginId } = params;\n const existingFieldNames = Object.keys(originalType.fields);\n const newFieldNames = Object.keys(extendFields);\n const duplicateFields = newFieldNames.filter((name) => existingFieldNames.includes(name));\n\n if (duplicateFields.length > 0) {\n throw new Error(\n `Plugin \"${pluginId}\" attempted to add fields that already exist in type \"${originalType.name}\": ${duplicateFields.join(\", \")}. ` +\n `extendFields cannot overwrite existing fields.`,\n );\n }\n\n const mergedFields = {\n ...originalType.fields,\n ...extendFields,\n };\n\n const { id: _id, ...fieldsWithoutId } = mergedFields;\n const pluralForm = originalType.metadata.settings?.pluralForm;\n const typeName = pluralForm\n ? 
([originalType.name, pluralForm] as [string, string])\n : originalType.name;\n const extendedType = db.type(typeName, fieldsWithoutId);\n return copyMetadataToExtendedType(originalType, extendedType);\n }\n}\n\n/**\n * Source info for user-defined types\n */\nexport type SourceTypeInfo = {\n filePath: string;\n exportName: string;\n};\n\n/**\n * Result of generating plugin type files\n */\nexport interface PluginTypeGenerationResult {\n /** Map of type name to generated file path (relative to outputDir) */\n typeFilePaths: Map<string, string>;\n /** List of all generated file paths (absolute) */\n generatedFiles: string[];\n}\n\n/**\n * Parameters for generating plugin files\n */\nexport interface GeneratePluginFilesParams {\n /** Base output directory (e.g., .tailor-sdk/plugin) */\n outputDir: string;\n /** Map of source type names to their source info */\n sourceTypeInfoMap: Map<string, SourceTypeInfo>;\n /** Path to tailor.config.ts (used for resolving plugin import paths) */\n configPath: string;\n /** Function to generate type files */\n typeGenerator: (\n types: ReadonlyArray<PluginGeneratedTypeInfo>,\n outputDir: string,\n ) => PluginTypeGenerationResult;\n /** Function to generate executor files */\n executorGenerator: (\n executors: ReadonlyArray<PluginExecutorInfoExtended>,\n outputDir: string,\n typeGenerationResult: PluginTypeGenerationResult,\n sourceTypeInfoMap: Map<string, SourceTypeInfo>,\n configPath: string,\n ) => string[];\n}\n\n/**\n * Parameters for extending a TailorDB type\n */\nexport interface ExtendTypeParams {\n /** The original TailorDB type to extend */\n originalType: TailorAnyDBType;\n /** New fields to add to the type */\n extendFields: Record<string, unknown>;\n /** The ID of the plugin extending the type */\n pluginId: string;\n}\n\n/**\n * Copy metadata from original type to extended type.\n * Preserves files, settings, permissions, indexes, and plugins.\n * @param original - The original TailorDB type with metadata\n * @param 
extended - The newly created extended type\n * @returns The extended type with copied metadata\n */\nfunction copyMetadataToExtendedType(\n original: TailorAnyDBType,\n extended: TailorAnyDBType,\n): TailorAnyDBType {\n let result = extended;\n\n // Copy description\n if (original._description) {\n result = result.description(original._description);\n }\n\n // Copy files metadata\n const metadata = original.metadata;\n if (metadata.files && Object.keys(metadata.files).length > 0) {\n result = result.files(metadata.files);\n }\n\n // Copy settings/features (excluding pluralForm which is set during construction)\n if (metadata.settings) {\n const { pluralForm: _pluralForm, ...features } = metadata.settings;\n if (Object.keys(features).length > 0) {\n result = result.features(\n features as typeof features & { aggregation?: true; bulkUpsert?: true },\n );\n }\n }\n\n // Copy permissions from metadata\n // Zod schema operand types are wider unions than the configure layer's discriminated PermissionCondition,\n // so type assertions are needed here.\n if (metadata.permissions?.record) {\n result = result.permission(metadata.permissions.record as TailorTypePermission);\n }\n if (metadata.permissions?.gql) {\n result = result.gqlPermission(metadata.permissions.gql as TailorTypeGqlPermission);\n }\n\n // Copy indexes from metadata (indexes are stored in metadata, not as a direct property)\n if (metadata.indexes && Object.keys(metadata.indexes).length > 0) {\n const indexDefs = Object.entries(metadata.indexes).map(([name, def]) => ({\n name,\n // Cast fields array to tuple type (IndexDef expects [T, T, ...T[]])\n fields: def.fields as [string, string, ...string[]],\n unique: def.unique,\n }));\n result = result.indexes(...indexDefs);\n }\n\n // Copy plugins (but don't re-process them)\n if (original.plugins && original.plugins.length > 0) {\n for (const plugin of original.plugins) {\n // Use type assertion as plugin ID is dynamic at runtime\n result = result.plugin({\n 
[plugin.pluginId]: plugin.config,\n } as Parameters<typeof result.plugin>[0]);\n }\n }\n\n return result;\n}\n","import { logger, styles, symbols } from \"@/cli/shared/logger\";\n\nexport interface HasName {\n name: string;\n}\n\nexport type ChangeSet<\n C extends HasName,\n U extends HasName,\n D extends HasName,\n R extends HasName = never,\n> = {\n readonly title: string;\n readonly creates: C[];\n readonly updates: U[];\n readonly deletes: D[];\n readonly replaces: R[];\n isEmpty: () => boolean;\n print: () => void;\n};\n\n/**\n * Create a new ChangeSet for tracking resource changes.\n * @param title - Title for the change set\n * @returns Empty ChangeSet instance with isEmpty() and print() methods\n */\nexport function createChangeSet<\n C extends HasName,\n U extends HasName,\n D extends HasName,\n R extends HasName = never,\n>(title: string): ChangeSet<C, U, D, R> {\n const creates: C[] = [];\n const updates: U[] = [];\n const deletes: D[] = [];\n const replaces: R[] = [];\n\n const isEmpty = (): boolean =>\n creates.length === 0 && updates.length === 0 && deletes.length === 0 && replaces.length === 0;\n\n return {\n title,\n creates,\n updates,\n deletes,\n replaces,\n isEmpty,\n print: () => {\n if (isEmpty()) {\n return;\n }\n logger.log(styles.bold(`${title}:`));\n creates.forEach((item) => logger.log(` ${symbols.create} ${item.name}`));\n deletes.forEach((item) => logger.log(` ${symbols.delete} ${item.name}`));\n updates.forEach((item) => logger.log(` ${symbols.update} ${item.name}`));\n replaces.forEach((item) => logger.log(` ${symbols.replace} ${item.name}`));\n },\n };\n}\n","import { readPackageJson } from \"@/cli/shared/package-json\";\nimport type { MessageInitShape } from \"@bufbuild/protobuf\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\nexport type WithLabel<T> = Partial<\n Record<\n string,\n {\n resource: T;\n label: string | undefined;\n allLabels?: Record<string, string>;\n }\n >\n>;\n\n/**\n * 
Build TRN prefix for a workspace.\n * @param workspaceId - Workspace ID\n * @returns TRN prefix string\n */\nexport function trnPrefix(workspaceId: string): string {\n return `trn:v1:workspace:${workspaceId}`;\n}\n\nexport const sdkNameLabelKey = \"sdk-name\";\n\n/**\n * Build metadata request with SDK labels.\n * @param trn - Target TRN\n * @param appName - Application name label\n * @param existingLabels - Existing labels to preserve (optional)\n * @returns Metadata request\n */\nexport async function buildMetaRequest(\n trn: string,\n appName: string,\n existingLabels?: Record<string, string>,\n): Promise<MessageInitShape<typeof SetMetadataRequestSchema>> {\n const packageJson = await readPackageJson();\n // Format version to be suitable for label value\n const sdkVersion = packageJson.version\n ? `v${packageJson.version.replace(/\\./g, \"-\")}`\n : \"unknown\";\n\n return {\n trn,\n labels: {\n ...(existingLabels ?? {}),\n [sdkNameLabelKey]: appName,\n \"sdk-version\": sdkVersion,\n },\n };\n}\n","import { type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n Subgraph_ServiceType,\n type SubgraphSchema,\n} from \"@tailor-proto/tailor/v1/application_resource_pb\";\nimport { fetchAll, resolveStaticWebsiteUrls, type OperatorClient } from \"@/cli/shared/client\";\nimport { createChangeSet } from \"./change-set\";\nimport { buildMetaRequest } from \"./label\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type {\n DeleteApplicationRequestSchema,\n CreateApplicationRequestSchema,\n UpdateApplicationRequestSchema,\n} from \"@tailor-proto/tailor/v1/application_pb\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n/**\n * Apply application changes for the given phase.\n * @param client - Operator client instance\n * @param changeSet - Planned application changes\n * @param phase - Apply phase\n * @returns Promise that 
resolves when applications are applied\n */\nexport async function applyApplication(\n client: OperatorClient,\n changeSet: Awaited<ReturnType<typeof planApplication>>,\n phase: Extract<ApplyPhase, \"create-update\" | \"delete\"> = \"create-update\",\n) {\n if (phase === \"create-update\") {\n // Applications\n await Promise.all([\n ...changeSet.creates.map(async (create) => {\n create.request.cors = await resolveStaticWebsiteUrls(\n client,\n create.request.workspaceId!,\n create.request.cors,\n \"CORS\",\n );\n await client.createApplication(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.updates.map(async (update) => {\n update.request.cors = await resolveStaticWebsiteUrls(\n client,\n update.request.workspaceId!,\n update.request.cors,\n \"CORS\",\n );\n await client.updateApplication(update.request);\n await client.setMetadata(update.metaRequest);\n }),\n ]);\n } else if (phase === \"delete\") {\n // Delete in reverse order of dependencies\n // Applications\n await Promise.all(\n changeSet.deletes.map(async (del) => {\n await client.deleteApplication(del.request);\n }),\n );\n }\n}\n\ntype CreateApplication = {\n name: string;\n request: MessageInitShape<typeof CreateApplicationRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateApplication = {\n name: string;\n request: MessageInitShape<typeof UpdateApplicationRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteApplication = {\n name: string;\n request: MessageInitShape<typeof DeleteApplicationRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:application:${name}`;\n}\n\n/**\n * Plan application changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned changes\n */\nexport async function planApplication(context: PlanContext) {\n const { client, workspaceId, application, 
forRemoval } = context;\n const changeSet = createChangeSet<CreateApplication, UpdateApplication, DeleteApplication>(\n \"Applications\",\n );\n\n const existingApplications = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { applications, nextPageToken } = await client.listApplications({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [applications, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n\n if (forRemoval) {\n if (existingApplications.some((app) => app.name === application.name)) {\n changeSet.deletes.push({\n name: application.name,\n request: {\n workspaceId,\n applicationName: application.name,\n },\n });\n }\n changeSet.print();\n return changeSet;\n }\n\n // Skip application create/update when there are no subgraphs\n // (e.g. deploying only static web hosting)\n if (application.subgraphs.length === 0) {\n changeSet.print();\n return changeSet;\n }\n\n let authNamespace: string | undefined;\n let authIdpConfigName: string | undefined;\n if (application.authService && application.authService.config) {\n authNamespace = application.authService.config.name;\n\n const idProvider = application.authService.config.idProvider;\n if (idProvider) {\n authIdpConfigName = idProvider.name;\n }\n } else if (application.config.auth) {\n // Retrieve idpConfig from remote when auth references an external namespace\n authNamespace = application.config.auth.name;\n const idpConfigs = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { idpConfigs, nextPageToken } = await client.listAuthIDPConfigs({\n workspaceId,\n namespaceName: authNamespace!,\n pageToken,\n pageSize: maxPageSize,\n });\n return [idpConfigs, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n if (idpConfigs.length > 0) {\n 
authIdpConfigName = idpConfigs[0].name;\n }\n }\n const metaRequest = await buildMetaRequest(trn(workspaceId, application.name), application.name);\n\n if (existingApplications.some((app) => app.name === application.name)) {\n changeSet.updates.push({\n name: application.name,\n request: {\n workspaceId,\n applicationName: application.name,\n authNamespace,\n authIdpConfigName,\n cors: application.config.cors,\n subgraphs: application.subgraphs.map((subgraph) => protoSubgraph(subgraph)),\n allowedIpAddresses: application.config.allowedIpAddresses,\n disableIntrospection: application.config.disableIntrospection,\n },\n metaRequest,\n });\n } else {\n changeSet.creates.push({\n name: application.name,\n request: {\n workspaceId,\n applicationName: application.name,\n authNamespace,\n authIdpConfigName,\n cors: application.config.cors,\n subgraphs: application.subgraphs.map((subgraph) => protoSubgraph(subgraph)),\n allowedIpAddresses: application.config.allowedIpAddresses,\n disableIntrospection: application.config.disableIntrospection,\n },\n metaRequest,\n });\n }\n\n changeSet.print();\n return changeSet;\n}\n\nfunction protoSubgraph(\n subgraph: Readonly<{ Type: string; Name: string }>,\n): MessageInitShape<typeof SubgraphSchema> {\n // TODO(remiposo): Make it type-safe\n let serviceType: Subgraph_ServiceType;\n switch (subgraph.Type) {\n case \"tailordb\":\n serviceType = Subgraph_ServiceType.TAILORDB;\n break;\n case \"pipeline\":\n serviceType = Subgraph_ServiceType.PIPELINE;\n break;\n case \"idp\":\n serviceType = Subgraph_ServiceType.IDP;\n break;\n case \"auth\":\n serviceType = Subgraph_ServiceType.AUTH;\n break;\n default:\n throw new Error(`Unknown subgraph type: ${subgraph.Type}`);\n }\n return {\n serviceType,\n serviceNamespace: subgraph.Name,\n };\n}\n","import { type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n type CreateIdPClientRequestSchema,\n type 
CreateIdPServiceRequestSchema,\n type DeleteIdPClientRequestSchema,\n type DeleteIdPServiceRequestSchema,\n type UpdateIdPServiceRequestSchema,\n} from \"@tailor-proto/tailor/v1/idp_pb\";\nimport { IdPLang } from \"@tailor-proto/tailor/v1/idp_resource_pb\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { createChangeSet } from \"./change-set\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type { IdP, IdPLang as IdPLangInput } from \"@/types/idp.generated\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n/**\n * Build the vault name for an IdP client.\n * @param namespaceName - IdP namespace name\n * @param clientName - IdP client name\n * @returns Vault name\n */\nexport function idpClientVaultName(namespaceName: string, clientName: string) {\n return `idp-${namespaceName}-${clientName}`;\n}\n\n/**\n * Build the secret name for an IdP client.\n * @param namespaceName - IdP namespace name\n * @param clientName - IdP client name\n * @returns Secret name\n */\nexport function idpClientSecretName(namespaceName: string, clientName: string) {\n return `client-secret-${namespaceName}-${clientName}`;\n}\n\n/**\n * Apply IdP-related changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned IdP changes\n * @param phase - Apply phase\n * @returns Promise that resolves when IdP changes are applied\n */\nexport async function applyIdP(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planIdP>>,\n phase: Exclude<ApplyPhase, \"delete\"> = \"create-update\",\n) {\n const { changeSet } = result;\n if (phase === \"create-update\") {\n // Services\n await Promise.all([\n ...changeSet.service.creates.map(async (create) => {\n await 
client.createIdPService(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.service.updates.map(async (update) => {\n await client.updateIdPService(update.request);\n await client.setMetadata(update.metaRequest);\n }),\n ]);\n\n // Clients\n await Promise.all([\n ...changeSet.client.creates.map(async (create) => {\n const resp = await client.createIdPClient(create.request);\n\n // Create the secret manager vault and secret\n const vaultName = idpClientVaultName(\n create.request.namespaceName!,\n create.request.client?.name || \"\",\n );\n const secretName = idpClientSecretName(\n create.request.namespaceName!,\n create.request.client?.name || \"\",\n );\n await client.createSecretManagerVault({\n workspaceId: create.request.workspaceId,\n secretmanagerVaultName: vaultName,\n });\n await client.createSecretManagerSecret({\n workspaceId: create.request.workspaceId,\n secretmanagerVaultName: vaultName,\n secretmanagerSecretName: secretName,\n secretmanagerSecretValue: resp.client?.clientSecret,\n });\n }),\n ...changeSet.client.updates.map(async (update) => {\n // Ensure the vault and secret exist\n const vaultName = idpClientVaultName(update.namespaceName, update.name);\n const secretName = idpClientSecretName(update.namespaceName, update.name);\n try {\n await client.getSecretManagerVault({\n workspaceId: update.workspaceId,\n secretmanagerVaultName: vaultName,\n });\n return;\n } catch (error) {\n if (!(error instanceof ConnectError && error.code === Code.NotFound)) {\n throw error;\n }\n }\n await client.createSecretManagerVault({\n workspaceId: update.workspaceId,\n secretmanagerVaultName: vaultName,\n });\n await client.createSecretManagerSecret({\n workspaceId: update.workspaceId,\n secretmanagerVaultName: vaultName,\n secretmanagerSecretName: secretName,\n secretmanagerSecretValue: update.clientSecret,\n });\n }),\n ]);\n } else if (phase === \"delete-resources\") {\n // Delete in reverse order of dependencies\n // Clients\n 
await Promise.all(\n changeSet.client.deletes.map(async (del) => {\n await client.deleteIdPClient(del.request);\n\n // Delete the secret manager vault and secret\n const vaultName = `idp-${del.request.namespaceName}-${del.request.name}`;\n await client.deleteSecretManagerVault({\n workspaceId: del.request.workspaceId,\n secretmanagerVaultName: vaultName,\n });\n }),\n );\n } else if (phase === \"delete-services\") {\n // Services only\n await Promise.all(changeSet.service.deletes.map((del) => client.deleteIdPService(del.request)));\n }\n}\n\n/**\n * Plan IdP-related changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned changes and metadata\n */\nexport async function planIdP(context: PlanContext) {\n const { client, workspaceId, application, forRemoval } = context;\n const idps = forRemoval ? [] : application.idpServices;\n const {\n changeSet: serviceChangeSet,\n conflicts,\n unmanaged,\n resourceOwners,\n } = await planServices(client, workspaceId, application.name, idps);\n const deletedServices = serviceChangeSet.deletes.map((del) => del.name);\n const clientChangeSet = await planClients(client, workspaceId, idps, deletedServices);\n\n serviceChangeSet.print();\n clientChangeSet.print();\n return {\n changeSet: {\n service: serviceChangeSet,\n client: clientChangeSet,\n },\n conflicts,\n unmanaged,\n resourceOwners,\n };\n}\n\ntype CreateService = {\n name: string;\n request: MessageInitShape<typeof CreateIdPServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateService = {\n name: string;\n request: MessageInitShape<typeof UpdateIdPServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteService = {\n name: string;\n request: MessageInitShape<typeof DeleteIdPServiceRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:idp:${name}`;\n}\n\nasync function 
planServices(\n client: OperatorClient,\n workspaceId: string,\n appName: string,\n idps: ReadonlyArray<IdP>,\n) {\n const changeSet = createChangeSet<CreateService, UpdateService, DeleteService>(\"IdP services\");\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { idpServices, nextPageToken } = await client.listIdPServices({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [idpServices, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n const existingServices: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n if (!resource.namespace?.name) {\n return;\n }\n const { metadata } = await client.getMetadata({\n trn: trn(workspaceId, resource.namespace.name),\n });\n existingServices[resource.namespace.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n for (const idp of idps) {\n const namespaceName = idp.name;\n const existing = existingServices[namespaceName];\n const metaRequest = await buildMetaRequest(trn(workspaceId, namespaceName), appName);\n let authorization;\n switch (idp.authorization) {\n case \"insecure\":\n authorization = \"true==true\";\n break;\n case \"loggedIn\":\n authorization = \"user != null && size(user.id) > 0\";\n break;\n default:\n authorization = idp.authorization.cel;\n break;\n }\n\n const lang = convertLang(idp.lang);\n const userAuthPolicy = idp.userAuthPolicy;\n\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"IdP service\",\n resourceName: idp.name,\n });\n } else if (existing.label !== appName) {\n conflicts.push({\n resourceType: \"IdP service\",\n resourceName: idp.name,\n currentOwner: existing.label,\n });\n 
}\n\n changeSet.updates.push({\n name: namespaceName,\n request: {\n workspaceId,\n namespaceName,\n authorization,\n lang,\n userAuthPolicy,\n publishUserEvents: idp.publishUserEvents,\n disableGqlOperations: convertGqlOperationsToDisable(idp.gqlOperations),\n },\n metaRequest,\n });\n delete existingServices[namespaceName];\n } else {\n changeSet.creates.push({\n name: namespaceName,\n request: {\n workspaceId,\n namespaceName,\n authorization,\n lang,\n userAuthPolicy,\n publishUserEvents: idp.publishUserEvents,\n disableGqlOperations: convertGqlOperationsToDisable(idp.gqlOperations),\n },\n metaRequest,\n });\n }\n }\n Object.entries(existingServices).forEach(([namespaceName]) => {\n const label = existingServices[namespaceName]?.label;\n if (label && label !== appName) {\n resourceOwners.add(label);\n }\n // Only delete services managed by this application\n if (label === appName) {\n changeSet.deletes.push({\n name: namespaceName,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n });\n\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n\ntype CreateClient = {\n name: string;\n request: MessageInitShape<typeof CreateIdPClientRequestSchema>;\n};\n\ntype UpdateClient = {\n name: string;\n workspaceId: string;\n namespaceName: string;\n clientSecret: string;\n};\n\ntype DeleteClient = {\n name: string;\n request: MessageInitShape<typeof DeleteIdPClientRequestSchema>;\n};\n\nasync function planClients(\n client: OperatorClient,\n workspaceId: string,\n idps: ReadonlyArray<IdP>,\n deletedServices: string[],\n) {\n const changeSet = createChangeSet<CreateClient, UpdateClient, DeleteClient>(\"IdP clients\");\n\n const fetchClients = (namespaceName: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { clients, nextPageToken } = await client.listIdPClients({\n workspaceId,\n namespaceName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [clients, nextPageToken];\n } catch (error) {\n if (error instanceof 
ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n const clientsByIdp = await Promise.all(idps.map((idp) => fetchClients(idp.name)));\n for (let i = 0; i < idps.length; i++) {\n const idp = idps[i];\n const namespaceName = idp.name;\n const existingClients = clientsByIdp[i];\n const existingNameMap = new Map<string, string>();\n existingClients.forEach((client) => {\n existingNameMap.set(client.name, client.clientSecret);\n });\n for (const name of idp.clients) {\n if (existingNameMap.has(name)) {\n changeSet.updates.push({\n name,\n workspaceId,\n namespaceName,\n clientSecret: existingNameMap.get(name)!,\n });\n existingNameMap.delete(name);\n } else {\n changeSet.creates.push({\n name,\n request: {\n workspaceId,\n namespaceName,\n client: {\n name,\n },\n },\n });\n }\n }\n existingNameMap.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName,\n name,\n },\n });\n });\n }\n\n const deletedClientsByService = await Promise.all(\n deletedServices.map((namespaceName) => fetchClients(namespaceName)),\n );\n for (let i = 0; i < deletedServices.length; i++) {\n const namespaceName = deletedServices[i];\n deletedClientsByService[i].forEach((client) => {\n changeSet.deletes.push({\n name: client.name,\n request: {\n workspaceId,\n namespaceName,\n name: client.name,\n },\n });\n });\n }\n return changeSet;\n}\n\nfunction convertLang(lang: IdPLangInput | undefined): IdPLang {\n switch (lang) {\n case \"en\":\n return IdPLang.EN;\n case \"ja\":\n return IdPLang.JA;\n default:\n return IdPLang.UNSPECIFIED;\n }\n}\n\n// Converts gqlOperations (enabled semantics, default true) to\n// disableGqlOperations (disabled semantics) for the Platform API.\n// Undefined fields are treated as true (enabled), matching TailorDB behavior.\nfunction convertGqlOperationsToDisable(\n gqlOperations: IdP[\"gqlOperations\"],\n): Record<string, boolean> | undefined {\n if (!gqlOperations) {\n 
return undefined;\n }\n return {\n create: gqlOperations.create === false,\n update: gqlOperations.update === false,\n delete: gqlOperations.delete === false,\n read: gqlOperations.read === false,\n sendPasswordResetEmail: gqlOperations.sendPasswordResetEmail === false,\n };\n}\n","import { fromJson, type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { ValueSchema } from \"@bufbuild/protobuf/wkt\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n AuthIDPConfig_AuthType,\n AuthOAuth2Client_ClientType,\n AuthOAuth2Client_GrantType,\n AuthSCIMAttribute_Mutability,\n AuthSCIMAttribute_Type,\n AuthSCIMAttribute_Uniqueness,\n AuthSCIMConfig_AuthorizationType,\n TenantProviderConfig_TenantProviderType,\n UserProfileProviderConfig_UserProfileProviderType,\n} from \"@tailor-proto/tailor/v1/auth_resource_pb\";\nimport { type AuthService } from \"@/cli/services/auth/service\";\nimport { fetchAll, resolveStaticWebsiteUrls, type OperatorClient } from \"@/cli/shared/client\";\nimport { OAuth2ClientSchema } from \"@/parser/service/auth\";\nimport { createChangeSet } from \"./change-set\";\nimport { idpClientSecretName, idpClientVaultName } from \"./idp\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type { AuthAttributeValue } from \"@/types/auth\";\nimport type {\n BuiltinIdP,\n IdProvider as IdProviderConfig,\n OAuth2ClientInput,\n SCIMAttribute,\n SCIMConfig,\n SCIMResource,\n TenantProvider as TenantProviderConfig,\n} from \"@/types/auth.generated\";\nimport type {\n CreateAuthIDPConfigRequestSchema,\n CreateAuthMachineUserRequestSchema,\n CreateAuthOAuth2ClientRequestSchema,\n CreateAuthSCIMConfigRequestSchema,\n CreateAuthSCIMResourceRequestSchema,\n CreateAuthServiceRequestSchema,\n CreateTenantConfigRequestSchema,\n 
CreateUserProfileConfigRequestSchema,\n DeleteAuthIDPConfigRequestSchema,\n DeleteAuthMachineUserRequestSchema,\n DeleteAuthOAuth2ClientRequestSchema,\n DeleteAuthSCIMConfigRequestSchema,\n DeleteAuthSCIMResourceRequestSchema,\n DeleteAuthServiceRequestSchema,\n DeleteTenantConfigRequestSchema,\n DeleteUserProfileConfigRequestSchema,\n UpdateAuthIDPConfigRequestSchema,\n UpdateAuthMachineUserRequestSchema,\n UpdateAuthOAuth2ClientRequestSchema,\n UpdateAuthSCIMConfigRequestSchema,\n UpdateAuthSCIMResourceRequestSchema,\n UpdateAuthServiceRequestSchema,\n UpdateTenantConfigRequestSchema,\n UpdateUserProfileConfigRequestSchema,\n} from \"@tailor-proto/tailor/v1/auth_pb\";\nimport type {\n AuthIDPConfig_ConfigSchema,\n AuthIDPConfigSchema,\n AuthOAuth2ClientSchema,\n AuthSCIMAttributeSchema,\n AuthSCIMConfigSchema,\n AuthSCIMResourceSchema,\n TenantProviderConfigSchema,\n UserProfileProviderConfigSchema,\n} from \"@tailor-proto/tailor/v1/auth_resource_pb\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n/**\n * Apply auth-related changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned auth changes\n * @param phase - Apply phase (defaults to \"create-update\")\n * @returns Promise that resolves when auth changes are applied\n */\nexport async function applyAuth(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planAuth>>,\n phase: Exclude<ApplyPhase, \"delete\"> = \"create-update\",\n) {\n const { changeSet } = result;\n if (phase === \"create-update\") {\n // Services\n await Promise.all([\n ...changeSet.service.creates.map(async (create) => {\n await client.createAuthService(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.service.updates.map(async (update) => {\n await client.updateAuthService(update.request);\n await client.setMetadata(update.metaRequest);\n }),\n ]);\n\n // IdPConfigs\n await Promise.all([\n 
...changeSet.idpConfig.creates.map(async (create) => {\n if (create.idpConfig.kind === \"BuiltInIdP\") {\n create.request.idpConfig!.config = await protoBuiltinIdPConfig(\n client,\n create.request.workspaceId!,\n create.idpConfig,\n );\n }\n return client.createAuthIDPConfig(create.request);\n }),\n ...changeSet.idpConfig.updates.map(async (update) => {\n if (update.idpConfig.kind === \"BuiltInIdP\") {\n update.request.idpConfig!.config = await protoBuiltinIdPConfig(\n client,\n update.request.workspaceId!,\n update.idpConfig,\n );\n }\n return client.updateAuthIDPConfig(update.request);\n }),\n ]);\n\n // UserProfileConfigs\n await Promise.all([\n ...changeSet.userProfileConfig.creates.map((create) =>\n client.createUserProfileConfig(create.request),\n ),\n ...changeSet.userProfileConfig.updates.map((update) =>\n client.updateUserProfileConfig(update.request),\n ),\n ]);\n\n // TenantConfigs\n await Promise.all([\n ...changeSet.tenantConfig.creates.map((create) => client.createTenantConfig(create.request)),\n ...changeSet.tenantConfig.updates.map((update) => client.updateTenantConfig(update.request)),\n ]);\n\n // MachineUsers\n await Promise.all([\n ...changeSet.machineUser.creates.map((create) =>\n client.createAuthMachineUser(create.request),\n ),\n ...changeSet.machineUser.updates.map((update) =>\n client.updateAuthMachineUser(update.request),\n ),\n ]);\n\n // OAuth2Clients\n await Promise.all([\n ...changeSet.oauth2Client.creates.map(async (create) => {\n create.request.oauth2Client!.redirectUris = await resolveStaticWebsiteUrls(\n client,\n create.request.workspaceId!,\n create.request.oauth2Client!.redirectUris,\n \"OAuth2 redirect URIs\",\n );\n return client.createAuthOAuth2Client(create.request);\n }),\n ...changeSet.oauth2Client.updates.map(async (update) => {\n update.request.oauth2Client!.redirectUris = await resolveStaticWebsiteUrls(\n client,\n update.request.workspaceId!,\n update.request.oauth2Client!.redirectUris,\n \"OAuth2 redirect URIs\",\n 
);\n return client.updateAuthOAuth2Client(update.request);\n }),\n ]);\n\n // OAuth2Clients replaces (client type changed): delete then create sequentially\n for (const replace of changeSet.oauth2Client.replaces) {\n await client.deleteAuthOAuth2Client(replace.deleteRequest);\n replace.createRequest.oauth2Client!.redirectUris = await resolveStaticWebsiteUrls(\n client,\n replace.createRequest.workspaceId!,\n replace.createRequest.oauth2Client!.redirectUris,\n \"OAuth2 redirect URIs\",\n );\n await client.createAuthOAuth2Client(replace.createRequest);\n }\n\n // SCIMConfigs\n await Promise.all([\n ...changeSet.scim.creates.map((create) => client.createAuthSCIMConfig(create.request)),\n ...changeSet.scim.updates.map((update) => client.updateAuthSCIMConfig(update.request)),\n ]);\n\n // SCIMResources\n await Promise.all([\n ...changeSet.scimResource.creates.map((create) =>\n client.createAuthSCIMResource(create.request),\n ),\n ...changeSet.scimResource.updates.map((update) =>\n client.updateAuthSCIMResource(update.request),\n ),\n ]);\n } else if (phase === \"delete-resources\") {\n // Delete in reverse order of dependencies\n // SCIMResources\n await Promise.all(\n changeSet.scimResource.deletes.map((del) => client.deleteAuthSCIMResource(del.request)),\n );\n\n // SCIMConfigs\n await Promise.all(\n changeSet.scim.deletes.map((del) => client.deleteAuthSCIMConfig(del.request)),\n );\n\n // OAuth2Clients\n await Promise.all(\n changeSet.oauth2Client.deletes.map((del) => client.deleteAuthOAuth2Client(del.request)),\n );\n\n // MachineUsers\n await Promise.all(\n changeSet.machineUser.deletes.map((del) => client.deleteAuthMachineUser(del.request)),\n );\n\n // TenantConfigs\n await Promise.all(\n changeSet.tenantConfig.deletes.map((del) => client.deleteTenantConfig(del.request)),\n );\n\n // UserProfileConfigs\n await Promise.all(\n changeSet.userProfileConfig.deletes.map((del) => client.deleteUserProfileConfig(del.request)),\n );\n\n // IdPConfigs\n await Promise.all(\n 
changeSet.idpConfig.deletes.map((del) => client.deleteAuthIDPConfig(del.request)),\n );\n } else if (phase === \"delete-services\") {\n // Services only\n await Promise.all(\n changeSet.service.deletes.map((del) => client.deleteAuthService(del.request)),\n );\n }\n}\n\n/**\n * Plan auth-related changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned auth changes and metadata\n */\nexport async function planAuth(context: PlanContext) {\n const { client, workspaceId, application, forRemoval } = context;\n const auths: Readonly<AuthService>[] = [];\n if (!forRemoval && application.authService) {\n await application.authService.resolveNamespaces();\n auths.push(application.authService);\n }\n const {\n changeSet: serviceChangeSet,\n conflicts,\n unmanaged,\n resourceOwners,\n } = await planServices(client, workspaceId, application.name, auths);\n const deletedServices = serviceChangeSet.deletes.map((del) => del.name);\n const [\n idpConfigChangeSet,\n userProfileConfigChangeSet,\n tenantConfigChangeSet,\n machineUserChangeSet,\n oauth2ClientChangeSet,\n scimChangeSet,\n scimResourceChangeSet,\n ] = await Promise.all([\n planIdPConfigs(client, workspaceId, auths, deletedServices),\n planUserProfileConfigs(client, workspaceId, auths, deletedServices),\n planTenantConfigs(client, workspaceId, auths, deletedServices),\n planMachineUsers(client, workspaceId, auths, deletedServices),\n planOAuth2Clients(client, workspaceId, auths, deletedServices),\n planSCIMConfigs(client, workspaceId, auths, deletedServices),\n planSCIMResources(client, workspaceId, auths, deletedServices),\n ]);\n\n serviceChangeSet.print();\n idpConfigChangeSet.print();\n userProfileConfigChangeSet.print();\n tenantConfigChangeSet.print();\n machineUserChangeSet.print();\n oauth2ClientChangeSet.print();\n scimChangeSet.print();\n scimResourceChangeSet.print();\n return {\n changeSet: {\n service: serviceChangeSet,\n idpConfig: idpConfigChangeSet,\n 
userProfileConfig: userProfileConfigChangeSet,\n tenantConfig: tenantConfigChangeSet,\n machineUser: machineUserChangeSet,\n oauth2Client: oauth2ClientChangeSet,\n scim: scimChangeSet,\n scimResource: scimResourceChangeSet,\n },\n conflicts,\n unmanaged,\n resourceOwners,\n };\n}\n\ntype CreateService = {\n name: string;\n request: MessageInitShape<typeof CreateAuthServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateService = {\n name: string;\n request: MessageInitShape<typeof UpdateAuthServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteService = {\n name: string;\n request: MessageInitShape<typeof DeleteAuthServiceRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:auth:${name}`;\n}\n\nasync function planServices(\n client: OperatorClient,\n workspaceId: string,\n appName: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n) {\n const changeSet = createChangeSet<CreateService, UpdateService, DeleteService>(\"Auth services\");\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { authServices, nextPageToken } = await client.listAuthServices({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [authServices, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n const existingServices: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n if (!resource.namespace?.name) {\n return;\n }\n const { metadata } = await client.getMetadata({\n trn: trn(workspaceId, resource.namespace.name),\n });\n existingServices[resource.namespace.name] = {\n 
resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n for (const auth of auths) {\n const { parsedConfig: config } = auth;\n const existing = existingServices[config.name];\n const metaRequest = await buildMetaRequest(trn(workspaceId, config.name), appName);\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"Auth service\",\n resourceName: config.name,\n });\n } else if (existing.label !== appName) {\n conflicts.push({\n resourceType: \"Auth service\",\n resourceName: config.name,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name: config.name,\n request: {\n workspaceId,\n namespaceName: config.name,\n publishSessionEvents: config.publishSessionEvents,\n },\n metaRequest,\n });\n delete existingServices[config.name];\n } else {\n changeSet.creates.push({\n name: config.name,\n request: {\n workspaceId,\n namespaceName: config.name,\n publishSessionEvents: config.publishSessionEvents,\n },\n metaRequest,\n });\n }\n }\n Object.entries(existingServices).forEach(([namespaceName]) => {\n const label = existingServices[namespaceName]?.label;\n if (label && label !== appName) {\n resourceOwners.add(label);\n }\n // Only delete services managed by this application\n if (label === appName) {\n changeSet.deletes.push({\n name: namespaceName,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n });\n\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n\ntype CreateIdPConfig = {\n name: string;\n idpConfig: Readonly<IdProviderConfig>;\n request: MessageInitShape<typeof CreateAuthIDPConfigRequestSchema>;\n};\n\ntype UpdateIdPConfig = {\n name: string;\n idpConfig: Readonly<IdProviderConfig>;\n request: MessageInitShape<typeof UpdateAuthIDPConfigRequestSchema>;\n};\n\ntype DeleteIdPConfig = {\n name: string;\n request: MessageInitShape<typeof DeleteAuthIDPConfigRequestSchema>;\n};\n\nasync function planIdPConfigs(\n client: OperatorClient,\n workspaceId: string,\n auths: 
ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<CreateIdPConfig, UpdateIdPConfig, DeleteIdPConfig>(\n \"Auth idpConfigs\",\n );\n\n const fetchIdPConfigs = (namespaceName: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { idpConfigs, nextPageToken } = await client.listAuthIDPConfigs({\n workspaceId,\n namespaceName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [idpConfigs, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n for (const authService of auths) {\n const { parsedConfig: config } = authService;\n const existingIdPConfigs = await fetchIdPConfigs(config.name);\n const existingNameSet = new Set<string>();\n existingIdPConfigs.forEach((idpConfig) => {\n existingNameSet.add(idpConfig.name);\n });\n const idpConfig = config.idProvider;\n if (idpConfig) {\n if (existingNameSet.has(idpConfig.name)) {\n changeSet.updates.push({\n name: idpConfig.name,\n idpConfig,\n request: {\n workspaceId,\n namespaceName: config.name,\n idpConfig: protoIdPConfig(idpConfig),\n },\n });\n existingNameSet.delete(idpConfig.name);\n } else {\n changeSet.creates.push({\n name: idpConfig.name,\n idpConfig,\n request: {\n workspaceId,\n namespaceName: config.name,\n idpConfig: protoIdPConfig(idpConfig),\n },\n });\n }\n }\n existingNameSet.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n name,\n },\n });\n });\n }\n\n for (const namespaceName of deletedServices) {\n const existingIdPConfigs = await fetchIdPConfigs(namespaceName);\n existingIdPConfigs.forEach((idpConfig) => {\n changeSet.deletes.push({\n name: idpConfig.name,\n request: {\n workspaceId,\n namespaceName,\n name: idpConfig.name,\n },\n });\n });\n }\n return changeSet;\n}\n\nfunction protoIdPConfig(idpConfig: IdProviderConfig): 
MessageInitShape<typeof AuthIDPConfigSchema> {\n switch (idpConfig.kind) {\n case \"IDToken\":\n return {\n name: idpConfig.name,\n authType: AuthIDPConfig_AuthType.ID_TOKEN,\n config: {\n config: {\n case: \"idToken\",\n value: {\n providerUrl: idpConfig.providerURL,\n clientId: idpConfig.clientID,\n issuerUrl: idpConfig.issuerURL,\n usernameClaim: idpConfig.usernameClaim,\n },\n },\n },\n };\n case \"SAML\":\n return {\n name: idpConfig.name,\n authType: AuthIDPConfig_AuthType.SAML,\n config: {\n config: {\n case: \"saml\",\n value: {\n ...(idpConfig.metadataURL !== undefined\n ? { metadataUrl: idpConfig.metadataURL }\n : { rawMetadata: idpConfig.rawMetadata! }),\n enableSignRequest: idpConfig.enableSignRequest,\n },\n },\n },\n };\n case \"OIDC\":\n return {\n name: idpConfig.name,\n authType: AuthIDPConfig_AuthType.OIDC,\n config: {\n config: {\n case: \"oidc\",\n value: {\n clientIdKey: idpConfig.clientID,\n clientSecretKey: {\n vaultName: idpConfig.clientSecret.vaultName,\n secretKey: idpConfig.clientSecret.secretKey,\n },\n providerUrl: idpConfig.providerURL,\n issuerUrl: idpConfig.issuerURL,\n usernameClaim: idpConfig.usernameClaim,\n },\n },\n },\n };\n case \"BuiltInIdP\":\n return {\n name: idpConfig.name,\n authType: AuthIDPConfig_AuthType.OIDC,\n // config is set at apply time\n config: {},\n };\n default:\n throw new Error(`Unexpected idp kind: ${idpConfig satisfies never}`);\n }\n}\n\nasync function protoBuiltinIdPConfig(\n client: OperatorClient,\n workspaceId: string,\n builtinIdPConfig: BuiltinIdP,\n): Promise<MessageInitShape<typeof AuthIDPConfig_ConfigSchema>> {\n let idpService;\n try {\n idpService = await client.getIdPService({\n workspaceId,\n namespaceName: builtinIdPConfig.namespace,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(\n `Built-in IdP \"${builtinIdPConfig.namespace}\" not found. 
Please ensure that idp is configured correctly.`,\n );\n }\n throw error;\n }\n const idpClient = await client.getIdPClient({\n workspaceId,\n namespaceName: builtinIdPConfig.namespace,\n name: builtinIdPConfig.clientName,\n });\n const vaultName = idpClientVaultName(builtinIdPConfig.namespace, builtinIdPConfig.clientName);\n const secretKey = idpClientSecretName(builtinIdPConfig.namespace, builtinIdPConfig.clientName);\n return {\n config: {\n case: \"oidc\",\n value: {\n clientIdKey: idpClient.client?.clientId,\n clientSecretKey: {\n vaultName,\n secretKey,\n },\n providerUrl: idpService.idpService?.providerUrl,\n usernameClaim: \"name\",\n },\n },\n };\n}\n\ntype CreateUserProfileConfig = {\n name: string;\n request: MessageInitShape<typeof CreateUserProfileConfigRequestSchema>;\n};\n\ntype UpdateUserProfileConfig = {\n name: string;\n request: MessageInitShape<typeof UpdateUserProfileConfigRequestSchema>;\n};\n\ntype DeleteUserProfileConfig = {\n name: string;\n request: MessageInitShape<typeof DeleteUserProfileConfigRequestSchema>;\n};\n\nasync function planUserProfileConfigs(\n client: OperatorClient,\n workspaceId: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<\n CreateUserProfileConfig,\n UpdateUserProfileConfig,\n DeleteUserProfileConfig\n >(\"Auth userProfileConfigs\");\n\n for (const auth of auths) {\n const { parsedConfig: config } = auth;\n const name = `${config.name}-user-profile-config`;\n try {\n await client.getUserProfileConfig({\n workspaceId,\n namespaceName: config.name,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n const userProfileForCreate = auth.userProfile;\n if (userProfileForCreate) {\n changeSet.creates.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n userProfileProviderConfig: protoUserProfileConfig(userProfileForCreate),\n },\n });\n }\n continue;\n }\n throw 
error;\n }\n const userProfileForUpdate = auth.userProfile;\n if (userProfileForUpdate) {\n changeSet.updates.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n userProfileProviderConfig: protoUserProfileConfig(userProfileForUpdate),\n },\n });\n } else {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n },\n });\n }\n }\n\n for (const namespaceName of deletedServices) {\n try {\n await client.getUserProfileConfig({\n workspaceId,\n namespaceName,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n continue;\n }\n throw error;\n }\n changeSet.deletes.push({\n name: `${namespaceName}-user-profile-config`,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n return changeSet;\n}\n\nfunction protoUserProfileConfig(\n userProfile: NonNullable<AuthService[\"userProfile\"]>,\n): MessageInitShape<typeof UserProfileProviderConfigSchema> {\n // Convert attributes from { key: true } to { key: \"key\" }\n const attributeMap = userProfile.attributes\n ? 
Object.fromEntries(Object.keys(userProfile.attributes).map((key) => [key, key]))\n : undefined;\n\n return {\n provider: \"TAILORDB\",\n providerType: UserProfileProviderConfig_UserProfileProviderType.TAILORDB,\n config: {\n config: {\n case: \"tailordb\",\n value: {\n namespace: userProfile.namespace,\n type: userProfile.type.name,\n usernameField: userProfile.usernameField,\n tenantIdField: undefined,\n attributesFields: userProfile.attributeList,\n attributeMap,\n },\n },\n },\n };\n}\n\ntype CreateTenantConfig = {\n name: string;\n request: MessageInitShape<typeof CreateTenantConfigRequestSchema>;\n};\n\ntype UpdateTenantConfig = {\n name: string;\n request: MessageInitShape<typeof UpdateTenantConfigRequestSchema>;\n};\n\ntype DeleteTenantConfig = {\n name: string;\n request: MessageInitShape<typeof DeleteTenantConfigRequestSchema>;\n};\n\nasync function planTenantConfigs(\n client: OperatorClient,\n workspaceId: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<CreateTenantConfig, UpdateTenantConfig, DeleteTenantConfig>(\n \"Auth tenantConfigs\",\n );\n\n for (const auth of auths) {\n const { parsedConfig: config } = auth;\n const name = `${config.name}-tenant-config`;\n try {\n await client.getTenantConfig({\n workspaceId,\n namespaceName: config.name,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n if (config.tenantProvider) {\n changeSet.creates.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n tenantProviderConfig: protoTenantConfig(config.tenantProvider),\n },\n });\n }\n continue;\n }\n throw error;\n }\n if (config.tenantProvider) {\n changeSet.updates.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n tenantProviderConfig: protoTenantConfig(config.tenantProvider),\n },\n });\n } else {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: 
config.name,\n },\n });\n }\n }\n\n for (const namespaceName of deletedServices) {\n try {\n await client.getTenantConfig({\n workspaceId,\n namespaceName,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n continue;\n }\n throw error;\n }\n changeSet.deletes.push({\n name: `${namespaceName}-tenant-config`,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n return changeSet;\n}\n\nfunction protoTenantConfig(\n tenantConfig: TenantProviderConfig,\n): MessageInitShape<typeof TenantProviderConfigSchema> {\n return {\n providerType: TenantProviderConfig_TenantProviderType.TAILORDB,\n config: {\n config: {\n case: \"tailordb\",\n value: {\n namespace: tenantConfig.namespace,\n type: tenantConfig.type,\n signatureField: tenantConfig.signatureField,\n },\n },\n },\n };\n}\n\ntype CreateMachineUser = {\n name: string;\n request: MessageInitShape<typeof CreateAuthMachineUserRequestSchema>;\n};\n\ntype UpdateMachineUser = {\n name: string;\n request: MessageInitShape<typeof UpdateAuthMachineUserRequestSchema>;\n};\n\ntype DeleteMachineUser = {\n name: string;\n request: MessageInitShape<typeof DeleteAuthMachineUserRequestSchema>;\n};\n\nasync function planMachineUsers(\n client: OperatorClient,\n workspaceId: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<CreateMachineUser, UpdateMachineUser, DeleteMachineUser>(\n \"Auth machineUsers\",\n );\n\n const fetchMachineUsers = (authNamespace: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { machineUsers, nextPageToken } = await client.listAuthMachineUsers({\n workspaceId,\n authNamespace,\n pageToken,\n pageSize: maxPageSize,\n });\n return [machineUsers, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n for (const auth of auths) {\n 
const { parsedConfig: config } = auth;\n const existingMachineUsers = await fetchMachineUsers(config.name);\n const existingNameSet = new Set<string>();\n existingMachineUsers.forEach((machineUser) => {\n existingNameSet.add(machineUser.name);\n });\n for (const machineUsername of Object.keys(config.machineUsers ?? {})) {\n const machineUser = config.machineUsers?.[machineUsername];\n if (!machineUser) {\n continue;\n }\n if (existingNameSet.has(machineUsername)) {\n changeSet.updates.push({\n name: machineUsername,\n request: {\n workspaceId,\n authNamespace: config.name,\n name: machineUsername,\n attributes: machineUser.attributeList,\n attributeMap: machineUser.attributes\n ? protoMachineUserAttributeMap(machineUser.attributes)\n : undefined,\n },\n });\n existingNameSet.delete(machineUsername);\n } else {\n changeSet.creates.push({\n name: machineUsername,\n request: {\n workspaceId,\n authNamespace: config.name,\n name: machineUsername,\n attributes: machineUser.attributeList,\n attributeMap: machineUser.attributes\n ? protoMachineUserAttributeMap(machineUser.attributes)\n : undefined,\n },\n });\n }\n }\n existingNameSet.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n authNamespace: config.name,\n name,\n },\n });\n });\n }\n\n for (const namespaceName of deletedServices) {\n const existingMachineUsers = await fetchMachineUsers(namespaceName);\n existingMachineUsers.forEach((machineUser) => {\n changeSet.deletes.push({\n name: machineUser.name,\n request: {\n workspaceId,\n authNamespace: namespaceName,\n name: machineUser.name,\n },\n });\n });\n }\n return changeSet;\n}\n\nfunction protoMachineUserAttributeMap(\n attributeMap: Record<string, AuthAttributeValue>,\n): Record<string, MessageInitShape<typeof ValueSchema>> {\n const ret: Record<string, MessageInitShape<typeof ValueSchema>> = {};\n for (const [key, value] of Object.entries(attributeMap)) {\n ret[key] = fromJson(ValueSchema, value ?? 
null);\n }\n return ret;\n}\n\ntype CreateOAuth2Clients = {\n name: string;\n request: MessageInitShape<typeof CreateAuthOAuth2ClientRequestSchema>;\n};\n\ntype UpdateOAuth2Client = {\n name: string;\n request: MessageInitShape<typeof UpdateAuthOAuth2ClientRequestSchema>;\n};\n\ntype DeleteOAuth2Client = {\n name: string;\n request: MessageInitShape<typeof DeleteAuthOAuth2ClientRequestSchema>;\n};\n\ntype ReplaceOAuth2Client = {\n name: string;\n deleteRequest: MessageInitShape<typeof DeleteAuthOAuth2ClientRequestSchema>;\n createRequest: MessageInitShape<typeof CreateAuthOAuth2ClientRequestSchema>;\n};\n\nasync function planOAuth2Clients(\n client: OperatorClient,\n workspaceId: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<\n CreateOAuth2Clients,\n UpdateOAuth2Client,\n DeleteOAuth2Client,\n ReplaceOAuth2Client\n >(\"Auth oauth2Clients\");\n\n const fetchOAuth2Clients = (namespaceName: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { oauth2Clients, nextPageToken } = await client.listAuthOAuth2Clients({\n workspaceId,\n namespaceName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [oauth2Clients, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n for (const auth of auths) {\n const { parsedConfig: config } = auth;\n const existingOAuth2Clients = await fetchOAuth2Clients(config.name);\n const existingClientsMap = new Map<string, AuthOAuth2Client_ClientType>();\n existingOAuth2Clients.forEach((oauth2Client) => {\n existingClientsMap.set(oauth2Client.name, oauth2Client.clientType);\n });\n for (const oauth2ClientName of Object.keys(config.oauth2Clients ?? 
{})) {\n const oauth2Client = config.oauth2Clients?.[oauth2ClientName];\n if (!oauth2Client) {\n continue;\n }\n const newOAuth2Client = protoOAuth2Client(oauth2ClientName, oauth2Client);\n if (existingClientsMap.has(oauth2ClientName)) {\n const existingClientType = existingClientsMap.get(oauth2ClientName)!;\n if (existingClientType !== newOAuth2Client.clientType) {\n // Client type changed: need to replace (delete then create)\n changeSet.replaces.push({\n name: oauth2ClientName,\n deleteRequest: {\n workspaceId,\n namespaceName: config.name,\n name: oauth2ClientName,\n },\n createRequest: {\n workspaceId,\n namespaceName: config.name,\n oauth2Client: newOAuth2Client,\n },\n });\n } else {\n changeSet.updates.push({\n name: oauth2ClientName,\n request: {\n workspaceId,\n namespaceName: config.name,\n oauth2Client: newOAuth2Client,\n },\n });\n }\n existingClientsMap.delete(oauth2ClientName);\n } else {\n changeSet.creates.push({\n name: oauth2ClientName,\n request: {\n workspaceId,\n namespaceName: config.name,\n oauth2Client: newOAuth2Client,\n },\n });\n }\n }\n existingClientsMap.forEach((_, name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n name,\n },\n });\n });\n }\n\n for (const namespaceName of deletedServices) {\n const existingOAuth2Clients = await fetchOAuth2Clients(namespaceName);\n existingOAuth2Clients.forEach((oauth2Client) => {\n changeSet.deletes.push({\n name: oauth2Client.name,\n request: {\n workspaceId,\n namespaceName,\n name: oauth2Client.name,\n },\n });\n });\n }\n\n return changeSet;\n}\n\nfunction protoOAuth2Client(\n oauth2ClientName: string,\n oauth2Client: OAuth2ClientInput,\n): MessageInitShape<typeof AuthOAuth2ClientSchema> {\n // Parse to transform token lifetimes\n const parsed = OAuth2ClientSchema.parse(oauth2Client);\n\n return {\n name: oauth2ClientName,\n description: parsed.description,\n grantTypes: parsed.grantTypes?.map((grantType) => {\n switch (grantType) {\n case 
\"authorization_code\":\n return AuthOAuth2Client_GrantType.AUTHORIZATION_CODE;\n case \"refresh_token\":\n return AuthOAuth2Client_GrantType.REFRESH_TOKEN;\n default:\n throw new Error(`Unknown OAuth2 client grant type: ${grantType satisfies never}`);\n }\n }),\n redirectUris: parsed.redirectURIs,\n clientType: (\n {\n confidential: AuthOAuth2Client_ClientType.CONFIDENTIAL,\n public: AuthOAuth2Client_ClientType.PUBLIC,\n browser: AuthOAuth2Client_ClientType.BROWSER,\n } satisfies Record<NonNullable<OAuth2ClientInput[\"clientType\"]>, AuthOAuth2Client_ClientType>\n )[parsed.clientType ?? \"confidential\"],\n accessTokenLifetime: parsed.accessTokenLifetimeSeconds,\n refreshTokenLifetime: parsed.refreshTokenLifetimeSeconds,\n requireDpop: parsed.requireDpop,\n };\n}\n\ntype CreateSCIMConfig = {\n name: string;\n request: MessageInitShape<typeof CreateAuthSCIMConfigRequestSchema>;\n};\n\ntype UpdateSCIMConfig = {\n name: string;\n request: MessageInitShape<typeof UpdateAuthSCIMConfigRequestSchema>;\n};\n\ntype DeleteSCIMConfig = {\n name: string;\n request: MessageInitShape<typeof DeleteAuthSCIMConfigRequestSchema>;\n};\n\nasync function planSCIMConfigs(\n client: OperatorClient,\n workspaceId: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<CreateSCIMConfig, UpdateSCIMConfig, DeleteSCIMConfig>(\n \"Auth scimConfigs\",\n );\n\n for (const auth of auths) {\n const { parsedConfig: config } = auth;\n const name = `${config.name}-scim-config`;\n try {\n await client.getAuthSCIMConfig({\n workspaceId,\n namespaceName: config.name,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n if (config.scim) {\n changeSet.creates.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n scimConfig: protoSCIMConfig(config.scim),\n },\n });\n }\n continue;\n }\n throw error;\n }\n if (config.scim) {\n changeSet.updates.push({\n 
name,\n request: {\n workspaceId,\n namespaceName: config.name,\n scimConfig: protoSCIMConfig(config.scim),\n },\n });\n } else {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n },\n });\n }\n }\n\n for (const namespaceName of deletedServices) {\n try {\n await client.getAuthSCIMConfig({\n workspaceId,\n namespaceName,\n });\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n continue;\n }\n throw error;\n }\n changeSet.deletes.push({\n name: `${namespaceName}-scim-config`,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n return changeSet;\n}\n\nfunction protoSCIMConfig(scimConfig: SCIMConfig): MessageInitShape<typeof AuthSCIMConfigSchema> {\n let authorizationType;\n switch (scimConfig.authorization.type) {\n case \"bearer\":\n authorizationType = AuthSCIMConfig_AuthorizationType.BEARER;\n break;\n case \"oauth2\":\n authorizationType = AuthSCIMConfig_AuthorizationType.OAUTH2;\n break;\n default:\n throw new Error(\n `Unknown SCIM authorization type: ${scimConfig.authorization.type satisfies never}`,\n );\n }\n\n return {\n machineUserName: scimConfig.machineUserName,\n authorizationType,\n authorizationConfig: {\n case: \"bearerSecret\",\n value: {\n vaultName: scimConfig.authorization.bearerSecret?.vaultName,\n secretKey: scimConfig.authorization.bearerSecret?.secretKey,\n },\n },\n };\n}\n\ntype CreateSCIMResource = {\n name: string;\n request: MessageInitShape<typeof CreateAuthSCIMResourceRequestSchema>;\n};\n\ntype UpdateSCIMResource = {\n name: string;\n request: MessageInitShape<typeof UpdateAuthSCIMResourceRequestSchema>;\n};\n\ntype DeleteSCIMResource = {\n name: string;\n request: MessageInitShape<typeof DeleteAuthSCIMResourceRequestSchema>;\n};\n\nasync function planSCIMResources(\n client: OperatorClient,\n workspaceId: string,\n auths: ReadonlyArray<Readonly<AuthService>>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = 
createChangeSet<CreateSCIMResource, UpdateSCIMResource, DeleteSCIMResource>(\n \"Auth scimResources\",\n );\n\n const fetchSCIMResources = async (namespaceName: string) => {\n try {\n const { scimResources } = await client.getAuthSCIMResources({\n workspaceId,\n namespaceName,\n });\n return scimResources;\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [];\n }\n throw error;\n }\n };\n\n for (const auth of auths) {\n const { parsedConfig: config } = auth;\n const existingSCIMResources = await fetchSCIMResources(config.name);\n const existingNameSet = new Set<string>();\n existingSCIMResources.forEach((scimResource) => {\n existingNameSet.add(scimResource.name);\n });\n for (const scimResource of config.scim?.resources ?? []) {\n if (existingNameSet.has(scimResource.name)) {\n changeSet.updates.push({\n name: scimResource.name,\n request: {\n workspaceId,\n namespaceName: config.name,\n scimResource: protoSCIMResource(scimResource),\n },\n });\n existingNameSet.delete(scimResource.name);\n } else {\n changeSet.creates.push({\n name: scimResource.name,\n request: {\n workspaceId,\n namespaceName: config.name,\n scimResource: protoSCIMResource(scimResource),\n },\n });\n }\n }\n existingNameSet.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: config.name,\n name,\n },\n });\n });\n }\n\n for (const namespaceName of deletedServices) {\n const existingSCIMResources = await fetchSCIMResources(namespaceName);\n existingSCIMResources.forEach((scimResource) => {\n changeSet.deletes.push({\n name: scimResource.name,\n request: {\n workspaceId,\n namespaceName,\n name: scimResource.name,\n },\n });\n });\n }\n return changeSet;\n}\n\nfunction protoSCIMResource(\n scimResource: SCIMResource,\n): MessageInitShape<typeof AuthSCIMResourceSchema> {\n return {\n name: scimResource.name,\n tailorDbNamespace: scimResource.tailorDBNamespace,\n tailorDbType: 
scimResource.tailorDBType,\n coreSchema: {\n name: scimResource.coreSchema.name,\n attributes: scimResource.coreSchema.attributes.map((attr) => protoSCIMAttribute(attr)),\n },\n attributeMapping: scimResource.attributeMapping.map((attr) => ({\n tailorDbField: attr.tailorDBField,\n scimPath: attr.scimPath,\n })),\n };\n}\n\nfunction protoSCIMAttribute(attr: SCIMAttribute): MessageInitShape<typeof AuthSCIMAttributeSchema> {\n let typ;\n switch (attr.type) {\n case \"string\":\n typ = AuthSCIMAttribute_Type.STRING;\n break;\n case \"number\":\n typ = AuthSCIMAttribute_Type.NUMBER;\n break;\n case \"boolean\":\n typ = AuthSCIMAttribute_Type.BOOLEAN;\n break;\n case \"datetime\":\n typ = AuthSCIMAttribute_Type.DATETIME;\n break;\n case \"complex\":\n typ = AuthSCIMAttribute_Type.COMPLEX;\n break;\n default:\n throw new Error(`Unknown SCIM attribute type: ${attr.type satisfies never}`);\n }\n let mutability;\n if (attr.mutability) {\n switch (attr.mutability) {\n case \"readOnly\":\n mutability = AuthSCIMAttribute_Mutability.READ_ONLY;\n break;\n case \"readWrite\":\n mutability = AuthSCIMAttribute_Mutability.READ_WRITE;\n break;\n case \"writeOnly\":\n mutability = AuthSCIMAttribute_Mutability.WRITE_ONLY;\n break;\n default:\n throw new Error(`Unknown SCIM attribute mutability: ${attr.mutability satisfies never}`);\n }\n }\n let uniqueness;\n if (attr.uniqueness) {\n switch (attr.uniqueness) {\n case \"none\":\n uniqueness = AuthSCIMAttribute_Uniqueness.NONE;\n break;\n case \"server\":\n uniqueness = AuthSCIMAttribute_Uniqueness.SERVER;\n break;\n case \"global\":\n uniqueness = AuthSCIMAttribute_Uniqueness.GLOBAL;\n break;\n default:\n throw new Error(`Unknown SCIM attribute uniqueness: ${attr.uniqueness satisfies never}`);\n }\n }\n return {\n type: typ,\n name: attr.name,\n description: attr.description,\n mutability,\n required: attr.required,\n multiValued: attr.multiValued,\n uniqueness,\n canonicalValues: attr.canonicalValues ?? 
undefined,\n subAttributes: attr.subAttributes?.map((attr) => protoSCIMAttribute(attr)),\n };\n}\n","import ml from \"multiline-ts\";\nimport { styles, logger } from \"@/cli/shared/logger\";\n\nexport interface OwnerConflict {\n resourceType: string;\n resourceName: string;\n currentOwner: string;\n}\n\nexport interface UnmanagedResource {\n resourceType: string;\n resourceName: string;\n}\n\n/**\n * Confirm reassignment of resources when owner conflicts are detected.\n * @param conflicts - Detected owner conflicts\n * @param appName - Target application name\n * @param yes - Whether to auto-confirm without prompting\n * @returns Promise that resolves when confirmation completes\n */\nexport async function confirmOwnerConflict(\n conflicts: OwnerConflict[],\n appName: string,\n yes: boolean,\n): Promise<void> {\n if (conflicts.length === 0) return;\n\n const currentOwners = [...new Set(conflicts.map((c) => c.currentOwner))];\n\n logger.warn(\"Application name mismatch detected:\");\n\n logger.log(\n ` ${styles.warning(\"Current application(s)\")}: ${currentOwners.map((o) => styles.bold(`\"${o}\"`)).join(\", \")}`,\n );\n logger.log(` ${styles.success(\"New application\")}: ${styles.bold(`\"${appName}\"`)}`);\n logger.newline();\n logger.log(` ${styles.info(\"Resources\")}:`);\n for (const c of conflicts) {\n logger.log(` • ${styles.bold(c.resourceType)} ${styles.info(`\"${c.resourceName}\"`)}`);\n }\n\n if (yes) {\n logger.success(\"Updating resources (--yes flag specified)...\", {\n mode: \"plain\",\n });\n return;\n }\n\n const promptMessage =\n currentOwners.length === 1\n ? `Update these resources to be managed by \"${appName}\"?\\n${styles.dim(\"(Common when renaming your application)\")}`\n : `Update these resources to be managed by \"${appName}\"?`;\n const confirmed = await logger.prompt(promptMessage, {\n type: \"confirm\",\n initial: false,\n });\n if (!confirmed) {\n throw new Error(ml`\n Apply cancelled. 
Resources remain managed by their current applications.\n To override, run again and confirm, or use --yes flag.\n `);\n }\n}\n\n/**\n * Confirm allowing tailor-sdk to manage previously unmanaged resources.\n * @param resources - Unmanaged resources\n * @param appName - Target application name\n * @param yes - Whether to auto-confirm without prompting\n * @returns Promise that resolves when confirmation completes\n */\nexport async function confirmUnmanagedResources(\n resources: UnmanagedResource[],\n appName: string,\n yes: boolean,\n): Promise<void> {\n if (resources.length === 0) return;\n\n logger.warn(\"Existing resources not tracked by tailor-sdk were found:\");\n\n logger.log(` ${styles.info(\"Resources\")}:`);\n for (const r of resources) {\n logger.log(` • ${styles.bold(r.resourceType)} ${styles.info(`\"${r.resourceName}\"`)}`);\n }\n logger.newline();\n logger.log(\" These resources may have been created by older SDK versions, Terraform, or CUE.\");\n logger.log(\" To continue, confirm that tailor-sdk should manage them.\");\n logger.log(\n \" If they are managed by another tool (e.g., Terraform), cancel and manage them there instead.\",\n );\n\n if (yes) {\n logger.success(`Adding to \"${appName}\" (--yes flag specified)...`, {\n mode: \"plain\",\n });\n return;\n }\n\n const confirmed = await logger.prompt(\n `Allow tailor-sdk to manage these resources for \"${appName}\"?`,\n { type: \"confirm\", initial: false },\n );\n if (!confirmed) {\n throw new Error(ml`\n Apply cancelled. 
Resources remain unmanaged.\n To override, run again and confirm, or use --yes flag.\n `);\n }\n}\n\nexport interface ImportantResourceDeletion {\n resourceType: string;\n resourceName: string;\n}\n\n/**\n * Confirm deletion of important resources.\n * @param resources - Resources scheduled for deletion\n * @param yes - Whether to auto-confirm without prompting\n * @returns Promise that resolves when confirmation completes\n */\nexport async function confirmImportantResourceDeletion(\n resources: ImportantResourceDeletion[],\n yes: boolean,\n): Promise<void> {\n if (resources.length === 0) return;\n\n logger.warn(\"The following resources will be deleted:\");\n\n logger.log(` ${styles.info(\"Resources\")}:`);\n for (const r of resources) {\n logger.log(` • ${styles.bold(r.resourceType)} ${styles.error(`\"${r.resourceName}\"`)}`);\n }\n logger.newline();\n logger.log(\n styles.warning(\" Deleting these resources will permanently remove all associated data.\"),\n );\n\n if (yes) {\n logger.success(\"Deleting resources (--yes flag specified)...\", {\n mode: \"plain\",\n });\n return;\n }\n\n const confirmed = await logger.prompt(\"Are you sure you want to delete these resources?\", {\n type: \"confirm\",\n initial: false,\n });\n if (!confirmed) {\n throw new Error(ml`\n Apply cancelled. 
Resources will not be deleted.\n To override, run again and confirm, or use --yes flag.\n `);\n }\n}\n","/**\n * Runtime args transformation for all services.\n *\n * Each service transforms server-side args/context into SDK-friendly format:\n * - Executor: server-side expression evaluated by platform before calling function\n * - Resolver: operationHook expression evaluated by platform before calling function\n *\n * The user field mapping (server → SDK) shared across services is defined in\n * `@/parser/service/tailordb` as `tailorUserMap`.\n */\nimport { tailorUserMap } from \"@/parser/service/tailordb\";\nimport type { Trigger } from \"@/types/executor.generated\";\n\n// ---------------------------------------------------------------------------\n// Executor\n// ---------------------------------------------------------------------------\n\n/**\n * Actor field transformation expression.\n *\n * Transforms the server's actor object to match the SDK's TailorActor type:\n * server `attributeMap` → SDK `attributes`\n * server `attributes` → SDK `attributeList`\n * other fields → passed through\n * null/undefined actor → null\n */\nconst ACTOR_TRANSFORM_EXPR =\n `actor: args.actor ? (({ attributeMap, attributes: attrList, ...rest }) => ` +\n `({ ...rest, attributes: attributeMap, attributeList: attrList }))(args.actor) : null`;\n\n/**\n * Build the JavaScript expression that transforms server-format executor event\n * args into SDK-format args at runtime.\n *\n * The Tailor Platform server delivers event args with server-side field names.\n * The SDK exposes different field names to user code. This function produces a\n * JavaScript expression string that performs the mapping when evaluated\n * server-side.\n * @param triggerKind - The trigger kind discriminant from the parsed executor\n * @param env - Application env record to embed in the expression\n * @returns A JavaScript expression string, e.g. `({ ...args, ... 
})`\n */\nexport function buildExecutorArgsExpr(\n triggerKind: Trigger[\"kind\"],\n env: Record<string, string | number | boolean>,\n): string {\n const envExpr = `env: ${JSON.stringify(env)}`;\n\n switch (triggerKind) {\n // Event triggers with actor + standard field mapping\n case \"schedule\":\n case \"recordCreated\":\n case \"recordUpdated\":\n case \"recordDeleted\":\n case \"idpUserCreated\":\n case \"idpUserUpdated\":\n case \"idpUserDeleted\":\n case \"authAccessTokenIssued\":\n case \"authAccessTokenRefreshed\":\n case \"authAccessTokenRevoked\":\n return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, ${envExpr} })`;\n\n // resolverExecuted: actor + success/result/error mapping\n case \"resolverExecuted\":\n return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, success: !!args.succeeded, result: args.succeeded?.result.resolver, error: args.failed?.error, ${envExpr} })`;\n\n // incomingWebhook: rawBody mapping, no actor\n case \"incomingWebhook\":\n return `({ ...args, appNamespace: args.namespaceName, rawBody: args.raw_body, ${envExpr} })`;\n\n default:\n throw new Error(`Unknown trigger kind for args expression: ${triggerKind satisfies never}`);\n }\n}\n\n// ---------------------------------------------------------------------------\n// Resolver\n// ---------------------------------------------------------------------------\n\n/**\n * Build the operationHook expression for resolver pipelines.\n *\n * Transforms server context to SDK resolver context:\n * context.args → input\n * context.pipeline → spread into result\n * user (global var) → TailorUser (via tailorUserMap: workspace_id→workspaceId, attribute_map→attributes, attributes→attributeList)\n * env → injected as JSON\n * @param env - Application env record to embed in the expression\n * @returns A JavaScript expression string for the operationHook\n */\nexport function buildResolverOperationHookExpr(\n env: Record<string, string | number | 
boolean>,\n): string {\n return `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${JSON.stringify(env)} });`;\n}\n","import * as crypto from \"node:crypto\";\nimport * as fs from \"node:fs\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport * as path from \"pathe\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { createChangeSet } from \"./change-set\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase } from \"@/cli/commands/apply/apply\";\nimport type { Application } from \"@/cli/services/application\";\nimport type { CollectedJob } from \"@/cli/services/workflow/service\";\nimport type { MessageInitShape } from \"@bufbuild/protobuf\";\nimport type {\n CreateFunctionRegistryRequestSchema,\n UpdateFunctionRegistryRequestSchema,\n} from \"@tailor-proto/tailor/v1/function_registry_pb\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\nconst CHUNK_SIZE = 64 * 1024; // 64KB\n\nexport type FunctionEntry = {\n name: string;\n scriptContent: string;\n contentHash: string;\n description: string;\n};\n\ntype CreateFunction = {\n name: string;\n entry: FunctionEntry;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateFunction = {\n name: string;\n entry: FunctionEntry;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteFunction = {\n name: string;\n workspaceId: string;\n};\n\n/**\n * Compute SHA-256 content hash for a script string.\n * @param content - Script content to hash\n * @returns Hex-encoded SHA-256 hash\n */\nfunction computeContentHash(content: string): string {\n return crypto.createHash(\"sha256\").update(content, 
\"utf-8\").digest(\"hex\");\n}\n\nfunction functionRegistryTrn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:function_registry:${name}`;\n}\n\n/**\n * Build a function registry name for a resolver.\n * @param namespace - Resolver namespace\n * @param resolverName - Resolver name\n * @returns Function registry name\n */\nexport function resolverFunctionName(namespace: string, resolverName: string): string {\n return `resolver--${namespace}--${resolverName}`;\n}\n\n/**\n * Build a function registry name for an executor.\n * @param executorName - Executor name\n * @returns Function registry name\n */\nexport function executorFunctionName(executorName: string): string {\n return `executor--${executorName}`;\n}\n\n/**\n * Build a function registry name for a workflow job.\n * @param jobName - Workflow job name\n * @returns Function registry name\n */\nexport function workflowJobFunctionName(jobName: string): string {\n return `workflow--${jobName}`;\n}\n\n/**\n * Collect all function entries from bundled scripts for all services.\n * @param application - Application definition\n * @param workflowJobs - Collected workflow jobs from config\n * @returns Array of function entries to register\n */\nexport function collectFunctionEntries(\n application: Readonly<Application>,\n workflowJobs: CollectedJob[],\n): FunctionEntry[] {\n const entries: FunctionEntry[] = [];\n const distDir = getDistDir();\n\n // Resolvers\n for (const app of application.applications) {\n for (const pipeline of app.resolverServices) {\n for (const resolver of Object.values(pipeline.resolvers)) {\n const scriptPath = path.join(distDir, \"resolvers\", `${resolver.name}.js`);\n try {\n const content = fs.readFileSync(scriptPath, \"utf-8\");\n entries.push({\n name: resolverFunctionName(pipeline.namespace, resolver.name),\n scriptContent: content,\n contentHash: computeContentHash(content),\n description: `Resolver: ${pipeline.namespace}/${resolver.name}`,\n });\n } catch 
{\n logger.warn(`Function file not found: ${scriptPath}`);\n }\n }\n }\n }\n\n // Executors\n if (application.executorService) {\n const executors = application.executorService.executors;\n for (const executor of Object.values(executors)) {\n if (executor.operation.kind === \"function\" || executor.operation.kind === \"jobFunction\") {\n const scriptPath = path.join(distDir, \"executors\", `${executor.name}.js`);\n try {\n const content = fs.readFileSync(scriptPath, \"utf-8\");\n entries.push({\n name: executorFunctionName(executor.name),\n scriptContent: content,\n contentHash: computeContentHash(content),\n description: `Executor: ${executor.name}`,\n });\n } catch {\n logger.warn(`Function file not found: ${scriptPath}`);\n }\n }\n }\n }\n\n // Workflow jobs\n for (const job of workflowJobs) {\n const scriptPath = path.join(distDir, \"workflow-jobs\", `${job.name}.js`);\n try {\n const content = fs.readFileSync(scriptPath, \"utf-8\");\n entries.push({\n name: workflowJobFunctionName(job.name),\n scriptContent: content,\n contentHash: computeContentHash(content),\n description: `Workflow job: ${job.name}`,\n });\n } catch {\n logger.warn(`Function file not found: ${scriptPath}`);\n }\n }\n\n return entries;\n}\n\ntype ExistingFunction = {\n name: string;\n contentHash: string;\n};\n\n/**\n * Plan function registry changes based on current and desired state.\n * @param client - Operator client instance\n * @param workspaceId - Workspace ID\n * @param appName - Application name\n * @param entries - Desired function entries\n * @returns Planned changes\n */\nexport async function planFunctionRegistry(\n client: OperatorClient,\n workspaceId: string,\n appName: string,\n entries: FunctionEntry[],\n) {\n const changeSet = createChangeSet<CreateFunction, UpdateFunction, DeleteFunction>(\n \"Function registry\",\n );\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n // Fetch existing 
function registry entries\n const existingFunctions = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const response = await client.listFunctionRegistries({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [\n response.functions.map(\n (f): ExistingFunction => ({\n name: f.name,\n contentHash: f.contentHash,\n }),\n ),\n response.nextPageToken,\n ];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n\n // Build map of existing functions with their labels\n const existingMap: WithLabel<ExistingFunction> = {};\n await Promise.all(\n existingFunctions.map(async (func) => {\n const { metadata } = await client.getMetadata({\n trn: functionRegistryTrn(workspaceId, func.name),\n });\n existingMap[func.name] = {\n resource: func,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n // Process desired entries\n for (const entry of entries) {\n const existing = existingMap[entry.name];\n const metaRequest = await buildMetaRequest(\n functionRegistryTrn(workspaceId, entry.name),\n appName,\n );\n\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"Function registry\",\n resourceName: entry.name,\n });\n } else if (existing.label !== appName) {\n conflicts.push({\n resourceType: \"Function registry\",\n resourceName: entry.name,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name: entry.name,\n entry,\n metaRequest,\n });\n delete existingMap[entry.name];\n } else {\n changeSet.creates.push({\n name: entry.name,\n entry,\n metaRequest,\n });\n }\n }\n\n // Remaining entries in existingMap are candidates for deletion\n for (const [name, existing] of Object.entries(existingMap)) {\n if (!existing) continue;\n const label = existing.label;\n if (label && label !== appName) {\n resourceOwners.add(label);\n }\n // Only delete functions managed by this application\n if (label === appName) {\n 
changeSet.deletes.push({\n name,\n workspaceId,\n });\n }\n }\n\n changeSet.print();\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n\n/**\n * Upload a function script to the function registry using client streaming.\n * @param client - Operator client instance\n * @param workspaceId - Workspace ID\n * @param entry - Function entry to upload\n * @param isCreate - Whether this is a create (true) or update (false)\n */\nasync function uploadFunctionScript(\n client: OperatorClient,\n workspaceId: string,\n entry: FunctionEntry,\n isCreate: boolean,\n) {\n const buffer = Buffer.from(entry.scriptContent, \"utf-8\");\n\n const info = {\n workspaceId,\n name: entry.name,\n description: entry.description,\n sizeBytes: BigInt(buffer.length),\n contentHash: entry.contentHash,\n };\n\n if (isCreate) {\n /** @yields {MessageInitShape<typeof CreateFunctionRegistryRequestSchema>} Create request messages (info header followed by content chunks) */\n async function* createStream(): AsyncIterable<\n MessageInitShape<typeof CreateFunctionRegistryRequestSchema>\n > {\n yield { payload: { case: \"info\" as const, value: info } };\n for (let i = 0; i < buffer.length; i += CHUNK_SIZE) {\n yield {\n payload: {\n case: \"chunk\" as const,\n value: buffer.subarray(i, Math.min(i + CHUNK_SIZE, buffer.length)),\n },\n };\n }\n }\n await client.createFunctionRegistry(createStream());\n } else {\n /** @yields {MessageInitShape<typeof UpdateFunctionRegistryRequestSchema>} Update request messages (info header followed by content chunks) */\n async function* updateStream(): AsyncIterable<\n MessageInitShape<typeof UpdateFunctionRegistryRequestSchema>\n > {\n yield { payload: { case: \"info\" as const, value: info } };\n for (let i = 0; i < buffer.length; i += CHUNK_SIZE) {\n yield {\n payload: {\n case: \"chunk\" as const,\n value: buffer.subarray(i, Math.min(i + CHUNK_SIZE, buffer.length)),\n },\n };\n }\n }\n await client.updateFunctionRegistry(updateStream());\n }\n}\n\n/**\n 
* Apply function registry changes for the given phase.\n * @param client - Operator client instance\n * @param workspaceId - Workspace ID\n * @param result - Planned function registry changes\n * @param phase - Apply phase\n */\nexport async function applyFunctionRegistry(\n client: OperatorClient,\n workspaceId: string,\n result: Awaited<ReturnType<typeof planFunctionRegistry>>,\n phase: Extract<ApplyPhase, \"create-update\" | \"delete\"> = \"create-update\",\n) {\n const { changeSet } = result;\n if (phase === \"create-update\") {\n // Upload new functions\n for (const create of changeSet.creates) {\n await uploadFunctionScript(client, workspaceId, create.entry, true);\n await client.setMetadata(create.metaRequest);\n }\n\n // Update existing functions (server deduplicates content by hash)\n for (const update of changeSet.updates) {\n await uploadFunctionScript(client, workspaceId, update.entry, false);\n await client.setMetadata(update.metaRequest);\n }\n } else if (phase === \"delete\") {\n await Promise.all(\n changeSet.deletes.map((del) =>\n client.deleteFunctionRegistry({\n workspaceId: del.workspaceId,\n name: del.name,\n }),\n ),\n );\n }\n}\n","import { type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n type CreateExecutorExecutorRequestSchema,\n type DeleteExecutorExecutorRequestSchema,\n type UpdateExecutorExecutorRequestSchema,\n} from \"@tailor-proto/tailor/v1/executor_pb\";\nimport {\n type ExecutorExecutorSchema,\n type ExecutorTargetConfigSchema,\n ExecutorTargetType,\n type ExecutorTargetWebhookHeaderSchema,\n type ExecutorTriggerConfigSchema,\n ExecutorTriggerType,\n} from \"@tailor-proto/tailor/v1/executor_resource_pb\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { buildExecutorArgsExpr } from \"@/cli/shared/runtime-args\";\nimport { stringifyFunction } from \"@/parser/service/tailordb\";\nimport { createChangeSet } from 
\"./change-set\";\nimport { executorFunctionName } from \"./function-registry\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type { Executor, Trigger } from \"@/types/executor.generated\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n/**\n * Apply executor-related changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned executor changes\n * @param phase - Apply phase (defaults to \"create-update\")\n * @returns Promise that resolves when executors are applied\n */\nexport async function applyExecutor(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planExecutor>>,\n phase: Extract<ApplyPhase, \"create-update\" | \"delete\"> = \"create-update\",\n) {\n const { changeSet } = result;\n if (phase === \"create-update\") {\n // Executors\n await Promise.all([\n ...changeSet.creates.map(async (create) => {\n await client.createExecutorExecutor(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.updates.map(async (update) => {\n await client.updateExecutorExecutor(update.request);\n await client.setMetadata(update.metaRequest);\n }),\n ]);\n } else if (phase === \"delete\") {\n // Delete in reverse order of dependencies\n // Executors\n await Promise.all(changeSet.deletes.map((del) => client.deleteExecutorExecutor(del.request)));\n }\n}\n\ntype CreateExecutor = {\n name: string;\n request: MessageInitShape<typeof CreateExecutorExecutorRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateExecutor = {\n name: string;\n request: MessageInitShape<typeof UpdateExecutorExecutorRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteExecutor = {\n name: string;\n 
request: MessageInitShape<typeof DeleteExecutorExecutorRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:executor:${name}`;\n}\n\n/**\n * Plan executor-related changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned changes\n */\nexport async function planExecutor(context: PlanContext) {\n const { client, workspaceId, application, forRemoval } = context;\n const changeSet = createChangeSet<CreateExecutor, UpdateExecutor, DeleteExecutor>(\"Executors\");\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { executors, nextPageToken } = await client.listExecutorExecutors({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [executors, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n const existingExecutors: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n const { metadata } = await client.getMetadata({\n trn: trn(workspaceId, resource.name),\n });\n existingExecutors[resource.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n const executors = forRemoval ? {} : ((await application.executorService?.loadExecutors()) ?? 
{});\n for (const executor of Object.values(executors)) {\n const existing = existingExecutors[executor.name];\n const metaRequest = await buildMetaRequest(trn(workspaceId, executor.name), application.name);\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"Executor\",\n resourceName: executor.name,\n });\n } else if (existing.label !== application.name) {\n conflicts.push({\n resourceType: \"Executor\",\n resourceName: executor.name,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name: executor.name,\n request: {\n workspaceId,\n executor: protoExecutor(application.name, executor, application.env),\n },\n metaRequest,\n });\n delete existingExecutors[executor.name];\n } else {\n changeSet.creates.push({\n name: executor.name,\n request: {\n workspaceId,\n executor: protoExecutor(application.name, executor, application.env),\n },\n metaRequest,\n });\n }\n }\n Object.entries(existingExecutors).forEach(([name]) => {\n const label = existingExecutors[name]?.label;\n if (label && label !== application.name) {\n resourceOwners.add(label);\n }\n // Only delete executors managed by this application\n if (label === application.name) {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n name,\n },\n });\n }\n });\n\n changeSet.print();\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n\nfunction protoExecutor(\n appName: string,\n executor: Executor,\n env: Record<string, string | number | boolean>,\n): MessageInitShape<typeof ExecutorExecutorSchema> {\n const trigger = executor.trigger;\n let triggerType: ExecutorTriggerType;\n let triggerConfig: MessageInitShape<typeof ExecutorTriggerConfigSchema>;\n\n const argsExpr = buildExecutorArgsExpr(trigger.kind, env);\n\n const eventType: { [key in Trigger[\"kind\"]]?: string } = {\n recordCreated: \"tailordb.type_record.created\",\n recordUpdated: \"tailordb.type_record.updated\",\n recordDeleted: \"tailordb.type_record.deleted\",\n 
resolverExecuted: \"pipeline.resolver.executed\",\n idpUserCreated: \"idp.user.created\",\n idpUserUpdated: \"idp.user.updated\",\n idpUserDeleted: \"idp.user.deleted\",\n authAccessTokenIssued: \"auth.access_token.issued\",\n authAccessTokenRefreshed: \"auth.access_token.refreshed\",\n authAccessTokenRevoked: \"auth.access_token.revoked\",\n };\n switch (trigger.kind) {\n case \"schedule\":\n triggerType = ExecutorTriggerType.SCHEDULE;\n triggerConfig = {\n config: {\n case: \"schedule\",\n value: {\n timezone: trigger.timezone,\n frequency: trigger.cron,\n },\n },\n };\n break;\n case \"recordCreated\":\n case \"recordUpdated\":\n case \"recordDeleted\":\n triggerType = ExecutorTriggerType.EVENT;\n triggerConfig = {\n config: {\n case: \"event\",\n value: {\n eventType: eventType[trigger.kind],\n condition: {\n expr: [\n /* js */ `args.typeName === \"${trigger.typeName}\"`,\n ...(trigger.condition\n ? [/* js */ `(${stringifyFunction(trigger.condition)})(${argsExpr})`]\n : []),\n ].join(\" && \"),\n },\n },\n },\n };\n break;\n case \"resolverExecuted\":\n triggerType = ExecutorTriggerType.EVENT;\n triggerConfig = {\n config: {\n case: \"event\",\n value: {\n eventType: eventType[trigger.kind],\n condition: {\n expr: [\n /* js */ `args.resolverName === \"${trigger.resolverName}\"`,\n ...(trigger.condition\n ? 
[/* js */ `(${stringifyFunction(trigger.condition)})(${argsExpr})`]\n : []),\n ].join(\" && \"),\n },\n },\n },\n };\n break;\n case \"incomingWebhook\":\n triggerType = ExecutorTriggerType.INCOMING_WEBHOOK;\n triggerConfig = {\n config: {\n case: \"incomingWebhook\",\n value: {},\n },\n };\n break;\n case \"idpUserCreated\":\n case \"idpUserUpdated\":\n case \"idpUserDeleted\":\n case \"authAccessTokenIssued\":\n case \"authAccessTokenRefreshed\":\n case \"authAccessTokenRevoked\":\n triggerType = ExecutorTriggerType.EVENT;\n triggerConfig = {\n config: {\n case: \"event\",\n value: {\n eventType: eventType[trigger.kind],\n },\n },\n };\n break;\n default:\n throw new Error(`Unknown trigger: ${trigger satisfies never}`);\n }\n\n const target = executor.operation;\n let targetType: ExecutorTargetType;\n let targetConfig: MessageInitShape<typeof ExecutorTargetConfigSchema>;\n\n switch (target.kind) {\n case \"webhook\": {\n targetType = ExecutorTargetType.WEBHOOK;\n targetConfig = {\n config: {\n case: \"webhook\",\n value: {\n url: {\n expr: `(${stringifyFunction(target.url)})(${argsExpr})`,\n },\n headers: target.headers\n ? Object.entries(target.headers).map(([key, v]) => {\n let value: MessageInitShape<typeof ExecutorTargetWebhookHeaderSchema>[\"value\"];\n if (typeof v === \"string\") {\n value = {\n case: \"rawValue\",\n value: v,\n };\n } else {\n value = {\n case: \"secretValue\",\n value: {\n vaultName: v.vault,\n secretKey: v.key,\n },\n };\n }\n return { key, value };\n })\n : undefined,\n body: target.requestBody\n ? {\n expr: `(${stringifyFunction(target.requestBody)})(${argsExpr})`,\n }\n : undefined,\n },\n },\n };\n break;\n }\n case \"graphql\": {\n targetType = ExecutorTargetType.TAILOR_GRAPHQL;\n targetConfig = {\n config: {\n case: \"tailorGraphql\",\n value: {\n appName: target.appName ?? appName,\n query: target.query,\n variables: target.variables\n ? 
{\n expr: `(${stringifyFunction(target.variables)})(${argsExpr})`,\n }\n : undefined,\n invoker: target.authInvoker ?? undefined,\n },\n },\n };\n break;\n }\n case \"function\":\n case \"jobFunction\": {\n if (target.kind === \"function\") {\n targetType = ExecutorTargetType.FUNCTION;\n } else {\n targetType = ExecutorTargetType.JOB_FUNCTION;\n }\n\n targetConfig = {\n config: {\n case: \"function\",\n value: {\n name: \"operation\",\n scriptRef: executorFunctionName(executor.name),\n variables: {\n expr: argsExpr,\n },\n invoker: target.authInvoker ?? undefined,\n },\n },\n };\n break;\n }\n case \"workflow\": {\n targetType = ExecutorTargetType.WORKFLOW;\n targetConfig = {\n config: {\n case: \"workflow\",\n value: {\n workflowName: target.workflowName,\n variables: target.args\n ? typeof target.args === \"function\"\n ? { expr: `(${stringifyFunction(target.args)})(${argsExpr})` }\n : { expr: JSON.stringify(target.args) }\n : undefined,\n invoker: target.authInvoker ?? undefined,\n },\n },\n };\n break;\n }\n default:\n throw new Error(`Unknown target: ${target satisfies never}`);\n }\n\n return {\n name: executor.name,\n description: executor.description,\n disabled: executor.disabled,\n triggerType,\n triggerConfig,\n targetType,\n targetConfig,\n };\n}\n","import { type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n type CreatePipelineResolverRequestSchema,\n type CreatePipelineServiceRequestSchema,\n type DeletePipelineResolverRequestSchema,\n type DeletePipelineServiceRequestSchema,\n type UpdatePipelineResolverRequestSchema,\n type UpdatePipelineServiceRequestSchema,\n} from \"@tailor-proto/tailor/v1/pipeline_pb\";\nimport {\n type PipelineResolver_FieldSchema,\n PipelineResolver_OperationType,\n type PipelineResolver_PipelineSchema,\n type PipelineResolver_TypeSchema,\n type PipelineResolverSchema,\n} from \"@tailor-proto/tailor/v1/pipeline_resource_pb\";\nimport * as inflection 
from \"inflection\";\nimport { type ResolverService } from \"@/cli/services/resolver/service\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { buildResolverOperationHookExpr } from \"@/cli/shared/runtime-args\";\nimport { createChangeSet } from \"./change-set\";\nimport { resolverFunctionName } from \"./function-registry\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type { Executor } from \"@/types/executor.generated\";\nimport type { Resolver, TailorField } from \"@/types/resolver.generated\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n// Scalar type mapping for field type conversion\nconst SCALAR_TYPE_MAP = {\n uuid: { kind: \"ScalarType\", name: \"ID\" },\n string: { kind: \"ScalarType\", name: \"String\" },\n integer: { kind: \"ScalarType\", name: \"Int\" },\n float: { kind: \"ScalarType\", name: \"Float\" },\n decimal: { kind: \"CustomScalarType\", name: \"Decimal\" },\n boolean: { kind: \"ScalarType\", name: \"Boolean\" },\n date: { kind: \"CustomScalarType\", name: \"Date\" },\n datetime: { kind: \"CustomScalarType\", name: \"DateTime\" },\n time: { kind: \"CustomScalarType\", name: \"Time\" },\n} as const satisfies Record<\n Exclude<TailorField[\"type\"], \"enum\" | \"nested\">,\n { kind: \"ScalarType\" | \"CustomScalarType\"; name: string }\n>;\n\n/**\n * Apply resolver pipeline changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned pipeline changes\n * @param phase - Apply phase\n * @returns Promise that resolves when pipeline changes are applied\n */\nexport async function applyPipeline(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planPipeline>>,\n phase: Exclude<ApplyPhase, \"delete\"> = \"create-update\",\n) {\n const 
{ changeSet } = result;\n if (phase === \"create-update\") {\n // Services\n await Promise.all([\n ...changeSet.service.creates.map(async (create) => {\n await client.createPipelineService(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.service.updates.map(async (update) => {\n await client.updatePipelineService(update.request);\n await client.setMetadata(update.metaRequest);\n }),\n ]);\n\n // Resolvers\n await Promise.all([\n ...changeSet.resolver.creates.map((create) => client.createPipelineResolver(create.request)),\n ...changeSet.resolver.updates.map((update) => client.updatePipelineResolver(update.request)),\n ]);\n } else if (phase === \"delete-resources\") {\n // Delete in reverse order of dependencies\n // Resolvers\n await Promise.all(\n changeSet.resolver.deletes.map((del) => client.deletePipelineResolver(del.request)),\n );\n } else if (phase === \"delete-services\") {\n // Services only\n await Promise.all(\n changeSet.service.deletes.map((del) => client.deletePipelineService(del.request)),\n );\n }\n}\n\n/**\n * Plan resolver pipeline changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned changes\n */\nexport async function planPipeline(context: PlanContext) {\n const { client, workspaceId, application, forRemoval } = context;\n const pipelines: Readonly<ResolverService>[] = [];\n if (!forRemoval) {\n for (const pipeline of application.resolverServices) {\n await pipeline.loadResolvers();\n pipelines.push(pipeline);\n }\n }\n const executors = forRemoval\n ? []\n : Object.values((await application.executorService?.loadExecutors()) ?? 
{});\n\n const {\n changeSet: serviceChangeSet,\n conflicts,\n unmanaged,\n resourceOwners,\n } = await planServices(client, workspaceId, application.name, pipelines);\n const deletedServices = serviceChangeSet.deletes.map((del) => del.name);\n const resolverChangeSet = await planResolvers(\n client,\n workspaceId,\n pipelines,\n executors,\n deletedServices,\n application.env,\n );\n\n serviceChangeSet.print();\n resolverChangeSet.print();\n return {\n changeSet: {\n service: serviceChangeSet,\n resolver: resolverChangeSet,\n },\n conflicts,\n unmanaged,\n resourceOwners,\n };\n}\n\ntype CreateService = {\n name: string;\n request: MessageInitShape<typeof CreatePipelineServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateService = {\n name: string;\n request: MessageInitShape<typeof UpdatePipelineServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteService = {\n name: string;\n request: MessageInitShape<typeof DeletePipelineServiceRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:pipeline:${name}`;\n}\n\nasync function planServices(\n client: OperatorClient,\n workspaceId: string,\n appName: string,\n pipelines: ReadonlyArray<Readonly<ResolverService>>,\n) {\n const changeSet = createChangeSet<CreateService, UpdateService, DeleteService>(\n \"Pipeline services\",\n );\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { pipelineServices, nextPageToken } = await client.listPipelineServices({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [pipelineServices, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n 
});\n const existingServices: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n if (!resource.namespace?.name) {\n return;\n }\n const { metadata } = await client.getMetadata({\n trn: trn(workspaceId, resource.namespace.name),\n });\n existingServices[resource.namespace.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n for (const pipeline of pipelines) {\n const existing = existingServices[pipeline.namespace];\n const metaRequest = await buildMetaRequest(trn(workspaceId, pipeline.namespace), appName);\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"Pipeline service\",\n resourceName: pipeline.namespace,\n });\n } else if (existing.label !== appName) {\n conflicts.push({\n resourceType: \"Pipeline service\",\n resourceName: pipeline.namespace,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name: pipeline.namespace,\n request: {\n workspaceId,\n namespaceName: pipeline.namespace,\n },\n metaRequest,\n });\n delete existingServices[pipeline.namespace];\n } else {\n changeSet.creates.push({\n name: pipeline.namespace,\n request: {\n workspaceId,\n namespaceName: pipeline.namespace,\n },\n metaRequest,\n });\n }\n }\n Object.entries(existingServices).forEach(([namespaceName]) => {\n const label = existingServices[namespaceName]?.label;\n if (label && label !== appName) {\n resourceOwners.add(label);\n }\n // Only delete services managed by this application\n if (label === appName) {\n changeSet.deletes.push({\n name: namespaceName,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n });\n\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n\ntype CreateResolver = {\n name: string;\n request: MessageInitShape<typeof CreatePipelineResolverRequestSchema>;\n};\n\ntype UpdateResolver = {\n name: string;\n request: MessageInitShape<typeof UpdatePipelineResolverRequestSchema>;\n};\n\ntype 
DeleteResolver = {\n name: string;\n request: MessageInitShape<typeof DeletePipelineResolverRequestSchema>;\n};\n\nasync function planResolvers(\n client: OperatorClient,\n workspaceId: string,\n pipelines: ReadonlyArray<Readonly<ResolverService>>,\n executors: ReadonlyArray<Executor>,\n deletedServices: ReadonlyArray<string>,\n env: Record<string, string | number | boolean>,\n) {\n const changeSet = createChangeSet<CreateResolver, UpdateResolver, DeleteResolver>(\n \"Pipeline resolvers\",\n );\n\n const fetchResolvers = (namespaceName: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { pipelineResolvers, nextPageToken } = await client.listPipelineResolvers({\n workspaceId,\n namespaceName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [pipelineResolvers, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n const executorUsedResolvers = new Set<string>();\n for (const executor of executors) {\n if (executor.trigger.kind === \"resolverExecuted\") {\n executorUsedResolvers.add(executor.trigger.resolverName);\n }\n }\n\n // Validate that resolvers used by executors don't have publishEvents explicitly set to false\n for (const pipeline of pipelines) {\n for (const resolver of Object.values(pipeline.resolvers)) {\n if (executorUsedResolvers.has(resolver.name) && resolver.publishEvents === false) {\n throw new Error(\n `Resolver \"${resolver.name}\" has publishEvents set to false, but it is used by an executor with a resolverExecuted trigger. 
` +\n `Either remove the publishEvents: false setting or remove the executor trigger for this resolver.`,\n );\n }\n }\n }\n\n for (const pipeline of pipelines) {\n const existingResolvers = await fetchResolvers(pipeline.namespace);\n const existingNameSet = new Set<string>();\n existingResolvers.forEach((resolver) => {\n existingNameSet.add(resolver.name);\n });\n for (const resolver of Object.values(pipeline.resolvers)) {\n if (existingNameSet.has(resolver.name)) {\n changeSet.updates.push({\n name: resolver.name,\n request: {\n workspaceId,\n namespaceName: pipeline.namespace,\n pipelineResolver: processResolver(\n pipeline.namespace,\n resolver,\n executorUsedResolvers,\n env,\n ),\n },\n });\n existingNameSet.delete(resolver.name);\n } else {\n changeSet.creates.push({\n name: resolver.name,\n request: {\n workspaceId,\n namespaceName: pipeline.namespace,\n pipelineResolver: processResolver(\n pipeline.namespace,\n resolver,\n executorUsedResolvers,\n env,\n ),\n },\n });\n }\n }\n existingNameSet.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: pipeline.namespace,\n resolverName: name,\n },\n });\n });\n }\n\n for (const namespaceName of deletedServices) {\n const existingResolvers = await fetchResolvers(namespaceName);\n existingResolvers.forEach((resolver) => {\n changeSet.deletes.push({\n name: resolver.name,\n request: {\n workspaceId,\n namespaceName,\n resolverName: resolver.name,\n },\n });\n });\n }\n return changeSet;\n}\n\nfunction processResolver(\n namespace: string,\n resolver: Resolver,\n executorUsedResolvers: ReadonlySet<string>,\n env: Record<string, string | number | boolean>,\n): MessageInitShape<typeof PipelineResolverSchema> {\n const pipelines: MessageInitShape<typeof PipelineResolver_PipelineSchema>[] = [\n {\n name: \"body\",\n operationName: \"body\",\n description: `${resolver.name} function body`,\n operationType: PipelineResolver_OperationType.FUNCTION,\n operationSourceRef: 
resolverFunctionName(namespace, resolver.name),\n operationHook: {\n expr: buildResolverOperationHookExpr(env),\n },\n postScript: `args.body`,\n },\n ];\n\n const typeBaseName = inflection.camelize(resolver.name);\n\n // Build inputs\n const inputs: MessageInitShape<typeof PipelineResolver_FieldSchema>[] = resolver.input\n ? protoFields(resolver.input, `${typeBaseName}Input`, true)\n : [];\n\n // Build response\n const response: MessageInitShape<typeof PipelineResolver_FieldSchema> = protoFields(\n { \"\": resolver.output },\n `${typeBaseName}Output`,\n false,\n )[0];\n\n // Build description (combine resolver description and output description)\n const resolverDescription = resolver.description || `${resolver.name} resolver`;\n const outputDescription = resolver.output.metadata.description;\n const combinedDescription = outputDescription\n ? `${resolverDescription}\\n\\nReturns:\\n${outputDescription}`\n : resolverDescription;\n\n // Determine publishExecutionEvents (user-facing name: publishEvents):\n // - If user explicitly sets a value (true or false), respect that (validation already ensures no executor conflict)\n // - If not set, use executor detection (true if executor uses this resolver)\n let publishExecutionEvents = false;\n if (resolver.publishEvents !== undefined) {\n publishExecutionEvents = resolver.publishEvents;\n } else if (executorUsedResolvers.has(resolver.name)) {\n publishExecutionEvents = true;\n }\n\n return {\n authorization: \"true==true\",\n description: combinedDescription,\n inputs,\n name: resolver.name,\n operationType: resolver.operation,\n response,\n pipelines,\n publishExecutionEvents,\n };\n}\n\nfunction protoFields(\n fields: Record<string, TailorField>,\n baseName: string,\n isInput: boolean,\n): MessageInitShape<typeof PipelineResolver_FieldSchema>[] {\n if (!fields) {\n return [];\n }\n\n return Object.entries(fields).map(([fieldName, field]) => {\n let type: MessageInitShape<typeof PipelineResolver_TypeSchema>;\n const 
hasCreateHook = isInput && field.metadata.hooks?.create !== undefined;\n const required = hasCreateHook ? false : (field.metadata.required ?? true);\n\n if (field.type === \"nested\") {\n const typeName = field.metadata.typeName ?? `${baseName}${inflection.camelize(fieldName)}`;\n type = {\n kind: \"UserDefined\",\n name: typeName,\n description: field.metadata.description ?? \"\",\n required,\n fields: protoFields(field.fields, typeName, isInput),\n };\n } else if (field.type === \"enum\") {\n const typeName = field.metadata.typeName ?? `${baseName}${inflection.camelize(fieldName)}`;\n type = {\n kind: \"EnumType\",\n name: typeName,\n required,\n allowedValues: field.metadata.allowedValues,\n };\n } else {\n type = { ...SCALAR_TYPE_MAP[field.type], required };\n }\n\n return {\n name: fieldName,\n description: field.metadata.description,\n array: field.metadata.array ?? false,\n required,\n type,\n };\n });\n}\n","import { createHash } from \"node:crypto\";\nimport { existsSync, mkdirSync, readFileSync, writeFileSync } from \"node:fs\";\nimport * as path from \"pathe\";\nimport { z } from \"zod\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\n\nconst SecretsStateSchema = z.object({\n vaults: z.record(z.string(), z.record(z.string(), z.string())),\n});\n\nexport type SecretsState = z.infer<typeof SecretsStateSchema>;\n\n/**\n * Get the file path for the secrets state JSON.\n * @returns Absolute path to secrets-state.json\n */\nexport function getSecretsStatePath(): string {\n return path.join(getDistDir(), \"secrets-state.json\");\n}\n\n/**\n * Load secrets hash state from disk.\n * @returns Persisted state, or empty state if file is missing or corrupted\n */\nexport function loadSecretsState(): SecretsState {\n const filePath = getSecretsStatePath();\n if (!existsSync(filePath)) {\n return { vaults: {} };\n }\n try {\n const raw = readFileSync(filePath, \"utf-8\");\n return SecretsStateSchema.parse(JSON.parse(raw));\n } catch {\n return { vaults: {} 
};\n }\n}\n\n/**\n * Save secrets hash state to disk.\n * @param state - The secrets state to persist\n */\nexport function saveSecretsState(state: SecretsState): void {\n const filePath = getSecretsStatePath();\n const dir = path.dirname(filePath);\n mkdirSync(dir, { recursive: true });\n writeFileSync(filePath, JSON.stringify(state, null, 2), \"utf-8\");\n}\n\n/**\n * Compute SHA-256 hex digest of a value.\n * @param value - The string to hash\n * @returns Hex-encoded SHA-256 hash\n */\nexport function hashValue(value: string): string {\n return createHash(\"sha256\").update(value).digest(\"hex\");\n}\n","import { Code, ConnectError } from \"@connectrpc/connect\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { createChangeSet } from \"./change-set\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport { hashValue, loadSecretsState, saveSecretsState } from \"./secrets-state\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type { Application } from \"@/cli/services/application\";\n\ntype CreateVault = {\n name: string;\n workspaceId: string;\n};\n\ntype ExistingVault = {\n name: string;\n workspaceId: string;\n};\n\ntype DeleteVault = {\n name: string;\n workspaceId: string;\n};\n\ntype CreateSecret = {\n name: string;\n secretName: string;\n workspaceId: string;\n vaultName: string;\n value: string;\n};\n\ntype UpdateSecret = {\n name: string;\n secretName: string;\n workspaceId: string;\n vaultName: string;\n value: string;\n};\n\ntype DeleteSecret = {\n name: string;\n secretName: string;\n workspaceId: string;\n vaultName: string;\n};\n\n/**\n * Plan secret manager changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned changes for vaults and secrets\n */\nexport async function planSecretManager(context: PlanContext) {\n const { client, 
workspaceId, application, forRemoval } = context;\n const secretVaults = forRemoval ? [] : application.secrets;\n\n const vaultChangeSet = createChangeSet<CreateVault, ExistingVault, DeleteVault>(\n \"Secret Manager vaults\",\n );\n const secretChangeSet = createChangeSet<CreateSecret, UpdateSecret, DeleteSecret>(\n \"Secret Manager secrets\",\n );\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n // Fetch all existing vaults with metadata to track managed resources\n const existingVaultList = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { vaults, nextPageToken } = await client.listSecretManagerVaults({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [vaults, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n\n const existingVaults: WithLabel<(typeof existingVaultList)[number]> = {};\n await Promise.all(\n existingVaultList.map(async (resource) => {\n const { metadata } = await client.getMetadata({\n trn: vaultTrn(workspaceId, resource.name),\n });\n existingVaults[resource.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n const state = loadSecretsState();\n\n await Promise.all(\n secretVaults.map(async (vault) => {\n const vaultName = vault.vaultName;\n const existing = existingVaults[vaultName];\n\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"Secret Manager vault\",\n resourceName: vaultName,\n });\n } else if (existing.label !== application.name) {\n conflicts.push({\n resourceType: \"Secret Manager vault\",\n resourceName: vaultName,\n currentOwner: existing.label,\n });\n }\n // Track existing vault for metadata update\n vaultChangeSet.updates.push({\n name: vaultName,\n workspaceId,\n });\n delete existingVaults[vaultName];\n } else {\n 
vaultChangeSet.creates.push({\n name: vaultName,\n workspaceId,\n });\n }\n\n // Fetch existing secrets in this vault\n let existingSecrets: string[] = [];\n if (existing) {\n const secrets = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { secrets, nextPageToken } = await client.listSecretManagerSecrets({\n workspaceId,\n secretmanagerVaultName: vaultName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [secrets, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n existingSecrets = secrets.map((s) => s.name);\n }\n\n const existingSet = new Set(existingSecrets);\n\n // Diff secrets\n for (const secret of vault.secrets) {\n if (existingSet.has(secret.name)) {\n const currentHash = hashValue(secret.value);\n const storedHash = state.vaults[vaultName]?.[secret.name];\n if (currentHash !== storedHash) {\n secretChangeSet.updates.push({\n name: `${vaultName}/${secret.name}`,\n secretName: secret.name,\n workspaceId,\n vaultName,\n value: secret.value,\n });\n }\n existingSet.delete(secret.name);\n } else {\n secretChangeSet.creates.push({\n name: `${vaultName}/${secret.name}`,\n secretName: secret.name,\n workspaceId,\n vaultName,\n value: secret.value,\n });\n }\n }\n\n // Remaining in existingSet are orphans - mark for deletion\n for (const orphanName of existingSet) {\n secretChangeSet.deletes.push({\n name: `${vaultName}/${orphanName}`,\n secretName: orphanName,\n workspaceId,\n vaultName,\n });\n }\n }),\n );\n\n // Remaining existing vaults not in config - mark managed ones for deletion\n for (const [name, entry] of Object.entries(existingVaults)) {\n if (!entry) continue;\n const label = entry.label;\n if (label && label !== application.name) {\n resourceOwners.add(label);\n }\n if (label === application.name) {\n // Delete secrets inside the vault before deleting the vault itself\n const secrets = await fetchAll(async (pageToken, 
maxPageSize) => {\n try {\n const { secrets, nextPageToken } = await client.listSecretManagerSecrets({\n workspaceId,\n secretmanagerVaultName: name,\n pageToken,\n pageSize: maxPageSize,\n });\n return [secrets, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n for (const secret of secrets) {\n secretChangeSet.deletes.push({\n name: `${name}/${secret.name}`,\n secretName: secret.name,\n workspaceId,\n vaultName: name,\n });\n }\n\n vaultChangeSet.deletes.push({\n name,\n workspaceId,\n });\n }\n }\n\n vaultChangeSet.print();\n secretChangeSet.print();\n return { vaultChangeSet, secretChangeSet, conflicts, unmanaged, resourceOwners };\n}\n\nfunction vaultTrn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:vault:${name}`;\n}\n\n/**\n * Apply secret manager changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned secret changes\n * @param phase - Apply phase\n * @param application - Application to read secrets from for hash state persistence\n * @returns Promise that resolves when secret changes are applied\n */\nexport async function applySecretManager(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planSecretManager>>,\n phase: Extract<ApplyPhase, \"create-update\" | \"delete\"> = \"create-update\",\n application?: Readonly<Application>,\n) {\n const { vaultChangeSet, secretChangeSet } = result;\n\n if (phase === \"create-update\") {\n // Create vaults first and set metadata\n await Promise.all(\n vaultChangeSet.creates.map(async (create) => {\n await client.createSecretManagerVault({\n workspaceId: create.workspaceId,\n secretmanagerVaultName: create.name,\n });\n if (application) {\n const metaRequest = await buildMetaRequest(\n vaultTrn(create.workspaceId, create.name),\n application.name,\n );\n await client.setMetadata(metaRequest);\n }\n }),\n );\n\n // Update 
metadata for existing vaults\n if (application) {\n await Promise.all(\n vaultChangeSet.updates.map(async (update) => {\n const metaRequest = await buildMetaRequest(\n vaultTrn(update.workspaceId, update.name),\n application.name,\n );\n await client.setMetadata(metaRequest);\n }),\n );\n }\n\n // Create new secrets\n await Promise.all(\n secretChangeSet.creates.map((create) =>\n client.createSecretManagerSecret({\n workspaceId: create.workspaceId,\n secretmanagerVaultName: create.vaultName,\n secretmanagerSecretName: create.secretName,\n secretmanagerSecretValue: create.value,\n }),\n ),\n );\n\n // Update existing secrets\n await Promise.all(\n secretChangeSet.updates.map((update) =>\n client.updateSecretManagerSecret({\n workspaceId: update.workspaceId,\n secretmanagerVaultName: update.vaultName,\n secretmanagerSecretName: update.secretName,\n secretmanagerSecretValue: update.value,\n }),\n ),\n );\n\n // Persist hash state for all secrets after successful apply\n if (application) {\n const state = loadSecretsState();\n for (const vault of application.secrets) {\n if (!state.vaults[vault.vaultName]) {\n state.vaults[vault.vaultName] = {};\n }\n for (const secret of vault.secrets) {\n state.vaults[vault.vaultName][secret.name] = hashValue(secret.value);\n }\n }\n saveSecretsState(state);\n }\n } else if (phase === \"delete\") {\n // Delete orphan secrets\n await Promise.all(\n secretChangeSet.deletes.map((del) =>\n client.deleteSecretManagerSecret({\n workspaceId: del.workspaceId,\n secretmanagerVaultName: del.vaultName,\n secretmanagerSecretName: del.secretName,\n }),\n ),\n );\n\n // Delete orphan vaults\n await Promise.all(\n vaultChangeSet.deletes.map((del) =>\n client.deleteSecretManagerVault({\n workspaceId: del.workspaceId,\n secretmanagerVaultName: del.name,\n }),\n ),\n );\n\n // Remove deleted secrets and vaults from hash state\n if (secretChangeSet.deletes.length > 0 || vaultChangeSet.deletes.length > 0) {\n const state = loadSecretsState();\n for 
(const del of secretChangeSet.deletes) {\n if (state.vaults[del.vaultName]) {\n delete state.vaults[del.vaultName][del.secretName];\n if (Object.keys(state.vaults[del.vaultName]).length === 0) {\n delete state.vaults[del.vaultName];\n }\n }\n }\n for (const del of vaultChangeSet.deletes) {\n delete state.vaults[del.name];\n }\n saveSecretsState(state);\n }\n }\n}\n","import { type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n type CreateStaticWebsiteRequestSchema,\n type DeleteStaticWebsiteRequestSchema,\n type UpdateStaticWebsiteRequestSchema,\n} from \"@tailor-proto/tailor/v1/staticwebsite_pb\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { createChangeSet } from \"./change-set\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { ApplyPhase, PlanContext } from \"@/cli/commands/apply/apply\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n/**\n * Apply static website changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned static website changes\n * @param phase - Apply phase\n * @returns Promise that resolves when static websites are applied\n */\nexport async function applyStaticWebsite(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planStaticWebsite>>,\n phase: Extract<ApplyPhase, \"create-update\" | \"delete\"> = \"create-update\",\n) {\n const { changeSet } = result;\n if (phase === \"create-update\") {\n // StaticWebsites\n await Promise.all([\n ...changeSet.creates.map(async (create) => {\n await client.createStaticWebsite(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.updates.map(async (update) => {\n await client.updateStaticWebsite(update.request);\n await 
client.setMetadata(update.metaRequest);\n }),\n ]);\n } else if (phase === \"delete\") {\n // Delete in reverse order of dependencies\n // StaticWebsites\n await Promise.all(changeSet.deletes.map((del) => client.deleteStaticWebsite(del.request)));\n }\n}\n\ntype CreateStaticWebsite = {\n name: string;\n request: MessageInitShape<typeof CreateStaticWebsiteRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateStaticWebsite = {\n name: string;\n request: MessageInitShape<typeof UpdateStaticWebsiteRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteStaticWebsite = {\n name: string;\n request: MessageInitShape<typeof DeleteStaticWebsiteRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:staticwebsite:${name}`;\n}\n\n/**\n * Plan static website changes based on current and desired state.\n * @param context - Planning context\n * @returns Planned changes\n */\nexport async function planStaticWebsite(context: PlanContext) {\n const { client, workspaceId, application, forRemoval } = context;\n const changeSet = createChangeSet<CreateStaticWebsite, UpdateStaticWebsite, DeleteStaticWebsite>(\n \"StaticWebsites\",\n );\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n // Fetch existing static websites\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { staticwebsites, nextPageToken } = await client.listStaticWebsites({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [staticwebsites, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n const existingWebsites: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n const { 
metadata } = await client.getMetadata({\n trn: trn(workspaceId, resource.name),\n });\n existingWebsites[resource.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n const staticWebsiteServices = forRemoval ? [] : application.staticWebsiteServices;\n for (const websiteService of staticWebsiteServices) {\n const config = websiteService;\n const name = websiteService.name;\n const existing = existingWebsites[name];\n const metaRequest = await buildMetaRequest(trn(workspaceId, name), application.name);\n\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"StaticWebsite\",\n resourceName: name,\n });\n } else if (existing.label !== application.name) {\n conflicts.push({\n resourceType: \"StaticWebsite\",\n resourceName: name,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name,\n request: {\n workspaceId,\n staticwebsite: {\n name,\n description: config.description || \"\",\n allowedIpAddresses: config.allowedIpAddresses || [],\n },\n },\n metaRequest,\n });\n delete existingWebsites[name];\n } else {\n changeSet.creates.push({\n name,\n request: {\n workspaceId,\n staticwebsite: {\n name,\n description: config.description || \"\",\n allowedIpAddresses: config.allowedIpAddresses || [],\n },\n },\n metaRequest,\n });\n }\n }\n Object.entries(existingWebsites).forEach(([name]) => {\n const label = existingWebsites[name]?.label;\n if (label && label !== application.name) {\n resourceOwners.add(label);\n }\n // Only delete websites managed by this application\n if (label === application.name) {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n name,\n },\n });\n }\n });\n\n changeSet.print();\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n","/**\n * Migration configuration utilities\n */\n\nimport * as path from \"pathe\";\nimport type { AppConfig } from \"@/types/app-config\";\n\n// 
============================================================================\n// Types\n// ============================================================================\n\n/**\n * Namespace with migrations configuration\n */\nexport interface NamespaceWithMigrations {\n namespace: string;\n migrationsDir: string;\n}\n\n// ============================================================================\n// Config Helpers\n// ============================================================================\n\nfunction hasMigrationConfig(dbConfig: unknown): dbConfig is { migration: { directory: string } } {\n if (typeof dbConfig !== \"object\" || dbConfig === null) return false;\n if (!(\"migration\" in dbConfig)) return false;\n\n const migration = (dbConfig as { migration: unknown }).migration;\n if (typeof migration !== \"object\" || migration === null) return false;\n if (!(\"directory\" in migration)) return false;\n\n return typeof (migration as { directory: unknown }).directory === \"string\";\n}\n\n/**\n * Get namespaces that have migrations configured\n * @param {AppConfig} config - Application configuration\n * @param {string} configDir - Configuration directory path\n * @returns {NamespaceWithMigrations[]} Array of namespaces with migrations configured\n */\nexport function getNamespacesWithMigrations(\n config: AppConfig,\n configDir: string,\n): NamespaceWithMigrations[] {\n const result: NamespaceWithMigrations[] = [];\n\n for (const namespace of Object.keys(config.db ?? 
{})) {\n const dbConfig = config.db?.[namespace];\n if (!hasMigrationConfig(dbConfig)) continue;\n\n const migrationsDir = path.resolve(configDir, dbConfig.migration.directory);\n result.push({ namespace, migrationsDir });\n }\n\n return result;\n}\n","/**\n * Diff calculator and formatter for TailorDB schema migrations\n *\n * This module provides utilities for formatting and displaying migration diffs.\n * The actual diff calculation is performed by snapshot.ts.\n */\n\nimport type { SnapshotFieldConfig } from \"./snapshot\";\n\n// ============================================================================\n// Diff Types\n// ============================================================================\n\n/**\n * Current schema snapshot format version\n */\nexport const SCHEMA_SNAPSHOT_VERSION = 1 as const;\n\n/**\n * Change kind in migration diff\n */\nexport type DiffChangeKind =\n | \"type_added\"\n | \"type_removed\"\n | \"type_modified\"\n | \"field_added\"\n | \"field_removed\"\n | \"field_modified\"\n | \"index_added\"\n | \"index_removed\"\n | \"index_modified\"\n | \"file_added\"\n | \"file_removed\"\n | \"file_modified\"\n | \"relationship_added\"\n | \"relationship_removed\"\n | \"relationship_modified\"\n | \"permission_modified\";\n\n/**\n * Single change in migration diff\n */\nexport interface DiffChange {\n kind: DiffChangeKind;\n typeName: string;\n fieldName?: string;\n /** Index name for index_* changes */\n indexName?: string;\n /** Relationship name for relationship_* changes */\n relationshipName?: string;\n /** Relationship type for relationship_* changes */\n relationshipType?: \"forward\" | \"backward\";\n before?: unknown;\n after?: unknown;\n reason?: string;\n}\n\n/**\n * Migration diff - changes between two schema versions\n * Stored as XXXX/diff.json (e.g., 0001/diff.json)\n */\nexport interface MigrationDiff {\n /** Format version for future compatibility */\n version: typeof SCHEMA_SNAPSHOT_VERSION;\n namespace: string;\n createdAt: 
string;\n description?: string;\n changes: DiffChange[];\n /** Whether there are breaking changes (data loss or constraint violations possible) */\n hasBreakingChanges: boolean;\n /** List of breaking changes */\n breakingChanges: BreakingChangeInfo[];\n /** Whether a migration script is required to handle data migration */\n requiresMigrationScript: boolean;\n}\n\n/**\n * Breaking change information in migration diff\n */\nexport interface BreakingChangeInfo {\n typeName: string;\n fieldName?: string;\n reason: string;\n /** If true, this change is not supported and migration generation will fail */\n unsupported?: boolean;\n /** If true, show 3-step migration instructions for this unsupported change */\n showThreeStepHint?: boolean;\n}\n\n/**\n * Check if a migration diff has any changes\n * @param {MigrationDiff} diff - Migration diff to check\n * @returns {boolean} True if diff has changes\n */\nexport function hasChanges(diff: MigrationDiff): boolean {\n return diff.changes.length > 0;\n}\n\n/**\n * Format a migration diff for display\n * @param {MigrationDiff} diff - Migration diff to format\n * @returns {string} Formatted diff string\n */\nexport function formatMigrationDiff(diff: MigrationDiff): string {\n if (diff.changes.length === 0) {\n return \"No schema differences detected.\";\n }\n\n const lines: string[] = [];\n\n // Group changes by type name\n const changesByType = new Map<string, DiffChange[]>();\n for (const change of diff.changes) {\n const existing = changesByType.get(change.typeName) ?? 
[];\n existing.push(change);\n changesByType.set(change.typeName, existing);\n }\n\n for (const [typeName, changes] of changesByType) {\n lines.push(`${diff.namespace}.${typeName}:`);\n\n for (const change of changes) {\n lines.push(formatDiffChange(change));\n }\n }\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Format a single diff change for display\n * @param {DiffChange} change - Diff change to format\n * @returns {string} Formatted change string\n */\nfunction formatDiffChange(change: DiffChange): string {\n switch (change.kind) {\n case \"type_added\":\n return ` + [Type] ${change.typeName} (new type)`;\n case \"type_removed\":\n return ` - [Type] ${change.typeName} (removed)`;\n case \"type_modified\":\n return ` ~ [Type] ${change.typeName}: ${change.reason}`;\n case \"field_added\": {\n const field = change.after as SnapshotFieldConfig;\n const typeStr = formatFieldType(field);\n return ` + ${change.fieldName}: ${typeStr}`;\n }\n case \"field_removed\": {\n const field = change.before as SnapshotFieldConfig;\n return ` - ${change.fieldName}: ${field.type}`;\n }\n case \"field_modified\": {\n const before = change.before as SnapshotFieldConfig;\n const after = change.after as SnapshotFieldConfig;\n return ` ~ ${change.fieldName}: ${formatFieldModification(before, after)}`;\n }\n case \"index_added\":\n return ` + [Index] ${change.indexName}`;\n case \"index_removed\":\n return ` - [Index] ${change.indexName}`;\n case \"index_modified\":\n return ` ~ [Index] ${change.indexName}: ${change.reason ?? \"modified\"}`;\n case \"file_added\":\n return ` + [File] ${change.fieldName}`;\n case \"file_removed\":\n return ` - [File] ${change.fieldName}`;\n case \"file_modified\":\n return ` ~ [File] ${change.fieldName}: ${change.reason ?? \"modified\"}`;\n case \"relationship_added\":\n return ` + [Relationship${change.relationshipType ? 
` (${change.relationshipType})` : \"\"}] ${change.relationshipName}`;\n case \"relationship_removed\":\n return ` - [Relationship${change.relationshipType ? ` (${change.relationshipType})` : \"\"}] ${change.relationshipName}`;\n case \"relationship_modified\":\n return ` ~ [Relationship${change.relationshipType ? ` (${change.relationshipType})` : \"\"}] ${change.relationshipName}: ${change.reason ?? \"modified\"}`;\n case \"permission_modified\":\n return ` ~ [Permission] ${change.reason ?? \"modified\"}`;\n default:\n return ` ? ${change.typeName}.${change.fieldName ?? \"\"}`;\n }\n}\n\n/**\n * Format field type with attributes\n * @param {SnapshotFieldConfig} field - Field configuration\n * @returns {string} Formatted field type string\n */\nfunction formatFieldType(field: SnapshotFieldConfig): string {\n let type = field.type;\n if (field.array) type += \"[]\";\n if (field.required) type += \" (required)\";\n else type += \" (optional)\";\n return type;\n}\n\n/**\n * Format field modification details\n * @param {SnapshotFieldConfig} before - Before field configuration\n * @param {SnapshotFieldConfig} after - After field configuration\n * @returns {string} Formatted modification details\n */\nfunction formatFieldModification(before: SnapshotFieldConfig, after: SnapshotFieldConfig): string {\n const changes: string[] = [];\n\n if (before.type !== after.type) {\n changes.push(`type: ${before.type} → ${after.type}`);\n }\n if (before.required !== after.required) {\n changes.push(`required: ${before.required} → ${after.required}`);\n }\n if (Boolean(before.array) !== Boolean(after.array)) {\n changes.push(`array: ${before.array ?? false} → ${after.array ?? false}`);\n }\n if (Boolean(before.index) !== Boolean(after.index)) {\n changes.push(`index: ${before.index ?? false} → ${after.index ?? false}`);\n }\n if (Boolean(before.unique) !== Boolean(after.unique)) {\n changes.push(`unique: ${before.unique ?? false} → ${after.unique ?? 
false}`);\n }\n if (Boolean(before.vector) !== Boolean(after.vector)) {\n changes.push(`vector: ${before.vector ?? false} → ${after.vector ?? false}`);\n }\n\n const beforeAllowed = before.allowedValues ?? [];\n const afterAllowed = after.allowedValues ?? [];\n const afterSet = new Set(afterAllowed.map((v) => v.value));\n const hasAllowedValuesChange =\n beforeAllowed.length !== afterAllowed.length ||\n beforeAllowed.some((v) => !afterSet.has(v.value));\n if (hasAllowedValuesChange) {\n const beforeValues = beforeAllowed.map((v) => v.value).join(\", \");\n const afterValues = afterAllowed.map((v) => v.value).join(\", \");\n changes.push(`allowedValues: [${beforeValues}] → [${afterValues}]`);\n }\n\n const beforeHooks = before.hooks;\n const afterHooks = after.hooks;\n if (\n (beforeHooks?.create?.expr ?? \"\") !== (afterHooks?.create?.expr ?? \"\") ||\n (beforeHooks?.update?.expr ?? \"\") !== (afterHooks?.update?.expr ?? \"\")\n ) {\n changes.push(\"hooks modified\");\n }\n\n const beforeValidate = before.validate ?? [];\n const afterValidate = after.validate ?? [];\n if (beforeValidate.length !== afterValidate.length) {\n changes.push(`validations: ${beforeValidate.length} → ${afterValidate.length}`);\n }\n\n if (Boolean(before.serial) !== Boolean(after.serial)) {\n changes.push(\n `serial: ${before.serial ? \"enabled\" : \"disabled\"} → ${after.serial ? \"enabled\" : \"disabled\"}`,\n );\n }\n\n return changes.join(\", \");\n}\n\n/**\n * Format breaking changes for display\n * @param {BreakingChangeInfo[]} breakingChanges - Breaking changes to format\n * @returns {string} Formatted breaking changes string\n */\nexport function formatBreakingChanges(breakingChanges: BreakingChangeInfo[]): string {\n if (breakingChanges.length === 0) {\n return \"\";\n }\n\n const lines: string[] = [\"Breaking changes detected:\", \"\"];\n\n for (const bc of breakingChanges) {\n const location = bc.fieldName ? 
`${bc.typeName}.${bc.fieldName}` : bc.typeName;\n lines.push(` - ${location}: ${bc.reason}`);\n }\n\n return lines.join(\"\\n\");\n}\n\nconst DIFF_CHANGE_LABELS: Record<DiffChangeKind, string> = {\n type_added: \"type(s) added\",\n type_removed: \"type(s) removed\",\n type_modified: \"type(s) modified\",\n field_added: \"field(s) added\",\n field_removed: \"field(s) removed\",\n field_modified: \"field(s) modified\",\n index_added: \"index(es) added\",\n index_removed: \"index(es) removed\",\n index_modified: \"index(es) modified\",\n file_added: \"file field(s) added\",\n file_removed: \"file field(s) removed\",\n file_modified: \"file field(s) modified\",\n relationship_added: \"relationship(s) added\",\n relationship_removed: \"relationship(s) removed\",\n relationship_modified: \"relationship(s) modified\",\n permission_modified: \"permission(s) modified\",\n};\n\n/**\n * Format a summary of the migration diff\n * @param {MigrationDiff} diff - Migration diff to summarize\n * @returns {string} Formatted summary string\n */\nexport function formatDiffSummary(diff: MigrationDiff): string {\n const stats: Partial<Record<DiffChangeKind, number>> = {};\n for (const change of diff.changes) {\n stats[change.kind] = (stats[change.kind] ?? 0) + 1;\n }\n\n const parts = Object.keys(stats).map(\n (kind) => `${stats[kind as DiffChangeKind]} ${DIFF_CHANGE_LABELS[kind as DiffChangeKind]}`,\n );\n\n return parts.length > 0 ? 
parts.join(\", \") : \"No changes\";\n}\n","/**\n * Schema snapshot management for TailorDB migrations\n */\n\nimport * as fs from \"node:fs\";\nimport * as path from \"pathe\";\nimport {\n type MigrationDiff,\n type DiffChange,\n type BreakingChangeInfo,\n SCHEMA_SNAPSHOT_VERSION,\n} from \"./diff-calculator\";\nimport type { SchemaDrift } from \"./types\";\nimport type {\n ParsedField,\n TailorDBType,\n OperatorFieldConfig,\n StandardActionPermission,\n} from \"@/types/tailordb\";\nimport type { TailorDBType as ProtoTailorDBType } from \"@tailor-proto/tailor/v1/tailordb_resource_pb\";\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/**\n * Initial schema migration number (0000)\n */\nexport const INITIAL_SCHEMA_NUMBER = 0;\n\n/**\n * Migration file names (used within migration directories)\n */\nexport const SCHEMA_FILE_NAME = \"schema.json\";\nexport const DIFF_FILE_NAME = \"diff.json\";\nexport const MIGRATE_FILE_NAME = \"migrate.ts\";\nexport const DB_TYPES_FILE_NAME = \"db.ts\";\n\n/**\n * Pattern for validating migration number format (4-digit sequential number)\n * Examples: 0001, 0002, 0003, ...\n */\nexport const MIGRATION_NUMBER_PATTERN = /^\\d{4}$/;\n\n// Re-export SCHEMA_SNAPSHOT_VERSION for convenience\nexport { SCHEMA_SNAPSHOT_VERSION };\n\n// ============================================================================\n// Snapshot Types\n// ============================================================================\n\n/**\n * Hook configuration in schema snapshot\n */\nexport interface SnapshotHook {\n expr: string;\n}\n\n/**\n * Validation configuration in schema snapshot\n */\nexport interface SnapshotValidation {\n script: { expr: string };\n errorMessage: string;\n}\n\n/**\n * Serial configuration in schema snapshot\n */\nexport interface SnapshotSerial {\n start: number;\n maxValue?: number;\n format?: 
string;\n}\n\n/**\n * Enum value with optional description in schema snapshot\n */\nexport interface SnapshotEnumValue {\n value: string;\n description?: string;\n}\n\n/**\n * Field configuration in schema snapshot\n */\nexport interface SnapshotFieldConfig {\n type: string;\n required: boolean;\n array?: boolean;\n index?: boolean;\n unique?: boolean;\n allowedValues?: SnapshotEnumValue[];\n foreignKey?: boolean;\n foreignKeyType?: string;\n foreignKeyField?: string;\n description?: string;\n vector?: boolean;\n hooks?: {\n create?: SnapshotHook;\n update?: SnapshotHook;\n };\n validate?: SnapshotValidation[];\n serial?: SnapshotSerial;\n scale?: number;\n /** Nested fields (recursive) */\n fields?: Record<string, SnapshotFieldConfig>;\n}\n\n/**\n * Index configuration in schema snapshot\n */\nexport interface SnapshotIndexConfig {\n fields: string[];\n unique?: boolean;\n}\n\n/**\n * Relationship configuration in schema snapshot\n */\nexport interface SnapshotRelationship {\n targetType: string;\n targetField: string;\n sourceField: string;\n isArray: boolean;\n description: string;\n}\n\n// ============================================================================\n// Permission Types\n// ============================================================================\n\n/**\n * Permission operand types\n */\nexport type SnapshotPermissionOperand =\n | { user: string }\n | { record: string }\n | { newRecord: string }\n | { oldRecord: string }\n | unknown; // ValueOperand (primitives, arrays)\n\n/**\n * Permission operators\n */\nexport type SnapshotPermissionOperator = \"eq\" | \"ne\" | \"in\" | \"nin\" | \"hasAny\" | \"nhasAny\";\n\n/**\n * Permission condition tuple\n */\nexport type SnapshotPermissionCondition = readonly [\n SnapshotPermissionOperand,\n SnapshotPermissionOperator,\n SnapshotPermissionOperand,\n];\n\n/**\n * Action permission policy\n */\nexport interface SnapshotActionPermission {\n conditions: readonly SnapshotPermissionCondition[];\n 
description?: string;\n permit: \"allow\" | \"deny\";\n}\n\n/**\n * Record-level permission configuration\n */\nexport interface SnapshotRecordPermission {\n create: readonly SnapshotActionPermission[];\n read: readonly SnapshotActionPermission[];\n update: readonly SnapshotActionPermission[];\n delete: readonly SnapshotActionPermission[];\n}\n\n/**\n * GQL permission actions\n */\nexport type SnapshotGqlAction =\n | \"read\"\n | \"create\"\n | \"update\"\n | \"delete\"\n | \"aggregate\"\n | \"bulkUpsert\"\n | \"all\";\n\n/**\n * GQL permission policy\n */\nexport interface SnapshotGqlPermissionPolicy {\n conditions: readonly SnapshotPermissionCondition[];\n actions: readonly SnapshotGqlAction[];\n permit: \"allow\" | \"deny\";\n description?: string;\n}\n\n/**\n * GQL permission configuration\n */\nexport type SnapshotGqlPermission = readonly SnapshotGqlPermissionPolicy[];\n\n/**\n * Type definition in schema snapshot\n */\nexport interface SnapshotType {\n name: string;\n pluralForm?: string;\n description?: string;\n fields: Record<string, SnapshotFieldConfig>;\n settings?: {\n aggregation?: boolean;\n bulkUpsert?: boolean;\n gqlOperations?: {\n create?: boolean;\n update?: boolean;\n delete?: boolean;\n read?: boolean;\n };\n publishEvents?: boolean;\n };\n indexes?: Record<string, SnapshotIndexConfig>;\n files?: Record<string, string>;\n forwardRelationships?: Record<string, SnapshotRelationship>;\n backwardRelationships?: Record<string, SnapshotRelationship>;\n permissions?: {\n record?: SnapshotRecordPermission;\n gql?: SnapshotGqlPermission;\n };\n}\n\n/**\n * Schema snapshot - full schema state at a point in time\n * Stored as XXXX/schema.json (e.g., 0000/schema.json for initial snapshot)\n */\nexport interface SchemaSnapshot {\n /** Format version for future compatibility */\n version: typeof SCHEMA_SNAPSHOT_VERSION;\n namespace: string;\n createdAt: string;\n types: Record<string, SnapshotType>;\n}\n\n/**\n * Migration file type\n */\nexport type 
MigrationFileType = "schema" | "diff" | "migrate" | "db";

/**
 * Information about a migration
 */
export interface MigrationInfo {
  /** Migration number (e.g., 1, 2, 3) */
  number: number;
  /** Migration number as 4-digit string (e.g., "0001", "0002") */
  numberStr: string;
  /** Migration file type */
  type: MigrationFileType;
  /** Path to migration file */
  path: string;
  /** Parsed content (schema snapshot or diff) */
  content: SchemaSnapshot | MigrationDiff;
}

// ============================================================================
// Migration Number Helpers
// ============================================================================

/**
 * Check whether a string is a well-formed migration number (4-digit format)
 * @param {string} numberStr - Migration number string to validate
 * @returns {boolean} True if number matches expected format
 */
export function isValidMigrationNumber(numberStr: string): boolean {
  return MIGRATION_NUMBER_PATTERN.test(numberStr);
}

/**
 * Render a migration number as a zero-padded 4-digit string
 * @param {number} num - Migration number
 * @returns {string} 4-digit padded string (e.g., "0001")
 */
export function formatMigrationNumber(num: number): string {
  return String(num).padStart(4, "0");
}

/**
 * Extract the migration number from a file name (e.g., "0001_schema.json")
 * @param {string} fileName - File name to parse
 * @returns {number | null} Parsed number, or null when the name has no 4-digit prefix
 */
export function parseMigrationNumber(fileName: string): number | null {
  const prefix = /^(\d{4})_/.exec(fileName);
  if (prefix === null) return null;
  const parsed = Number.parseInt(prefix[1], 10);
  return Number.isNaN(parsed) ? null : parsed;
}

// ============================================================================
// Path Helpers
// ============================================================================

/**
 * Map of migration file types to their file names
 */
const MIGRATION_FILE_NAMES: Record<MigrationFileType, string> = {
  schema: SCHEMA_FILE_NAME,
  diff: DIFF_FILE_NAME,
  migrate: MIGRATE_FILE_NAME,
  db: DB_TYPES_FILE_NAME,
};

/**
 * Resolve the directory that holds a migration's files
 * @param {string} migrationsDir - Base migrations directory path
 * @param {number} num - Migration number
 * @returns {string} Full directory path for the migration
 */
export function getMigrationDirPath(migrationsDir: string, num: number): string {
  return path.join(migrationsDir, formatMigrationNumber(num));
}

/**
 * Resolve the path of a specific migration file
 * @param {string} migrationsDir - Migrations directory path
 * @param {number} num - Migration number
 * @param {MigrationFileType} type - File type
 * @returns {string} Full file path
 */
export function getMigrationFilePath(
  migrationsDir: string,
  num: number,
  type: MigrationFileType,
): string {
  return path.join(getMigrationDirPath(migrationsDir, num), MIGRATION_FILE_NAMES[type]);
}

// ============================================================================
// Snapshot Creation
// ============================================================================

/**
 * Build the snapshot representation of a parsed field
 * @param {ParsedField} field - Parsed field definition
 * @returns {SnapshotFieldConfig} Snapshot field configuration
 */
function createSnapshotFieldConfig(field: ParsedField): SnapshotFieldConfig {
  const src = field.config;

  // `required` defaults to true when undefined, mirroring
  // generateParsedTailorDBTypeManifest (fields are required by default in the SDK).
  const out: SnapshotFieldConfig = {
    type: src.type,
    required: src.required !== false,
  };

  if (src.array) out.array = true;
  if (src.index) out.index = true;
  if (src.unique) out.unique = true;

  if (src.allowedValues && src.allowedValues.length > 0) {
    out.allowedValues = src.allowedValues.map((entry) => {
      const mapped: SnapshotEnumValue = { value: entry.value };
      if (entry.description) mapped.description = entry.description;
      return mapped;
    });
  }

  if (src.foreignKey) {
    out.foreignKey = true;
    if (src.foreignKeyType) out.foreignKeyType = src.foreignKeyType;
    if (src.foreignKeyField) out.foreignKeyField = src.foreignKeyField;
  }

  if (src.description) out.description = src.description;
  if (src.vector) out.vector = true;

  if (src.hooks) {
    const hooks: NonNullable<SnapshotFieldConfig["hooks"]> = {};
    if (src.hooks.create) hooks.create = { expr: src.hooks.create.expr };
    if (src.hooks.update) hooks.update = { expr: src.hooks.update.expr };
    out.hooks = hooks;
  }

  if (src.validate && src.validate.length > 0) {
    out.validate = src.validate.map((rule) => ({
      script: { expr: rule.script.expr },
      errorMessage: rule.errorMessage,
    }));
  }

  if (src.serial) {
    const serial: NonNullable<SnapshotFieldConfig["serial"]> = { start: src.serial.start };
    if (src.serial.maxValue !== undefined) serial.maxValue = src.serial.maxValue;
    if (src.serial.format) serial.format = src.serial.format;
    out.serial = serial;
  }

  if (src.scale !== undefined) out.scale = src.scale;

  if (src.fields && Object.keys(src.fields).length > 0) {
    out.fields = Object.fromEntries(
      Object.entries(src.fields).map(([childName, childConfig]) => [
        childName,
        createSnapshotFieldConfigFromOperatorConfig(childConfig),
      ]),
    );
  }

  return out;
}

/**
 * Create a snapshot field config from an 
OperatorFieldConfig (for nested fields)\n * @param {import(\"@/types/tailordb\").OperatorFieldConfig} fieldConfig - Field configuration\n * @returns {SnapshotFieldConfig} Snapshot field configuration\n */\nfunction createSnapshotFieldConfigFromOperatorConfig(\n fieldConfig: OperatorFieldConfig,\n): SnapshotFieldConfig {\n const config: SnapshotFieldConfig = {\n type: fieldConfig.type,\n required: fieldConfig.required !== false,\n };\n\n if (fieldConfig.array) config.array = true;\n if (fieldConfig.index) config.index = true;\n if (fieldConfig.unique) config.unique = true;\n\n if (fieldConfig.allowedValues && fieldConfig.allowedValues.length > 0) {\n config.allowedValues = fieldConfig.allowedValues.map((v) => ({\n value: v.value,\n ...(v.description && { description: v.description }),\n }));\n }\n\n if (fieldConfig.foreignKey) {\n config.foreignKey = true;\n if (fieldConfig.foreignKeyType) config.foreignKeyType = fieldConfig.foreignKeyType;\n if (fieldConfig.foreignKeyField) config.foreignKeyField = fieldConfig.foreignKeyField;\n }\n\n if (fieldConfig.description) config.description = fieldConfig.description;\n if (fieldConfig.vector) config.vector = true;\n\n if (fieldConfig.hooks) {\n config.hooks = {};\n if (fieldConfig.hooks.create) {\n config.hooks.create = { expr: fieldConfig.hooks.create.expr };\n }\n if (fieldConfig.hooks.update) {\n config.hooks.update = { expr: fieldConfig.hooks.update.expr };\n }\n }\n\n if (fieldConfig.validate && fieldConfig.validate.length > 0) {\n config.validate = fieldConfig.validate.map((v) => ({\n script: { expr: v.script.expr },\n errorMessage: v.errorMessage,\n }));\n }\n\n if (fieldConfig.serial) {\n config.serial = {\n start: fieldConfig.serial.start,\n ...(fieldConfig.serial.maxValue !== undefined && { maxValue: fieldConfig.serial.maxValue }),\n ...(fieldConfig.serial.format && { format: fieldConfig.serial.format }),\n };\n }\n\n if (fieldConfig.scale !== undefined) config.scale = fieldConfig.scale;\n\n // Recursive for 
nested fields\n if (fieldConfig.fields && Object.keys(fieldConfig.fields).length > 0) {\n config.fields = {};\n for (const [nestedName, nestedConfig] of Object.entries(fieldConfig.fields)) {\n config.fields[nestedName] = createSnapshotFieldConfigFromOperatorConfig(nestedConfig);\n }\n }\n\n return config;\n}\n\n/**\n * Create a snapshot type from a parsed type\n * @param {TailorDBType} type - Parsed TailorDB type definition\n * @returns {SnapshotType} Snapshot type configuration\n */\nfunction createSnapshotType(type: TailorDBType): SnapshotType {\n const fields: Record<string, SnapshotFieldConfig> = {};\n\n for (const [fieldName, field] of Object.entries(type.fields)) {\n fields[fieldName] = createSnapshotFieldConfig(field);\n }\n\n const snapshotType: SnapshotType = {\n name: type.name,\n fields,\n };\n\n if (type.pluralForm) snapshotType.pluralForm = type.pluralForm;\n if (type.description) snapshotType.description = type.description;\n if (type.settings) {\n snapshotType.settings = {};\n if (type.settings.aggregation !== undefined) {\n snapshotType.settings.aggregation = type.settings.aggregation;\n }\n if (type.settings.bulkUpsert !== undefined) {\n snapshotType.settings.bulkUpsert = type.settings.bulkUpsert;\n }\n if (type.settings.gqlOperations) {\n // gqlOperations is already normalized by schema transform\n const ops = type.settings.gqlOperations;\n snapshotType.settings.gqlOperations = {\n ...(ops.create !== undefined && {\n create: ops.create,\n }),\n ...(ops.update !== undefined && {\n update: ops.update,\n }),\n ...(ops.delete !== undefined && {\n delete: ops.delete,\n }),\n ...(ops.read !== undefined && {\n read: ops.read,\n }),\n };\n }\n if (type.settings.publishEvents !== undefined) {\n snapshotType.settings.publishEvents = type.settings.publishEvents;\n }\n }\n\n if (type.indexes && Object.keys(type.indexes).length > 0) {\n snapshotType.indexes = {};\n for (const [indexName, indexConfig] of Object.entries(type.indexes)) {\n 
snapshotType.indexes[indexName] = {\n fields: indexConfig.fields,\n unique: indexConfig.unique,\n };\n }\n }\n\n if (type.files && Object.keys(type.files).length > 0) {\n snapshotType.files = { ...type.files };\n }\n\n if (Object.keys(type.forwardRelationships).length > 0) {\n snapshotType.forwardRelationships = {};\n for (const [relName, rel] of Object.entries(type.forwardRelationships)) {\n snapshotType.forwardRelationships[relName] = {\n targetType: rel.targetType,\n targetField: rel.targetField,\n sourceField: rel.sourceField,\n isArray: rel.isArray,\n description: rel.description,\n };\n }\n }\n\n if (Object.keys(type.backwardRelationships).length > 0) {\n snapshotType.backwardRelationships = {};\n for (const [relName, rel] of Object.entries(type.backwardRelationships)) {\n snapshotType.backwardRelationships[relName] = {\n targetType: rel.targetType,\n targetField: rel.targetField,\n sourceField: rel.sourceField,\n isArray: rel.isArray,\n description: rel.description,\n };\n }\n }\n\n if (type.permissions.record || type.permissions.gql) {\n snapshotType.permissions = {};\n\n if (type.permissions.record) {\n snapshotType.permissions.record = {\n create: type.permissions.record.create.map(convertActionPermission),\n read: type.permissions.record.read.map(convertActionPermission),\n update: type.permissions.record.update.map(convertActionPermission),\n delete: type.permissions.record.delete.map(convertActionPermission),\n };\n }\n\n if (type.permissions.gql) {\n snapshotType.permissions.gql = type.permissions.gql.map((policy) => ({\n conditions: policy.conditions as SnapshotPermissionCondition[],\n actions: policy.actions as SnapshotGqlAction[],\n permit: policy.permit,\n ...(policy.description && { description: policy.description }),\n }));\n }\n }\n\n return snapshotType;\n}\n\n/**\n * Convert an action permission to snapshot format\n * @param {StandardActionPermission<\"record\">} permission - Action permission\n * @returns {SnapshotActionPermission} Snapshot 
action permission\n */\nfunction convertActionPermission(\n permission: StandardActionPermission<\"record\">,\n): SnapshotActionPermission {\n return {\n conditions: permission.conditions as SnapshotPermissionCondition[],\n permit: permission.permit,\n ...(permission.description && { description: permission.description }),\n };\n}\n\n/**\n * Create a schema snapshot from local type definitions\n * @param {Record<string, TailorDBType>} types - Local type definitions\n * @param {string} namespace - Namespace for the snapshot\n * @returns {SchemaSnapshot} Schema snapshot\n */\nexport function createSnapshotFromLocalTypes(\n types: Record<string, TailorDBType>,\n namespace: string,\n): SchemaSnapshot {\n const snapshotTypes: Record<string, SnapshotType> = {};\n\n for (const [typeName, type] of Object.entries(types)) {\n snapshotTypes[typeName] = createSnapshotType(type);\n }\n\n return {\n version: SCHEMA_SNAPSHOT_VERSION,\n namespace,\n createdAt: new Date().toISOString(),\n types: snapshotTypes,\n };\n}\n\n// ============================================================================\n// Snapshot Loading\n// ============================================================================\n\n/**\n * Load a schema snapshot from a file\n * @param {string} filePath - Path to the snapshot file\n * @returns {SchemaSnapshot} Loaded schema snapshot\n */\nexport function loadSnapshot(filePath: string): SchemaSnapshot {\n const content = fs.readFileSync(filePath, \"utf-8\");\n return JSON.parse(content) as SchemaSnapshot;\n}\n\n/**\n * Load a migration diff from a file\n * @param {string} filePath - Path to the diff file\n * @returns {MigrationDiff} Loaded migration diff\n */\nexport function loadDiff(filePath: string): MigrationDiff {\n const content = fs.readFileSync(filePath, \"utf-8\");\n return JSON.parse(content) as MigrationDiff;\n}\n\n/**\n * Get all migration directories and their files, sorted by number\n * @param {string} migrationsDir - Migrations directory path\n * 
@returns {Array<{number: number, type: \"schema\" | \"diff\", path: string}>} Migration files sorted by number\n */\nexport function getMigrationFiles(\n migrationsDir: string,\n): { number: number; type: \"schema\" | \"diff\"; path: string }[] {\n if (!fs.existsSync(migrationsDir)) {\n return [];\n }\n\n const entries = fs.readdirSync(migrationsDir, { withFileTypes: true });\n const migrations: {\n number: number;\n type: \"schema\" | \"diff\";\n path: string;\n }[] = [];\n\n for (const entry of entries) {\n // Only process directories with valid migration numbers (e.g., \"0000\", \"0001\")\n if (!entry.isDirectory()) continue;\n if (!isValidMigrationNumber(entry.name)) continue;\n\n const num = parseInt(entry.name, 10);\n const migrationDir = path.join(migrationsDir, entry.name);\n\n // Check for schema.json\n const schemaPath = path.join(migrationDir, SCHEMA_FILE_NAME);\n if (fs.existsSync(schemaPath)) {\n migrations.push({\n number: num,\n type: \"schema\",\n path: schemaPath,\n });\n }\n\n // Check for diff.json\n const diffPath = path.join(migrationDir, DIFF_FILE_NAME);\n if (fs.existsSync(diffPath)) {\n migrations.push({\n number: num,\n type: \"diff\",\n path: diffPath,\n });\n }\n }\n\n // Sort by number\n migrations.sort((a, b) => a.number - b.number);\n return migrations;\n}\n\n/**\n * Get the next migration number for a directory\n * Returns INITIAL_SCHEMA_NUMBER (0) if no migrations exist\n * @param {string} migrationsDir - Migrations directory path\n * @returns {number} Next migration number\n */\nexport function getNextMigrationNumber(migrationsDir: string): number {\n const files = getMigrationFiles(migrationsDir);\n if (files.length === 0) return INITIAL_SCHEMA_NUMBER;\n return Math.max(...files.map((f) => f.number)) + 1;\n}\n\n/**\n * Apply a diff to a snapshot to get the resulting snapshot\n * @param {SchemaSnapshot} snapshot - Base snapshot to apply diff to\n * @param {MigrationDiff} diff - Diff to apply\n * @returns {SchemaSnapshot} Resulting 
snapshot after applying diff
 */
function applyDiffToSnapshot(snapshot: SchemaSnapshot, diff: MigrationDiff): SchemaSnapshot {
  // Shallow-copy the types map so the caller's snapshot is never mutated;
  // every change below replaces whole entries in this copy.
  const types = { ...snapshot.types };

  for (const change of diff.changes) {
    switch (change.kind) {
      case "type_added":
        types[change.typeName] = change.after as SnapshotType;
        break;
      case "type_removed":
        delete types[change.typeName];
        break;
      case "type_modified":
        // NOTE(review): only indexes/files are merged for type_modified;
        // other type-level attributes appear to be carried by dedicated
        // change kinds — confirm against the diff generator.
        if (types[change.typeName] && change.after) {
          const after = change.after as {
            indexes?: Record<string, SnapshotIndexConfig>;
            files?: Record<string, string>;
          };
          types[change.typeName] = {
            ...types[change.typeName],
            ...(after.indexes !== undefined && { indexes: after.indexes }),
            ...(after.files !== undefined && { files: after.files }),
          };
        }
        break;
      case "field_added":
      case "field_modified":
        // Added and modified fields are handled identically: the `after`
        // config overwrites (or creates) the field entry.
        if (types[change.typeName] && change.fieldName) {
          types[change.typeName] = {
            ...types[change.typeName],
            fields: {
              ...types[change.typeName].fields,
              [change.fieldName]: change.after as SnapshotFieldConfig,
            },
          };
        }
        break;
      case "field_removed":
        if (types[change.typeName] && change.fieldName) {
          const { [change.fieldName]: _, ...remainingFields } = types[change.typeName].fields;
          types[change.typeName] = {
            ...types[change.typeName],
            fields: remainingFields,
          };
        }
        break;
      case "index_added":
      case "index_modified":
        if (types[change.typeName] && change.indexName) {
          types[change.typeName] = {
            ...types[change.typeName],
            indexes: {
              ...types[change.typeName].indexes,
              [change.indexName]: change.after as SnapshotIndexConfig,
            },
          };
        }
        break;
      case "index_removed":
        // When the last index is removed, `indexes` collapses back to
        // undefined rather than staying as an empty object.
        if (types[change.typeName] && change.indexName && types[change.typeName].indexes) {
          const { [change.indexName]: _, ...remainingIndexes } = types[change.typeName].indexes!;
          types[change.typeName] = {
            ...types[change.typeName],
            indexes: Object.keys(remainingIndexes).length > 0 ? remainingIndexes : undefined,
          };
        }
        break;
      case "file_added":
      case "file_modified":
        if (types[change.typeName] && change.fieldName) {
          types[change.typeName] = {
            ...types[change.typeName],
            files: {
              ...types[change.typeName].files,
              [change.fieldName]: change.after as string,
            },
          };
        }
        break;
      case "file_removed":
        // Same collapse-to-undefined behavior as index_removed.
        if (types[change.typeName] && change.fieldName && types[change.typeName].files) {
          const { [change.fieldName]: _, ...remainingFiles } = types[change.typeName].files!;
          types[change.typeName] = {
            ...types[change.typeName],
            files: Object.keys(remainingFiles).length > 0 ? remainingFiles : undefined,
          };
        }
        break;
      case "relationship_added":
      case "relationship_modified":
        if (types[change.typeName] && change.relationshipName) {
          const rel = change.after as SnapshotRelationship;
          // Use relationshipType if specified, fallback to existing logic for backwards compatibility
          const targetType =
            change.relationshipType ??
            (types[change.typeName].forwardRelationships?.[change.relationshipName]
              ? "forward"
              : types[change.typeName].backwardRelationships?.[change.relationshipName]
                ? "backward"
                : "forward");

          if (targetType === "forward") {
            types[change.typeName] = {
              ...types[change.typeName],
              forwardRelationships: {
                ...types[change.typeName].forwardRelationships,
                [change.relationshipName]: rel,
              },
            };
          } else {
            types[change.typeName] = {
              ...types[change.typeName],
              backwardRelationships: {
                ...types[change.typeName].backwardRelationships,
                [change.relationshipName]: rel,
              },
            };
          }
        }
        break;
      case "relationship_removed":
        if (types[change.typeName] && change.relationshipName) {
          const type = types[change.typeName];
          // Use relationshipType if specified
          const targetType =
            change.relationshipType ??
            (type.forwardRelationships?.[change.relationshipName]
              ? "forward"
              : type.backwardRelationships?.[change.relationshipName]
                ? "backward"
                : null);

          if (targetType === "forward" && type.forwardRelationships?.[change.relationshipName]) {
            const { [change.relationshipName]: _, ...remaining } = type.forwardRelationships;
            types[change.typeName] = {
              ...type,
              forwardRelationships: Object.keys(remaining).length > 0 ? remaining : undefined,
            };
          } else if (
            targetType === "backward" &&
            type.backwardRelationships?.[change.relationshipName]
          ) {
            const { [change.relationshipName]: _, ...remaining } = type.backwardRelationships;
            types[change.typeName] = {
              ...type,
              backwardRelationships: Object.keys(remaining).length > 0 ? remaining : undefined,
            };
          }
        }
        break;
      case "permission_modified":
        // Replaces the whole permissions object; record/gql may each end up
        // undefined when absent from the change payload.
        if (types[change.typeName] && change.after) {
          const after = change.after as {
            recordPermission?: SnapshotRecordPermission;
            gqlPermission?: SnapshotGqlPermission;
          };
          types[change.typeName] = {
            ...types[change.typeName],
            permissions: {
              record: after.recordPermission,
              gql: after.gqlPermission,
            },
          };
        }
        break;
    }
  }

  // The resulting snapshot takes its timestamp from the applied diff.
  return {
    ...snapshot,
    types,
    createdAt: diff.createdAt,
  };
}

/**
 * Reconstruct the latest schema snapshot from all migration files
 * Returns null if no migrations exist
 * @param {string} migrationsDir - Migrations directory path
 * @param {number} [maxVersion] - Optional maximum migration version to apply
 * @returns {SchemaSnapshot | null} Reconstructed snapshot or null if no migrations exist
 */
export function reconstructSnapshotFromMigrations(
  migrationsDir: string,
  maxVersion?: number,
): SchemaSnapshot | null {
  const files = getMigrationFiles(migrationsDir);
  if (files.length === 0) return null;

  // Find the initial schema file (should be 0000/schema.json)
  const schemaFile = files.find((f) => f.type === "schema" && f.number === INITIAL_SCHEMA_NUMBER);
  if (!schemaFile) {
    throw new Error(
      `No initial schema file found in ${migrationsDir}. 
Expected ${formatMigrationNumber(\n INITIAL_SCHEMA_NUMBER,\n )}/schema.json`,\n );\n }\n\n let snapshot = loadSnapshot(schemaFile.path);\n\n // Apply subsequent diffs in order (up to maxVersion if specified)\n for (const file of files) {\n if (file.type === \"diff\" && file.number > schemaFile.number) {\n // Skip diffs beyond maxVersion if specified\n if (maxVersion !== undefined && file.number > maxVersion) {\n continue;\n }\n const diff = loadDiff(file.path);\n snapshot = applyDiffToSnapshot(snapshot, diff);\n }\n }\n\n return snapshot;\n}\n\n/**\n * Get the latest migration number from a directory\n * Returns 0 if no migrations exist\n * @param {string} migrationsDir - Migrations directory path\n * @returns {number} Latest migration number or 0 if no migrations exist\n */\nexport function getLatestMigrationNumber(migrationsDir: string): number {\n const files = getMigrationFiles(migrationsDir);\n if (files.length === 0) return 0;\n return Math.max(...files.map((f) => f.number));\n}\n\n// ============================================================================\n// Snapshot Comparison\n// ============================================================================\n\n/**\n * Compare two field configs and determine if they are different\n * @param {SnapshotFieldConfig} oldField - Old field configuration\n * @param {SnapshotFieldConfig} newField - New field configuration\n * @returns {boolean} True if fields are different\n */\nfunction areFieldsDifferent(oldField: SnapshotFieldConfig, newField: SnapshotFieldConfig): boolean {\n // Compare required properties\n if (oldField.type !== newField.type) return true;\n if (oldField.required !== newField.required) return true;\n\n // Compare optional boolean properties (default to false)\n const booleanProps = [\"array\", \"index\", \"unique\", \"foreignKey\", \"vector\"] as const;\n for (const prop of booleanProps) {\n if ((oldField[prop] ?? false) !== (newField[prop] ?? 
false)) return true;
  }

  // Compare foreign key properties
  if (oldField.foreignKeyType !== newField.foreignKeyType) return true;
  if (oldField.foreignKeyField !== newField.foreignKeyField) return true;

  if ((oldField.description ?? "") !== (newField.description ?? "")) return true;

  // Enum values are compared as an unordered set keyed by value; descriptions
  // must match too (absent description is treated as "").
  const oldAllowed = oldField.allowedValues ?? [];
  const newAllowed = newField.allowedValues ?? [];
  if (oldAllowed.length !== newAllowed.length) return true;
  const newAllowedMap = new Map(newAllowed.map((v) => [v.value, v.description]));
  for (const v of oldAllowed) {
    if (!newAllowedMap.has(v.value)) return true;
    if ((v.description ?? "") !== (newAllowedMap.get(v.value) ?? "")) return true;
  }

  const oldHooks = oldField.hooks;
  const newHooks = newField.hooks;
  if (Boolean(oldHooks) !== Boolean(newHooks)) return true;
  if (oldHooks && newHooks) {
    if ((oldHooks.create?.expr ?? "") !== (newHooks.create?.expr ?? "")) return true;
    if ((oldHooks.update?.expr ?? "") !== (newHooks.update?.expr ?? "")) return true;
  }

  // Validation rules are order-sensitive: the lists are compared pairwise.
  const oldValidate = oldField.validate ?? [];
  const newValidate = newField.validate ?? [];
  if (oldValidate.length !== newValidate.length) return true;
  for (let i = 0; i < oldValidate.length; i++) {
    if (oldValidate[i].script.expr !== newValidate[i].script.expr) return true;
    if (oldValidate[i].errorMessage !== newValidate[i].errorMessage) return true;
  }

  const oldSerial = oldField.serial;
  const newSerial = newField.serial;
  if (Boolean(oldSerial) !== Boolean(newSerial)) return true;
  if (oldSerial && newSerial) {
    if (oldSerial.start !== newSerial.start) return true;
    if (oldSerial.maxValue !== newSerial.maxValue) return true;
    if ((oldSerial.format ?? "") !== (newSerial.format ?? "")) return true;
  }

  if (oldField.scale !== newField.scale) return true;

  // Nested fields are compared recursively by name.
  const oldFields = oldField.fields ?? {};
  const newFields = newField.fields ?? {};
  const oldFieldNames = Object.keys(oldFields);
  const newFieldNames = Object.keys(newFields);
  if (oldFieldNames.length !== newFieldNames.length) return true;
  for (const fieldName of oldFieldNames) {
    if (!newFields[fieldName]) return true;
    if (areFieldsDifferent(oldFields[fieldName], newFields[fieldName])) return true;
  }

  return false;
}

/**
 * Determine if a field change is a breaking change
 *
 * Checks are ordered and the first match wins, so e.g. a type change is
 * reported before a required-ness change on the same field.
 * @param {string} typeName - Name of the type containing the field
 * @param {string} fieldName - Name of the field being changed
 * @param {SnapshotFieldConfig | undefined} oldField - Old field configuration
 * @param {SnapshotFieldConfig | undefined} newField - New field configuration
 * @returns {BreakingChangeInfo | null} Breaking change info or null if not breaking
 */
function isBreakingFieldChange(
  typeName: string,
  fieldName: string,
  oldField: SnapshotFieldConfig | undefined,
  newField: SnapshotFieldConfig | undefined,
): BreakingChangeInfo | null {
  // Field added as required - breaking (existing records don't have this value)
  if (!oldField && newField && newField.required) {
    return {
      typeName,
      fieldName,
      reason: "Required field added",
    };
  }

  // Field type changed - unsupported (requires 3-step migration)
  if (oldField && newField && oldField.type !== newField.type) {
    return {
      typeName,
      fieldName,
      reason: `Field type changed from ${oldField.type} to ${newField.type}`,
      unsupported: true,
      showThreeStepHint: true,
    };
  }

  // Optional to required - breaking
  if (oldField && newField && !oldField.required && newField.required) {
    return {
      typeName,
      fieldName,
      reason: "Field changed from optional to required",
    };
  }

  // Array property changed - unsupported (requires 3-step migration)
  if (oldField && newField && (oldField.array ?? false) !== (newField.array ?? false)) {
    const [fromType, toType] = oldField.array
      ? ["array", "single value"]
      : ["single value", "array"];
    return {
      typeName,
      fieldName,
      reason: `Field changed from ${fromType} to ${toType}`,
      unsupported: true,
      showThreeStepHint: true,
    };
  }

  // Foreign key relationship changed - breaking (existing references may become invalid)
  if (oldField && newField) {
    const oldForeignKeyType = oldField.foreignKeyType;
    const newForeignKeyType = newField.foreignKeyType;
    if (oldForeignKeyType && newForeignKeyType && oldForeignKeyType !== newForeignKeyType) {
      return {
        typeName,
        fieldName,
        reason: `Foreign key target type changed from ${oldForeignKeyType} to ${newForeignKeyType}`,
      };
    }
  }

  // Unique constraint added - breaking (existing duplicate values would violate constraint)
  if (oldField && newField && !(oldField.unique ?? false) && (newField.unique ?? false)) {
    return {
      typeName,
      fieldName,
      reason: "Unique constraint added to field",
    };
  }

  // Enum values removed - breaking (existing records may have removed values)
  if (oldField && newField && oldField.type === "enum" && newField.type === "enum") {
    const oldAllowed = oldField.allowedValues ?? [];
    const newAllowed = newField.allowedValues ?? [];
    const oldValues = oldAllowed.map((v) => v.value);
    const newValuesSet = new Set(newAllowed.map((v) => v.value));
    const removedValues = oldValues.filter((v) => !newValuesSet.has(v));
    if (removedValues.length > 0) {
      return {
        typeName,
        fieldName,
        reason: `Enum values removed: ${removedValues.join(", ")}`,
      };
    }
  }

  return null;
}

/**
 * Context for collecting diff changes and breaking changes
 */
interface DiffContext {
  changes: DiffChange[];
  breakingChanges: BreakingChangeInfo[];
}

/**
 * Record a diff change and, when it carries a field name, also record any
 * breaking-change info derived from the before/after field configs.
 * @param {DiffContext} ctx - Accumulator for changes and breaking changes
 * @param {DiffChange} change - Change to record
 * @param {SnapshotFieldConfig | undefined} oldField - Field config before the change (undefined for additions)
 * @param {SnapshotFieldConfig | undefined} newField - Field config after the change (undefined for removals)
 * @returns {void}
 */
function addChange(
  ctx: DiffContext,
  change: DiffChange,
  oldField: SnapshotFieldConfig | undefined,
  newField: SnapshotFieldConfig | undefined,
): void {
  ctx.changes.push(change);

  if (change.fieldName) {
    const breaking = isBreakingFieldChange(change.typeName, change.fieldName, oldField, newField);
    if (breaking) {
      ctx.breakingChanges.push(breaking);
    }
  }
}

/**
 * Compare the fields of two versions of a type and record added, removed and
 * modified field changes (plus breaking-change info) into the context.
 * @param {DiffContext} ctx - Diff context
 * @param {string} typeName - Type name
 * @param {SnapshotType} prevType - Previous type snapshot
 * @param {SnapshotType} currType - Current type snapshot
 * @returns {void}
 */
function compareTypeFields(
  ctx: DiffContext,
  typeName: string,
  prevType: SnapshotType,
  currType: SnapshotType,
): void {
  const prevFieldNames = new Set(Object.keys(prevType.fields));
  const currFieldNames = new Set(Object.keys(currType.fields));

  // Check for added fields
  for (const fieldName of currFieldNames) {
    if (!prevFieldNames.has(fieldName)) {
      addChange(
        ctx,
        {
          kind: "field_added",
          typeName,
          fieldName,
          after: currType.fields[fieldName],
        },
        undefined,
        currType.fields[fieldName],
      );
    }
  }

  // Check for removed fields
  for (const fieldName of prevFieldNames) {
    if (!currFieldNames.has(fieldName)) {
      addChange(
        ctx,
        {
          kind: "field_removed",
          typeName,
          fieldName,
          before: prevType.fields[fieldName],
        },
        prevType.fields[fieldName],
        undefined,
      );
    }
  }

  // Check for modified fields
  for (const fieldName of currFieldNames) {
    if (!prevFieldNames.has(fieldName)) continue;

    const prevField = prevType.fields[fieldName];
    const currField = currType.fields[fieldName];

    if 
(areFieldsDifferent(prevField, currField)) {\n addChange(\n ctx,\n {\n kind: \"field_modified\",\n typeName,\n fieldName,\n before: prevField,\n after: currField,\n },\n prevField,\n currField,\n );\n }\n }\n}\n\n/**\n * Compare type-level indexes\n * @param {DiffContext} ctx - Diff context\n * @param {string} typeName - Type name\n * @param {Record<string, SnapshotIndexConfig> | undefined} oldIndexes - Previous indexes\n * @param {Record<string, SnapshotIndexConfig> | undefined} newIndexes - Current indexes\n * @returns {void}\n */\nfunction compareIndexes(\n ctx: DiffContext,\n typeName: string,\n oldIndexes: Record<string, SnapshotIndexConfig> | undefined,\n newIndexes: Record<string, SnapshotIndexConfig> | undefined,\n): void {\n const oldKeys = new Set(Object.keys(oldIndexes || {}));\n const newKeys = new Set(Object.keys(newIndexes || {}));\n\n // Index added\n for (const indexName of newKeys) {\n if (!oldKeys.has(indexName)) {\n ctx.changes.push({\n kind: \"index_added\",\n typeName,\n indexName,\n after: newIndexes![indexName],\n });\n }\n }\n\n // Index removed\n for (const indexName of oldKeys) {\n if (!newKeys.has(indexName)) {\n ctx.changes.push({\n kind: \"index_removed\",\n typeName,\n indexName,\n before: oldIndexes![indexName],\n });\n }\n }\n\n // Index modified\n for (const indexName of newKeys) {\n if (oldKeys.has(indexName)) {\n const oldIndex = oldIndexes![indexName];\n const newIndex = newIndexes![indexName];\n\n const oldFieldsStr = JSON.stringify(oldIndex.fields.toSorted());\n const newFieldsStr = JSON.stringify(newIndex.fields.toSorted());\n\n if (oldFieldsStr !== newFieldsStr || oldIndex.unique !== newIndex.unique) {\n const reasons: string[] = [];\n if (oldFieldsStr !== newFieldsStr) reasons.push(\"fields changed\");\n if (oldIndex.unique !== newIndex.unique) reasons.push(\"unique constraint changed\");\n ctx.changes.push({\n kind: \"index_modified\",\n typeName,\n indexName,\n reason: reasons.join(\", \"),\n before: oldIndex,\n after: 
newIndex,\n });\n }\n }\n }\n}\n\n/**\n * Compare type-level file fields\n * @param {DiffContext} ctx - Diff context\n * @param {string} typeName - Type name\n * @param {Record<string, string> | undefined} oldFiles - Previous file fields\n * @param {Record<string, string> | undefined} newFiles - Current file fields\n * @returns {void}\n */\nfunction compareFiles(\n ctx: DiffContext,\n typeName: string,\n oldFiles: Record<string, string> | undefined,\n newFiles: Record<string, string> | undefined,\n): void {\n const oldKeys = new Set(Object.keys(oldFiles || {}));\n const newKeys = new Set(Object.keys(newFiles || {}));\n\n // File field added\n for (const fileName of newKeys) {\n if (!oldKeys.has(fileName)) {\n ctx.changes.push({\n kind: \"file_added\",\n typeName,\n fieldName: fileName,\n after: newFiles![fileName],\n });\n }\n }\n\n // File field removed\n for (const fileName of oldKeys) {\n if (!newKeys.has(fileName)) {\n ctx.changes.push({\n kind: \"file_removed\",\n typeName,\n fieldName: fileName,\n before: oldFiles![fileName],\n });\n }\n }\n\n // File field modified (description changed)\n for (const fileName of newKeys) {\n if (oldKeys.has(fileName)) {\n if (oldFiles![fileName] !== newFiles![fileName]) {\n ctx.changes.push({\n kind: \"file_modified\",\n typeName,\n fieldName: fileName,\n reason: \"description changed\",\n before: oldFiles![fileName],\n after: newFiles![fileName],\n });\n }\n }\n }\n}\n\n/**\n * Compare type-level relationships\n * @param {DiffContext} ctx - Diff context\n * @param {string} typeName - Type name\n * @param {\"forward\" | \"backward\"} relationshipType - Relationship direction to compare\n * @param {Record<string, SnapshotRelationship> | undefined} oldRelationships - Previous relationships\n * @param {Record<string, SnapshotRelationship> | undefined} newRelationships - Current relationships\n * @returns {void}\n */\nfunction compareRelationships(\n ctx: DiffContext,\n typeName: string,\n relationshipType: \"forward\" | 
\"backward\",\n oldRelationships: Record<string, SnapshotRelationship> | undefined,\n newRelationships: Record<string, SnapshotRelationship> | undefined,\n): void {\n const oldKeys = new Set(Object.keys(oldRelationships || {}));\n const newKeys = new Set(Object.keys(newRelationships || {}));\n\n // Relationship added\n for (const relName of newKeys) {\n if (!oldKeys.has(relName)) {\n ctx.changes.push({\n kind: \"relationship_added\",\n typeName,\n relationshipName: relName,\n relationshipType,\n after: newRelationships![relName],\n });\n }\n }\n\n // Relationship removed\n for (const relName of oldKeys) {\n if (!newKeys.has(relName)) {\n ctx.changes.push({\n kind: \"relationship_removed\",\n typeName,\n relationshipName: relName,\n relationshipType,\n before: oldRelationships![relName],\n });\n }\n }\n\n // Relationship modified\n for (const relName of newKeys) {\n if (oldKeys.has(relName)) {\n const oldRel = oldRelationships![relName];\n const newRel = newRelationships![relName];\n\n const reasons: string[] = [];\n if (oldRel.targetType !== newRel.targetType) reasons.push(\"targetType changed\");\n if (oldRel.targetField !== newRel.targetField) reasons.push(\"targetField changed\");\n if (oldRel.sourceField !== newRel.sourceField) reasons.push(\"sourceField changed\");\n if (oldRel.isArray !== newRel.isArray) reasons.push(\"isArray changed\");\n\n if (reasons.length > 0) {\n ctx.changes.push({\n kind: \"relationship_modified\",\n typeName,\n relationshipName: relName,\n relationshipType,\n reason: reasons.join(\", \"),\n before: oldRel,\n after: newRel,\n });\n }\n }\n }\n}\n\n/**\n * Compare type-level permissions\n * @param {DiffContext} ctx - Diff context\n * @param {string} typeName - Type name\n * @param {SnapshotRecordPermission | undefined} oldRecordPerm - Previous record permission\n * @param {SnapshotRecordPermission | undefined} newRecordPerm - Current record permission\n * @param {SnapshotGqlPermission | undefined} oldGqlPerm - Previous GQL permission\n 
* @param {SnapshotGqlPermission | undefined} newGqlPerm - Current GQL permission\n * @returns {void}\n */\nfunction comparePermissions(\n ctx: DiffContext,\n typeName: string,\n oldRecordPerm: SnapshotRecordPermission | undefined,\n newRecordPerm: SnapshotRecordPermission | undefined,\n oldGqlPerm: SnapshotGqlPermission | undefined,\n newGqlPerm: SnapshotGqlPermission | undefined,\n): void {\n // Compare record permissions\n const oldRecordStr = JSON.stringify(oldRecordPerm ?? null);\n const newRecordStr = JSON.stringify(newRecordPerm ?? null);\n const recordPermChanged = oldRecordStr !== newRecordStr;\n\n // Compare GQL permissions\n const oldGqlStr = JSON.stringify(oldGqlPerm ?? null);\n const newGqlStr = JSON.stringify(newGqlPerm ?? null);\n const gqlPermChanged = oldGqlStr !== newGqlStr;\n\n if (recordPermChanged || gqlPermChanged) {\n const reasons: string[] = [];\n if (recordPermChanged) reasons.push(\"record permission\");\n if (gqlPermChanged) reasons.push(\"GQL permission\");\n\n ctx.changes.push({\n kind: \"permission_modified\",\n typeName,\n reason: `${reasons.join(\" and \")} changed`,\n before: { recordPermission: oldRecordPerm, gqlPermission: oldGqlPerm },\n after: { recordPermission: newRecordPerm, gqlPermission: newGqlPerm },\n });\n }\n}\n\n/**\n * Compare two snapshots and generate a diff\n * @param {SchemaSnapshot} previous - Previous schema snapshot\n * @param {SchemaSnapshot} current - Current schema snapshot\n * @returns {MigrationDiff} Migration diff between snapshots\n */\nexport function compareSnapshots(previous: SchemaSnapshot, current: SchemaSnapshot): MigrationDiff {\n const ctx: DiffContext = { changes: [], breakingChanges: [] };\n\n const previousTypeNames = new Set(Object.keys(previous.types));\n const currentTypeNames = new Set(Object.keys(current.types));\n\n // Check for added types\n for (const typeName of currentTypeNames) {\n if (!previousTypeNames.has(typeName)) {\n ctx.changes.push({\n kind: \"type_added\",\n typeName,\n 
after: current.types[typeName],\n });\n }\n }\n\n // Check for removed types\n for (const typeName of previousTypeNames) {\n if (!currentTypeNames.has(typeName)) {\n ctx.changes.push({\n kind: \"type_removed\",\n typeName,\n before: previous.types[typeName],\n });\n }\n }\n\n // Check for modified types\n for (const typeName of currentTypeNames) {\n if (!previousTypeNames.has(typeName)) continue;\n\n const prevType = previous.types[typeName];\n const currType = current.types[typeName];\n\n // Compare fields\n compareTypeFields(ctx, typeName, prevType, currType);\n\n // Compare indexes\n compareIndexes(ctx, typeName, prevType.indexes, currType.indexes);\n\n // Compare file fields\n compareFiles(ctx, typeName, prevType.files, currType.files);\n\n // Compare relationships\n compareRelationships(\n ctx,\n typeName,\n \"forward\",\n prevType.forwardRelationships,\n currType.forwardRelationships,\n );\n compareRelationships(\n ctx,\n typeName,\n \"backward\",\n prevType.backwardRelationships,\n currType.backwardRelationships,\n );\n\n // Compare permissions\n comparePermissions(\n ctx,\n typeName,\n prevType.permissions?.record,\n currType.permissions?.record,\n prevType.permissions?.gql,\n currType.permissions?.gql,\n );\n }\n\n return {\n version: SCHEMA_SNAPSHOT_VERSION,\n namespace: current.namespace,\n createdAt: new Date().toISOString(),\n changes: ctx.changes,\n hasBreakingChanges: ctx.breakingChanges.length > 0,\n breakingChanges: ctx.breakingChanges,\n requiresMigrationScript: ctx.breakingChanges.length > 0,\n };\n}\n\n/**\n * Compare local types with a snapshot and generate a diff\n * @param {SchemaSnapshot} snapshot - Schema snapshot to compare against\n * @param {Record<string, TailorDBType>} localTypes - Local type definitions\n * @param {string} namespace - Namespace for comparison\n * @returns {MigrationDiff} Migration diff\n */\nexport function compareLocalTypesWithSnapshot(\n snapshot: SchemaSnapshot,\n localTypes: Record<string, TailorDBType>,\n 
namespace: string,\n): MigrationDiff {\n const currentSnapshot = createSnapshotFromLocalTypes(localTypes, namespace);\n return compareSnapshots(snapshot, currentSnapshot);\n}\n\n// ============================================================================\n// Snapshot Writing\n// ============================================================================\n\n/**\n * Write a schema snapshot to a file (creates directory structure)\n * @param {SchemaSnapshot} snapshot - Snapshot to write\n * @param {string} migrationsDir - Migrations directory path\n * @param {number} num - Migration number\n * @returns {string} Path to the written file\n */\nexport function writeSnapshot(\n snapshot: SchemaSnapshot,\n migrationsDir: string,\n num: number,\n): string {\n const migrationDir = getMigrationDirPath(migrationsDir, num);\n fs.mkdirSync(migrationDir, { recursive: true });\n const filePath = getMigrationFilePath(migrationsDir, num, \"schema\");\n fs.writeFileSync(filePath, JSON.stringify(snapshot, null, 2));\n return filePath;\n}\n\n/**\n * Write a migration diff to a file (creates directory structure)\n * @param {MigrationDiff} diff - Diff to write\n * @param {string} migrationsDir - Migrations directory path\n * @param {number} num - Migration number\n * @returns {string} Path to the written file\n */\nexport function writeDiff(diff: MigrationDiff, migrationsDir: string, num: number): string {\n const migrationDir = getMigrationDirPath(migrationsDir, num);\n fs.mkdirSync(migrationDir, { recursive: true });\n const filePath = getMigrationFilePath(migrationsDir, num, \"diff\");\n fs.writeFileSync(filePath, JSON.stringify(diff, null, 2));\n return filePath;\n}\n\n// ============================================================================\n// Migration Validation\n// ============================================================================\n\n/**\n * Validation error for migration files\n */\nexport interface MigrationValidationError {\n type: \"missing_schema\" | 
\"missing_diff\" | \"duplicate\" | \"gap\" | \"invalid_schema_number\";\n message: string;\n migrationNumber?: number;\n}\n\n/**\n * Validate migration files in a directory\n *\n * Checks:\n * - Schema file exists at 0000 (initial schema)\n * - No gaps in migration numbers\n * - No duplicate migration numbers (schema at 0000, diffs at 1+)\n * - Diff files exist for migrations 1+\n * @param {string} migrationsDir - Migrations directory path\n * @returns {MigrationValidationError[]} Array of validation errors (empty if valid)\n */\nexport function validateMigrationFiles(migrationsDir: string): MigrationValidationError[] {\n const errors: MigrationValidationError[] = [];\n\n if (!fs.existsSync(migrationsDir)) {\n // No migrations directory - this is valid (no migrations yet)\n return errors;\n }\n\n // Use getMigrationFiles to get directory-based migration files\n const migrationFiles = getMigrationFiles(migrationsDir);\n if (migrationFiles.length === 0) {\n // No migration files at all - valid\n return errors;\n }\n\n // Categorize files by type\n const schemaFiles: number[] = [];\n const diffFiles: number[] = [];\n\n for (const file of migrationFiles) {\n if (file.type === \"schema\") {\n schemaFiles.push(file.number);\n } else if (file.type === \"diff\") {\n diffFiles.push(file.number);\n }\n }\n\n // Check for schema file at INITIAL_SCHEMA_NUMBER (0000)\n if (!schemaFiles.includes(INITIAL_SCHEMA_NUMBER)) {\n errors.push({\n type: \"missing_schema\",\n message: `Initial schema snapshot (${formatMigrationNumber(\n INITIAL_SCHEMA_NUMBER,\n )}/schema.json) is missing`,\n migrationNumber: INITIAL_SCHEMA_NUMBER,\n });\n }\n\n // Check for schema files at wrong positions (only 0000 should have schema)\n for (const num of schemaFiles) {\n if (num !== INITIAL_SCHEMA_NUMBER) {\n errors.push({\n type: \"invalid_schema_number\",\n message: `Schema file found at migration ${formatMigrationNumber(\n num,\n )}, but schema should only exist at 
${formatMigrationNumber(INITIAL_SCHEMA_NUMBER)}`,\n migrationNumber: num,\n });\n }\n }\n\n // Get all migration numbers\n const allNumbers = [...new Set([...schemaFiles, ...diffFiles])].sort((a, b) => a - b);\n\n if (allNumbers.length === 0) {\n return errors;\n }\n\n // Check for duplicate files (same number with both schema and diff, except for INITIAL_SCHEMA_NUMBER)\n for (const num of schemaFiles) {\n if (num !== INITIAL_SCHEMA_NUMBER && diffFiles.includes(num)) {\n errors.push({\n type: \"duplicate\",\n message: `Migration ${formatMigrationNumber(num)} has both schema and diff files`,\n migrationNumber: num,\n });\n }\n }\n\n // Check for gaps in sequence (from INITIAL_SCHEMA_NUMBER to max)\n const maxNum = Math.max(...allNumbers);\n for (let i = INITIAL_SCHEMA_NUMBER; i <= maxNum; i++) {\n if (!allNumbers.includes(i)) {\n errors.push({\n type: \"gap\",\n message: `Migration ${formatMigrationNumber(i)} is missing (gap in sequence)`,\n migrationNumber: i,\n });\n }\n }\n\n // Check that migrations > INITIAL_SCHEMA_NUMBER have diff files\n for (const num of allNumbers) {\n if (num > INITIAL_SCHEMA_NUMBER && !diffFiles.includes(num)) {\n errors.push({\n type: \"missing_diff\",\n message: `Migration ${formatMigrationNumber(num)} is missing diff file`,\n migrationNumber: num,\n });\n }\n }\n\n return errors;\n}\n\n/**\n * Validate migration files and throw if invalid\n * @param {string} migrationsDir - Migrations directory path\n * @param {string} namespace - Namespace for error messages\n * @throws {Error} If validation fails\n */\nexport function assertValidMigrationFiles(migrationsDir: string, namespace: string): void {\n const errors = validateMigrationFiles(migrationsDir);\n if (errors.length > 0) {\n const errorMessages = errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n throw new Error(\n `Migration file validation failed for namespace \"${namespace}\":\\n${errorMessages}`,\n );\n }\n}\n\n// 
============================================================================\n// Remote Schema Verification\n// ============================================================================\n\n/**\n * Convert remote ParsedTailorDBType to SnapshotFieldConfig for comparison\n * @param {ProtoTailorDBType} remoteType - Remote TailorDB type from API\n * @returns {Record<string, SnapshotFieldConfig>} Converted field configs\n */\nfunction convertRemoteFieldsToSnapshot(\n remoteType: ProtoTailorDBType,\n): Record<string, SnapshotFieldConfig> {\n const fields: Record<string, SnapshotFieldConfig> = {};\n const remoteFields = remoteType.schema?.fields ?? {};\n\n for (const [fieldName, remoteField] of Object.entries(remoteFields)) {\n const config: SnapshotFieldConfig = {\n type: remoteField.type,\n required: remoteField.required,\n };\n\n if (remoteField.array) config.array = true;\n if (remoteField.index) config.index = true;\n if (remoteField.unique) config.unique = true;\n if (remoteField.foreignKey) {\n config.foreignKey = true;\n if (remoteField.foreignKeyType) config.foreignKeyType = remoteField.foreignKeyType;\n if (remoteField.foreignKeyField) config.foreignKeyField = remoteField.foreignKeyField;\n }\n if (remoteField.allowedValues && remoteField.allowedValues.length > 0) {\n config.allowedValues = remoteField.allowedValues.map((v) => ({\n value: v.value,\n ...(v.description && { description: v.description }),\n }));\n }\n\n if (remoteField.description) config.description = remoteField.description;\n if (remoteField.vector) config.vector = true;\n\n if (remoteField.hooks) {\n config.hooks = {};\n if (remoteField.hooks.create?.expr) {\n config.hooks.create = { expr: remoteField.hooks.create.expr };\n }\n if (remoteField.hooks.update?.expr) {\n config.hooks.update = { expr: remoteField.hooks.update.expr };\n }\n }\n\n if (remoteField.validate && remoteField.validate.length > 0) {\n config.validate = remoteField.validate.map((v) => ({\n script: { expr: v.script?.expr ?? 
\"\" },\n errorMessage: v.errorMessage ?? \"\",\n }));\n }\n\n if (remoteField.serial) {\n config.serial = {\n start: Number(remoteField.serial.start),\n ...(remoteField.serial.maxValue && { maxValue: Number(remoteField.serial.maxValue) }),\n ...(remoteField.serial.format && { format: remoteField.serial.format }),\n };\n }\n\n if (remoteField.scale !== undefined) config.scale = remoteField.scale;\n\n // TODO: Add nested field conversion when remote API supports it\n\n fields[fieldName] = config;\n }\n\n return fields;\n}\n\n/**\n * Compare a single field between remote and snapshot\n * @param {string} typeName - Name of the type\n * @param {string} fieldName - Name of the field\n * @param {SnapshotFieldConfig} remoteField - Remote field config\n * @param {SnapshotFieldConfig} snapshotField - Snapshot field config\n * @returns {SchemaDrift | null} Drift info or null if fields match\n */\nfunction compareFields(\n typeName: string,\n fieldName: string,\n remoteField: SnapshotFieldConfig,\n snapshotField: SnapshotFieldConfig,\n): SchemaDrift | null {\n const differences: string[] = [];\n\n // Compare type\n if (remoteField.type !== snapshotField.type) {\n differences.push(`type: remote=${remoteField.type}, expected=${snapshotField.type}`);\n }\n\n // Compare required\n if (remoteField.required !== snapshotField.required) {\n differences.push(\n `required: remote=${remoteField.required}, expected=${snapshotField.required}`,\n );\n }\n\n // Compare array\n const remoteArray = remoteField.array ?? false;\n const snapshotArray = snapshotField.array ?? false;\n if (remoteArray !== snapshotArray) {\n differences.push(`array: remote=${remoteArray}, expected=${snapshotArray}`);\n }\n\n // Compare unique\n const remoteUnique = remoteField.unique ?? false;\n const snapshotUnique = snapshotField.unique ?? 
false;\n if (remoteUnique !== snapshotUnique) {\n differences.push(`unique: remote=${remoteUnique}, expected=${snapshotUnique}`);\n }\n\n // Compare foreignKey\n const remoteFk = remoteField.foreignKey ?? false;\n const snapshotFk = snapshotField.foreignKey ?? false;\n if (remoteFk !== snapshotFk) {\n differences.push(`foreignKey: remote=${remoteFk}, expected=${snapshotFk}`);\n }\n\n // Compare foreignKeyType\n if (remoteField.foreignKeyType !== snapshotField.foreignKeyType) {\n differences.push(\n `foreignKeyType: remote=${remoteField.foreignKeyType ?? \"none\"}, expected=${snapshotField.foreignKeyType ?? \"none\"}`,\n );\n }\n\n const remoteAllowed = remoteField.allowedValues ?? [];\n const snapshotAllowed = snapshotField.allowedValues ?? [];\n const remoteAllowedValues = new Set(remoteAllowed.map((v) => v.value));\n const snapshotAllowedValues = new Set(snapshotAllowed.map((v) => v.value));\n if (remoteAllowedValues.size !== snapshotAllowedValues.size) {\n differences.push(\n `allowedValues count: remote=${remoteAllowedValues.size}, expected=${snapshotAllowedValues.size}`,\n );\n } else {\n for (const v of remoteAllowedValues) {\n if (!snapshotAllowedValues.has(v)) {\n differences.push(`allowedValues: remote has '${v}' not in snapshot`);\n break;\n }\n }\n for (const v of snapshotAllowedValues) {\n if (!remoteAllowedValues.has(v)) {\n differences.push(`allowedValues: snapshot has '${v}' not in remote`);\n break;\n }\n }\n }\n\n const remoteVector = remoteField.vector ?? false;\n const snapshotVector = snapshotField.vector ?? 
false;\n if (remoteVector !== snapshotVector) {\n differences.push(`vector: remote=${remoteVector}, expected=${snapshotVector}`);\n }\n\n if (remoteField.scale !== snapshotField.scale) {\n differences.push(`scale: remote=${remoteField.scale}, expected=${snapshotField.scale}`);\n }\n\n if (differences.length > 0) {\n return {\n typeName,\n kind: \"field_mismatch\",\n fieldName,\n details: differences.join(\"; \"),\n };\n }\n\n return null;\n}\n\n/**\n * System fields that are auto-generated and should be excluded from comparison\n */\nconst SYSTEM_FIELDS = new Set([\"id\"]);\n\n/**\n * Compare remote TailorDB types with a local snapshot\n * @param {ProtoTailorDBType[]} remoteTypes - Remote types from listParsedTailorDBTypes API\n * @param {SchemaSnapshot} snapshot - Local schema snapshot\n * @returns {SchemaDrift[]} List of drifts detected\n */\nexport function compareRemoteWithSnapshot(\n remoteTypes: ProtoTailorDBType[],\n snapshot: SchemaSnapshot,\n): SchemaDrift[] {\n const drifts: SchemaDrift[] = [];\n\n // Build maps for easy lookup\n const remoteTypeMap = new Map<string, ProtoTailorDBType>();\n for (const remoteType of remoteTypes) {\n remoteTypeMap.set(remoteType.name, remoteType);\n }\n\n const snapshotTypeNames = new Set(Object.keys(snapshot.types));\n const remoteTypeNames = new Set(remoteTypeMap.keys());\n\n // Check for types missing in remote\n for (const typeName of snapshotTypeNames) {\n if (!remoteTypeNames.has(typeName)) {\n drifts.push({\n typeName,\n kind: \"type_missing_remote\",\n details: `Type '${typeName}' exists in snapshot but not in remote`,\n });\n }\n }\n\n // Check for types missing in snapshot (unexpected types in remote)\n for (const typeName of remoteTypeNames) {\n if (!snapshotTypeNames.has(typeName)) {\n drifts.push({\n typeName,\n kind: \"type_missing_local\",\n details: `Type '${typeName}' exists in remote but not in snapshot`,\n });\n }\n }\n\n // Compare fields for types that exist in both\n for (const typeName of 
snapshotTypeNames) {\n if (!remoteTypeNames.has(typeName)) continue;\n\n const remoteType = remoteTypeMap.get(typeName)!;\n const snapshotType = snapshot.types[typeName];\n\n const remoteFields = convertRemoteFieldsToSnapshot(remoteType);\n const snapshotFields = snapshotType.fields;\n\n // Exclude system fields (like 'id') from comparison\n const remoteFieldNames = new Set(\n Object.keys(remoteFields).filter((f) => !SYSTEM_FIELDS.has(f)),\n );\n const snapshotFieldNames = new Set(\n Object.keys(snapshotFields).filter((f) => !SYSTEM_FIELDS.has(f)),\n );\n\n // Check for fields missing in remote\n for (const fieldName of snapshotFieldNames) {\n if (!remoteFieldNames.has(fieldName)) {\n drifts.push({\n typeName,\n kind: \"field_missing_remote\",\n fieldName,\n details: `Field '${fieldName}' exists in snapshot but not in remote`,\n });\n }\n }\n\n // Check for fields missing in snapshot\n for (const fieldName of remoteFieldNames) {\n if (!snapshotFieldNames.has(fieldName)) {\n drifts.push({\n typeName,\n kind: \"field_missing_local\",\n fieldName,\n details: `Field '${fieldName}' exists in remote but not in snapshot`,\n });\n }\n }\n\n // Compare fields that exist in both\n for (const fieldName of snapshotFieldNames) {\n if (!remoteFieldNames.has(fieldName)) continue;\n\n const drift = compareFields(\n typeName,\n fieldName,\n remoteFields[fieldName],\n snapshotFields[fieldName],\n );\n if (drift) {\n drifts.push(drift);\n }\n }\n }\n\n return drifts;\n}\n\n/**\n * Format schema drifts for display\n * @param {SchemaDrift[]} drifts - List of drifts to format\n * @returns {string} Formatted drift report\n */\nexport function formatSchemaDrifts(drifts: SchemaDrift[]): string {\n if (drifts.length === 0) {\n return \"No schema drifts detected.\";\n }\n\n const lines: string[] = [];\n\n // Group drifts by type\n const driftsByType = new Map<string, SchemaDrift[]>();\n for (const drift of drifts) {\n const existing = driftsByType.get(drift.typeName) ?? 
[];\n existing.push(drift);\n driftsByType.set(drift.typeName, existing);\n }\n\n for (const [typeName, typeDrifts] of driftsByType) {\n lines.push(` Type '${typeName}':`);\n for (const drift of typeDrifts) {\n if (drift.fieldName) {\n lines.push(` - Field '${drift.fieldName}': ${drift.details}`);\n } else {\n lines.push(` - ${drift.details}`);\n }\n }\n }\n\n return lines.join(\"\\n\");\n}\n","/**\n * Migration script bundler for TailorDB migrations\n *\n * Bundles migration scripts to be executed via TestExecScript API\n */\n\nimport * as fs from \"node:fs\";\nimport ml from \"multiline-ts\";\nimport * as path from \"pathe\";\nimport { resolveTSConfig } from \"pkg-types\";\nimport * as rolldown from \"rolldown\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\n\nexport interface MigrationBundleResult {\n namespace: string;\n migrationNumber: number;\n bundledCode: string;\n}\n\n/**\n * Bundle a single migration script\n *\n * Creates an entry that:\n * 1. Imports the migration script's main function\n * 2. Defines getDB() function inline\n * 3. Wraps migration in a transaction using getDB()\n * 4. 
Exports as main() for TestExecScript\n * @param {string} sourceFile - Path to the migration script file\n * @param {string} namespace - TailorDB namespace\n * @param {number} migrationNumber - Migration number\n * @returns {Promise<MigrationBundleResult>} Bundled migration result\n */\nexport async function bundleMigrationScript(\n sourceFile: string,\n namespace: string,\n migrationNumber: number,\n): Promise<MigrationBundleResult> {\n // Output directory in .tailor-sdk (relative to project root)\n const outputDir = path.resolve(getDistDir(), \"migrations\");\n fs.mkdirSync(outputDir, { recursive: true });\n\n // Entry file in output directory (consistent with resolver/executor bundlers)\n const entryPath = path.join(outputDir, `migration_${namespace}_${migrationNumber}.entry.js`);\n const outputPath = path.join(outputDir, `migration_${namespace}_${migrationNumber}.js`);\n\n const absoluteSourcePath = path.resolve(sourceFile).replace(/\\\\/g, \"/\");\n\n // Create entry file that wraps migration in a transaction\n // getDB function is defined inline to avoid dependency on generated types\n const entryContent = ml /* js */ `\n import { main as _migrationMain } from \"${absoluteSourcePath}\";\n import { Kysely, TailordbDialect } from \"@tailor-platform/sdk/kysely\";\n\n function getDB(namespace) {\n const client = new tailordb.Client({ namespace });\n return new Kysely({\n dialect: new TailordbDialect(client),\n });\n }\n\n export async function main(input) {\n const db = getDB(\"${namespace}\");\n await db.transaction().execute(async (trx) => {\n await _migrationMain(trx);\n });\n return { success: true };\n }\n `;\n fs.writeFileSync(entryPath, entryContent);\n\n let tsconfig: string | undefined;\n try {\n tsconfig = await resolveTSConfig();\n } catch {\n tsconfig = undefined;\n }\n\n // Bundle with tree-shaking\n await rolldown.build(\n rolldown.defineConfig({\n input: entryPath,\n output: {\n file: outputPath,\n format: \"esm\",\n sourcemap: false,\n minify: 
false,\n inlineDynamicImports: true,\n globals: {\n tailordb: \"tailordb\",\n },\n },\n external: [\"tailordb\"],\n resolve: {\n conditionNames: [\"node\", \"import\"],\n },\n tsconfig,\n treeshake: {\n moduleSideEffects: false,\n annotations: true,\n unknownGlobalSideEffects: false,\n },\n logLevel: \"silent\",\n }) as rolldown.BuildOptions,\n );\n\n // Read bundled output\n const bundledCode = fs.readFileSync(outputPath, \"utf-8\");\n\n // Entry file remains in output directory (consistent with resolver/executor bundlers)\n\n return {\n namespace,\n migrationNumber,\n bundledCode,\n };\n}\n","/**\n * Types for TailorDB migration execution\n */\n\nimport { formatMigrationNumber } from \"./snapshot\";\nimport type { MigrationDiff } from \"./diff-calculator\";\n\n// ============================================================================\n// Label Constants\n// ============================================================================\n\n/**\n * Maximum length for Kubernetes label values\n * Labels must match pattern: ^[a-z][a-z0-9_-]{0,62}\n */\nexport const MAX_LABEL_LENGTH = 63;\n\n/**\n * Prefix added to migration numbers in labels (required because migration names start with numbers)\n */\nexport const MIGRATION_LABEL_PREFIX = \"m\";\n\n/**\n * Label key for storing migration state in TailorDB Service metadata\n */\nexport const MIGRATION_LABEL_KEY = \"sdk-migration\";\n\n// ============================================================================\n// Error Constants\n// ============================================================================\n\n/**\n * Error patterns that indicate schema corruption\n */\nexport const SCHEMA_ERROR_PATTERNS = [\n \"failed to fetch schema\",\n \"sqlaccess error\",\n \"schema not found\",\n \"invalid schema\",\n] as const;\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Pending 
migration to be executed\n */\nexport interface PendingMigration {\n /** Migration number */\n number: number;\n /** Path to migration script file */\n scriptPath: string;\n /** Path to diff file */\n diffPath: string;\n /** Namespace this migration belongs to */\n namespace: string;\n /** Migrations directory path */\n migrationsDir: string;\n /** Migration diff content */\n diff: MigrationDiff;\n}\n\n// ============================================================================\n// Label Helper Functions\n// ============================================================================\n\n/**\n * Sanitize migration number for use as label value\n * Label pattern: ^[a-z][a-z0-9_-]{0,62}\n * - Must start with lowercase letter (add prefix since migration numbers start with digits)\n * - Max 63 characters\n * @param {number} migrationNumber - Migration number to sanitize\n * @returns {string} Sanitized label value\n */\nexport function sanitizeMigrationLabel(migrationNumber: number): string {\n const sanitized = MIGRATION_LABEL_PREFIX + formatMigrationNumber(migrationNumber);\n return sanitized.slice(0, MAX_LABEL_LENGTH);\n}\n\n/**\n * Parse migration number from label value\n * @param {string} label - Label value (e.g., \"m0001\")\n * @returns {number | null} Parsed number or null if invalid\n */\nexport function parseMigrationLabelNumber(label: string): number | null {\n if (!label.startsWith(MIGRATION_LABEL_PREFIX)) return null;\n const numStr = label.slice(MIGRATION_LABEL_PREFIX.length);\n const num = parseInt(numStr, 10);\n return isNaN(num) ? 
null : num;\n}\n\n// ============================================================================\n// Error Helper Functions\n// ============================================================================\n\n/**\n * Check if an error message indicates schema corruption\n * @param {string} errorMessage - Error message to check\n * @returns {boolean} True if error indicates schema corruption\n */\nexport function isSchemaError(errorMessage: string): boolean {\n const lowerMessage = errorMessage.toLowerCase();\n return SCHEMA_ERROR_PATTERNS.some((pattern) => lowerMessage.includes(pattern));\n}\n\n// ============================================================================\n// Remote Schema Verification Types\n// ============================================================================\n\n/**\n * Type of schema drift detected between remote and local snapshot\n */\nexport type SchemaDriftKind =\n | \"type_missing_remote\"\n | \"type_missing_local\"\n | \"field_missing_remote\"\n | \"field_missing_local\"\n | \"field_mismatch\";\n\n/**\n * Single schema drift item\n */\nexport interface SchemaDrift {\n typeName: string;\n kind: SchemaDriftKind;\n fieldName?: string;\n details: string;\n}\n\n/**\n * Result of remote schema verification for a single namespace\n */\nexport interface RemoteSchemaVerificationResult {\n namespace: string;\n remoteMigrationNumber: number;\n drifts: SchemaDrift[];\n hasDrift: boolean;\n}\n","/**\n * Script execution service for TestExecScript API\n *\n * Provides a reusable utility for executing scripts via the TestExecScript API\n * with polling for completion status.\n */\n\nimport { FunctionExecution_Status } from \"@tailor-proto/tailor/v1/function_resource_pb\";\nimport type { OperatorClient } from \"@/cli/shared/client\";\nimport type { AuthInvoker } from \"@tailor-proto/tailor/v1/auth_resource_pb\";\n\n/**\n * Default polling interval for script execution status in milliseconds (1 second)\n */\nexport const DEFAULT_POLL_INTERVAL = 
1000;\n\n/**\n * Options for script execution\n */\nexport interface ScriptExecutionOptions {\n /** Operator client instance */\n client: OperatorClient;\n /** Workspace ID */\n workspaceId: string;\n /** Script name (for identification) */\n name: string;\n /** Bundled script code to execute */\n code: string;\n /** Optional JSON string argument to pass to the script */\n arg?: string;\n /** Auth invoker for script execution */\n invoker: AuthInvoker;\n /** Polling interval in milliseconds (default: 1000ms) */\n pollInterval?: number;\n}\n\n/**\n * Result of script execution\n */\nexport interface ScriptExecutionResult {\n /** Whether the script executed successfully */\n success: boolean;\n /** Logs output from the script execution */\n logs: string;\n /** Result value from the script execution */\n result: string;\n /** Error message if execution failed */\n error?: string;\n}\n\n/**\n * Result from waiting for execution completion\n */\nexport interface ExecutionWaitResult {\n /** Execution status */\n status: FunctionExecution_Status;\n /** Logs output from the execution */\n logs: string;\n /** Result value from the execution */\n result: string;\n}\n\n/**\n * Wait for a function execution to complete\n *\n * Polls the getFunctionExecution API until the execution reaches a terminal state\n * (SUCCESS or FAILED).\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {string} executionId - Execution ID to wait for\n * @param {number} [pollInterval] - Polling interval in milliseconds (default: 1000ms)\n * @returns {Promise<ExecutionWaitResult>} Execution result\n * @throws {Error} If execution is not found\n */\nexport async function waitForExecution(\n client: OperatorClient,\n workspaceId: string,\n executionId: string,\n pollInterval: number = DEFAULT_POLL_INTERVAL,\n): Promise<ExecutionWaitResult> {\n while (true) {\n const { execution } = await client.getFunctionExecution({\n workspaceId,\n 
executionId,\n });\n\n if (!execution) {\n throw new Error(`Execution '${executionId}' not found.`);\n }\n\n // Check for terminal states\n if (\n execution.status === FunctionExecution_Status.SUCCESS ||\n execution.status === FunctionExecution_Status.FAILED\n ) {\n return {\n status: execution.status,\n logs: execution.logs,\n result: execution.result,\n };\n }\n\n // Wait before polling again\n await new Promise((resolve) => setTimeout(resolve, pollInterval));\n }\n}\n\n/**\n * Execute a script via TestExecScript API and wait for completion\n *\n * This function:\n * 1. Calls testExecScript API to start execution\n * 2. Polls getFunctionExecution until completion\n * 3. Returns structured result with success/failure status\n * @param {ScriptExecutionOptions} options - Execution options\n * @returns {Promise<ScriptExecutionResult>} Execution result\n */\nexport async function executeScript(\n options: ScriptExecutionOptions,\n): Promise<ScriptExecutionResult> {\n const { client, workspaceId, name, code, arg, invoker, pollInterval } = options;\n\n // Execute the script\n const response = await client.testExecScript({\n workspaceId,\n name,\n code,\n arg: arg ?? 
JSON.stringify({}),\n invoker,\n });\n const executionId = response.executionId;\n\n // Wait for completion\n const result = await waitForExecution(client, workspaceId, executionId, pollInterval);\n\n if (result.status === FunctionExecution_Status.SUCCESS) {\n return {\n success: true,\n logs: result.logs,\n result: result.result,\n };\n } else {\n return {\n success: false,\n logs: result.logs,\n result: result.result || response.result,\n error: result.result || response.result || \"Script execution failed with unknown error\",\n };\n }\n}\n","/**\n * Migration execution service for TailorDB migrations\n *\n * Handles detection and execution of pending migration scripts via TestExecScript API.\n */\n\nimport * as fs from \"node:fs\";\nimport { create } from \"@bufbuild/protobuf\";\nimport { AuthInvokerSchema, type AuthInvoker } from \"@tailor-proto/tailor/v1/auth_resource_pb\";\nimport ora from \"ora\";\nimport { bundleMigrationScript } from \"@/cli/commands/tailordb/migrate/bundler\";\nimport { type NamespaceWithMigrations } from \"@/cli/commands/tailordb/migrate/config\";\nimport {\n loadDiff,\n getMigrationFiles,\n getMigrationFilePath,\n formatMigrationNumber,\n} from \"@/cli/commands/tailordb/migrate/snapshot\";\nimport {\n type PendingMigration,\n MIGRATION_LABEL_KEY,\n parseMigrationLabelNumber,\n} from \"@/cli/commands/tailordb/migrate/types\";\nimport { type OperatorClient } from \"@/cli/shared/client\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { executeScript } from \"@/cli/shared/script-executor\";\nimport { trnPrefix } from \"../label\";\nimport type { TailorDBServiceConfig } from \"@/types/tailordb.generated\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface MigrationExecutionOptions {\n client: OperatorClient;\n workspaceId: string;\n authInvoker: AuthInvoker;\n}\n\n/**\n * Context 
for migration execution with per-namespace configuration\n */\nexport interface MigrationContext {\n client: OperatorClient;\n workspaceId: string;\n authNamespace: string;\n machineUsers: string[] | undefined;\n dbConfig: Record<string, TailorDBServiceConfig | undefined>;\n}\n\ninterface ExecutionResult {\n namespace: string;\n migrationNumber: number;\n success: boolean;\n logs?: string;\n error?: string;\n}\n\n// ============================================================================\n// Migration Detection\n// ============================================================================\n\n/**\n * Get the current migration label from TailorDB Service metadata\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {string} namespace - TailorDB namespace\n * @returns {Promise<number>} Current migration number (0 if none)\n */\nasync function getCurrentMigrationNumber(\n client: OperatorClient,\n workspaceId: string,\n namespace: string,\n): Promise<number> {\n try {\n const trn = `${trnPrefix(workspaceId)}:tailordb:${namespace}`;\n\n const { metadata } = await client.getMetadata({ trn });\n\n const label = metadata?.labels[MIGRATION_LABEL_KEY];\n\n if (!label) {\n return 0;\n }\n const num = parseMigrationLabelNumber(label);\n return num ?? 
0;\n } catch {\n return 0;\n }\n}\n\n/**\n * Detect pending migrations that need to be executed\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {NamespaceWithMigrations[]} namespacesWithMigrations - Namespaces with migrations config\n * @returns {Promise<PendingMigration[]>} List of pending migrations\n */\nexport async function detectPendingMigrations(\n client: OperatorClient,\n workspaceId: string,\n namespacesWithMigrations: NamespaceWithMigrations[],\n): Promise<PendingMigration[]> {\n const pendingMigrations: PendingMigration[] = [];\n\n for (const { namespace, migrationsDir } of namespacesWithMigrations) {\n // Get current applied migration number\n const currentMigration = await getCurrentMigrationNumber(client, workspaceId, namespace);\n\n // Get all migration files\n const migrationFiles = getMigrationFiles(migrationsDir);\n\n // Find migrations that haven't been applied yet\n for (const file of migrationFiles) {\n if (file.number <= currentMigration) {\n continue;\n }\n\n // Check for diff file (all migrations must have a diff)\n const diffPath = getMigrationFilePath(migrationsDir, file.number, \"diff\");\n if (!fs.existsSync(diffPath)) {\n continue;\n }\n\n // Load the diff to check if migration script is required\n const diff = loadDiff(diffPath);\n\n // Check for migration script (only required for breaking changes)\n const scriptPath = getMigrationFilePath(migrationsDir, file.number, \"migrate\");\n if (diff.requiresMigrationScript && !fs.existsSync(scriptPath)) {\n logger.warn(\n `Migration ${namespace}/${file.number} requires a script but migrate.ts not found`,\n );\n continue;\n }\n\n pendingMigrations.push({\n number: file.number,\n scriptPath, // May not exist for non-breaking changes\n diffPath,\n namespace,\n migrationsDir,\n diff,\n });\n }\n }\n\n // Sort by namespace and migration number\n pendingMigrations.sort((a, b) => {\n if (a.namespace !== b.namespace) {\n return 
a.namespace.localeCompare(b.namespace);\n }\n return a.number - b.number;\n });\n\n return pendingMigrations;\n}\n\n// ============================================================================\n// Migration Execution\n// ============================================================================\n\n/**\n * Execute a single migration script\n * @param {MigrationExecutionOptions} options - Execution options\n * @param {PendingMigration} migration - Migration to execute\n * @returns {Promise<ExecutionResult>} Execution result\n */\nasync function executeSingleMigration(\n options: MigrationExecutionOptions,\n migration: PendingMigration,\n): Promise<ExecutionResult> {\n const { client, workspaceId, authInvoker } = options;\n\n const migrationName = `migration-${migration.namespace}-${formatMigrationNumber(migration.number)}.js`;\n\n // Bundle the migration script\n const bundleResult = await bundleMigrationScript(\n migration.scriptPath,\n migration.namespace,\n migration.number,\n );\n\n // Execute the script using the shared script executor\n const result = await executeScript({\n client,\n workspaceId,\n name: migrationName,\n code: bundleResult.bundledCode,\n invoker: authInvoker,\n });\n\n return {\n namespace: migration.namespace,\n migrationNumber: migration.number,\n success: result.success,\n logs: result.logs,\n error: result.error,\n };\n}\n\n/**\n * Update the migration label on TailorDB Service metadata\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {string} namespace - TailorDB namespace\n * @param {number} migrationNumber - Migration number to set\n * @returns {Promise<void>}\n */\nexport async function updateMigrationLabel(\n client: OperatorClient,\n workspaceId: string,\n namespace: string,\n migrationNumber: number,\n): Promise<void> {\n const trn = `${trnPrefix(workspaceId)}:tailordb:${namespace}`;\n\n // Get existing metadata\n const { metadata } = await 
client.getMetadata({ trn });\n const existingLabels = metadata?.labels ?? {};\n\n const newLabel = `m${formatMigrationNumber(migrationNumber)}`;\n\n // Update with new migration label\n await client.setMetadata({\n trn,\n labels: {\n ...existingLabels,\n [MIGRATION_LABEL_KEY]: newLabel,\n },\n });\n}\n\n/**\n * Execute all pending migrations, grouping by namespace and using appropriate machine user\n * @param {MigrationContext} context - Migration context with per-namespace configuration\n * @param {PendingMigration[]} migrations - Migrations to execute\n * @returns {Promise<void>}\n */\nexport async function executeMigrations(\n context: MigrationContext,\n migrations: PendingMigration[],\n): Promise<void> {\n // Filter migrations that require script execution\n const migrationsWithScripts = migrations.filter((m) => m.diff.requiresMigrationScript);\n\n if (migrationsWithScripts.length === 0) {\n return;\n }\n\n // Group migrations by namespace\n const migrationsByNamespace = groupMigrationsByNamespace(migrationsWithScripts);\n\n // Execute migrations for each namespace with appropriate machine user\n for (const [namespace, namespaceMigrations] of migrationsByNamespace) {\n const dbConfig = context.dbConfig[namespace];\n const migrationConfig = dbConfig?.migration;\n\n // Get machine user name for this namespace\n const machineUserName = getMigrationMachineUser(migrationConfig, context.machineUsers);\n if (!machineUserName) {\n throw new Error(\n `No machine user available for migration execution in namespace '${namespace}'. 
` +\n \"Either configure 'migration.machineUser' in db config or define machine users in auth config.\",\n );\n }\n\n // Create authInvoker for this namespace\n const authInvoker = create(AuthInvokerSchema, {\n namespace: context.authNamespace,\n machineUserName,\n });\n\n const options: MigrationExecutionOptions = {\n client: context.client,\n workspaceId: context.workspaceId,\n authInvoker,\n };\n\n logger.info(`Using machine user: ${styles.bold(machineUserName)} for namespace '${namespace}'`);\n\n for (const migration of namespaceMigrations) {\n const migrationLabel = `${migration.namespace}/${formatMigrationNumber(migration.number)}`;\n const spinner = ora({\n text: `Executing migration ${migrationLabel}...`,\n prefixText: \"\",\n }).start();\n\n const result = await executeSingleMigration(options, migration);\n\n if (result.success) {\n spinner.succeed(`Migration ${migrationLabel} completed successfully`);\n\n // Show logs if any\n if (result.logs && result.logs.trim()) {\n logger.log(`Logs:\\n${result.logs}`);\n }\n } else {\n spinner.fail(`Migration ${migrationLabel} failed`);\n if (result.logs) {\n logger.error(`Logs:\\n${result.logs}`);\n }\n throw new Error(result.error ?? \"Migration failed\");\n }\n }\n }\n}\n\n/**\n * Get the machine user name for migration execution\n *\n * Priority:\n * 1. machineUser from migration config (if set)\n * 2. 
First machine user from auth config\n * @param {object | undefined} migrationConfig - Migration config for namespace\n * @param {string[] | undefined} machineUsers - Machine users from auth config\n * @returns {string | undefined} Machine user name or undefined if none available\n */\nexport function getMigrationMachineUser(\n migrationConfig: { machineUser?: string } | undefined,\n machineUsers: string[] | undefined,\n): string | undefined {\n // Priority 1: Explicit config\n if (migrationConfig?.machineUser) {\n return migrationConfig.machineUser;\n }\n\n // Priority 2: First machine user from auth\n if (machineUsers && machineUsers.length > 0) {\n return machineUsers[0];\n }\n\n return undefined;\n}\n\n/**\n * Group migrations by namespace\n * @param {PendingMigration[]} migrations - Migrations to group\n * @returns {Map<string, PendingMigration[]>} Migrations grouped by namespace\n */\nexport function groupMigrationsByNamespace(\n migrations: PendingMigration[],\n): Map<string, PendingMigration[]> {\n const grouped = new Map<string, PendingMigration[]>();\n for (const migration of migrations) {\n const existing = grouped.get(migration.namespace) ?? 
[];\n existing.push(migration);\n grouped.set(migration.namespace, existing);\n }\n return grouped;\n}\n","import { fromJson, type MessageInitShape } from \"@bufbuild/protobuf\";\nimport { ValueSchema } from \"@bufbuild/protobuf/wkt\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n type CreateTailorDBGQLPermissionRequestSchema,\n type CreateTailorDBServiceRequestSchema,\n type CreateTailorDBTypeRequestSchema,\n type DeleteTailorDBGQLPermissionRequestSchema,\n type DeleteTailorDBServiceRequestSchema,\n type DeleteTailorDBTypeRequestSchema,\n type UpdateTailorDBGQLPermissionRequestSchema,\n type UpdateTailorDBTypeRequestSchema,\n} from \"@tailor-proto/tailor/v1/tailordb_pb\";\nimport {\n TailorDBGQLPermission_Action,\n type TailorDBGQLPermission_ConditionSchema,\n type TailorDBGQLPermission_OperandSchema,\n TailorDBGQLPermission_Operator,\n TailorDBGQLPermission_Permit,\n type TailorDBGQLPermission_PolicySchema,\n type TailorDBGQLPermissionSchema,\n type TailorDBType as ProtoTailorDBType,\n type TailorDBType_FieldConfigSchema,\n type TailorDBType_FileConfigSchema,\n type TailorDBType_IndexSchema,\n type TailorDBType_Permission_ConditionSchema,\n type TailorDBType_Permission_OperandSchema,\n TailorDBType_Permission_Operator,\n TailorDBType_Permission_Permit,\n type TailorDBType_Permission_PolicySchema,\n type TailorDBType_PermissionSchema,\n TailorDBType_PermitAction,\n type TailorDBType_RelationshipConfigSchema,\n type TailorDBTypeSchema,\n} from \"@tailor-proto/tailor/v1/tailordb_resource_pb\";\nimport * as inflection from \"inflection\";\nimport * as path from \"pathe\";\nimport {\n getNamespacesWithMigrations,\n type NamespaceWithMigrations,\n} from \"@/cli/commands/tailordb/migrate/config\";\nimport {\n hasChanges,\n formatMigrationDiff,\n formatDiffSummary,\n type MigrationDiff,\n type DiffChange,\n} from \"@/cli/commands/tailordb/migrate/diff-calculator\";\nimport {\n reconstructSnapshotFromMigrations,\n 
compareLocalTypesWithSnapshot,\n assertValidMigrationFiles,\n formatMigrationNumber,\n compareRemoteWithSnapshot,\n formatSchemaDrifts,\n} from \"@/cli/commands/tailordb/migrate/snapshot\";\nimport { type TailorDBService } from \"@/cli/services/tailordb/service\";\nimport { fetchAll, type OperatorClient } from \"@/cli/shared/client\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { createChangeSet } from \"../change-set\";\nimport { buildMetaRequest, sdkNameLabelKey, trnPrefix, type WithLabel } from \"../label\";\nimport {\n executeMigrations,\n detectPendingMigrations,\n updateMigrationLabel,\n type MigrationContext,\n} from \"./migration\";\nimport type { ApplyPhase, PlanContext } from \"../apply\";\nimport type { OwnerConflict, UnmanagedResource } from \"../confirm\";\nimport type {\n PendingMigration,\n RemoteSchemaVerificationResult,\n} from \"@/cli/commands/tailordb/migrate/types\";\nimport type { LoadedConfig } from \"@/cli/shared/config-loader\";\nimport type { Executor } from \"@/types/executor.generated\";\nimport type {\n EnumValue,\n PermissionOperand,\n StandardActionPermission,\n StandardGqlPermissionPolicy,\n StandardPermissionCondition,\n StandardTailorTypeGqlPermission,\n StandardTailorTypePermission,\n OperatorFieldConfig,\n TailorDBType,\n} from \"@/types/tailordb\";\nimport type { GqlOperations, TailorDBServiceConfig } from \"@/types/tailordb.generated\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n// ============================================================================\n// Remote Schema Verification\n// ============================================================================\n\n/**\n * Fetch all TailorDB types from remote for a namespace\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {string} namespace - TailorDB namespace\n * @returns {Promise<ProtoTailorDBType[]>} Remote TailorDB types\n */\nasync 
function fetchRemoteTypes(\n client: OperatorClient,\n workspaceId: string,\n namespace: string,\n): Promise<ProtoTailorDBType[]> {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { tailordbTypes, nextPageToken } = await client.listTailorDBTypes({\n workspaceId,\n namespaceName: namespace,\n pageToken,\n pageSize: maxPageSize,\n });\n return [tailordbTypes, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n}\n\n/**\n * Get the current migration number from remote metadata\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {string} namespace - TailorDB namespace\n * @returns {Promise<number | null>} Current migration number, or null if no migration label exists\n */\nasync function getRemoteMigrationNumber(\n client: OperatorClient,\n workspaceId: string,\n namespace: string,\n): Promise<number | null> {\n try {\n const trn = `${trnPrefix(workspaceId)}:tailordb:${namespace}`;\n const { metadata } = await client.getMetadata({ trn });\n const label = metadata?.labels?.[\"sdk-migration\"];\n if (!label) return null; // No migration label means first apply\n const match = label.match(/^m(\\d+)$/);\n return match ? 
parseInt(match[1], 10) : null;\n } catch {\n return null;\n }\n}\n\n/**\n * Verify remote schema matches the expected snapshot state\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {NamespaceWithMigrations[]} namespacesWithMigrations - Namespaces with migration config\n * @returns {Promise<RemoteSchemaVerificationResult[]>} Verification results per namespace\n */\nasync function verifyRemoteSchema(\n client: OperatorClient,\n workspaceId: string,\n namespacesWithMigrations: NamespaceWithMigrations[],\n): Promise<RemoteSchemaVerificationResult[]> {\n const results: RemoteSchemaVerificationResult[] = [];\n\n for (const { namespace, migrationsDir } of namespacesWithMigrations) {\n // Get current remote migration number\n const remoteMigrationNumber = await getRemoteMigrationNumber(client, workspaceId, namespace);\n\n // If no migration label exists, this is likely a first apply - skip verification\n // Remote verification only makes sense when there's an established migration history\n if (remoteMigrationNumber === null) {\n results.push({\n namespace,\n remoteMigrationNumber: 0,\n drifts: [],\n hasDrift: false,\n });\n continue;\n }\n\n // Reconstruct snapshot at the remote migration version\n const expectedSnapshot = reconstructSnapshotFromMigrations(\n migrationsDir,\n remoteMigrationNumber,\n );\n if (!expectedSnapshot) {\n // No snapshots exist - skip verification\n results.push({\n namespace,\n remoteMigrationNumber,\n drifts: [],\n hasDrift: false,\n });\n continue;\n }\n\n // Fetch remote types\n const remoteTypes = await fetchRemoteTypes(client, workspaceId, namespace);\n\n // Compare remote with expected snapshot\n const drifts = compareRemoteWithSnapshot(remoteTypes, expectedSnapshot);\n\n results.push({\n namespace,\n remoteMigrationNumber,\n drifts,\n hasDrift: drifts.length > 0,\n });\n }\n\n return results;\n}\n\n/**\n * Format remote schema verification results for display\n * 
@param {RemoteSchemaVerificationResult[]} results - Verification results\n * @returns {string} Formatted results string\n */\nfunction formatRemoteVerificationResults(results: RemoteSchemaVerificationResult[]): string {\n const lines: string[] = [];\n\n for (const result of results) {\n if (!result.hasDrift) continue;\n\n lines.push(`Namespace: ${result.namespace}`);\n lines.push(` Remote migration: ${formatMigrationNumber(result.remoteMigrationNumber)}`);\n lines.push(` Differences:`);\n lines.push(formatSchemaDrifts(result.drifts));\n lines.push(\"\");\n }\n\n return lines.join(\"\\n\");\n}\n\n// ============================================================================\n// Migration Validation\n// ============================================================================\n\n/**\n * Validate migration files and detect pending migrations\n * @param {OperatorClient} client - Operator client instance\n * @param {string} workspaceId - Workspace ID\n * @param {ReadonlyMap<string, Record<string, TailorDBType>>} typesByNamespace - Types by namespace\n * @param {LoadedConfig} config - Loaded application config (includes path)\n * @param {boolean} noSchemaCheck - Whether to skip schema diff check\n * @returns {Promise<PendingMigration[]>} List of pending migrations\n */\nasync function validateAndDetectMigrations(\n client: OperatorClient,\n workspaceId: string,\n typesByNamespace: ReadonlyMap<string, Record<string, TailorDBType>>,\n config: LoadedConfig,\n noSchemaCheck: boolean,\n): Promise<PendingMigration[]> {\n const configDir = path.dirname(config.path);\n const namespacesWithMigrations = getNamespacesWithMigrations(config, configDir);\n let pendingMigrations: PendingMigration[] = [];\n\n if (namespacesWithMigrations.length > 0) {\n // Validate migration file integrity (sequential numbers, no gaps, no duplicates)\n for (const { namespace, migrationsDir } of namespacesWithMigrations) {\n assertValidMigrationFiles(migrationsDir, namespace);\n }\n\n // Check for 
schema diffs if not skipped\n if (!noSchemaCheck) {\n // 1. Check local types vs local snapshot (existing check)\n const migrationResults = await checkMigrationDiffs(\n typesByNamespace,\n namespacesWithMigrations,\n );\n const hasDiffs = migrationResults.some((r) => r.hasDiff);\n\n if (hasDiffs) {\n logger.error(\"Schema changes detected that are not in migration files:\");\n logger.log(formatMigrationCheckResults(migrationResults));\n logger.newline();\n logger.info(\"Run 'tailor-sdk tailordb migration generate' to create migration files.\");\n logger.info(\"Or use '--no-schema-check' to skip this check.\");\n throw new Error(\"Schema migration check failed\");\n }\n\n // 2. Check remote schema vs local snapshot (new check)\n const remoteVerificationResults = await verifyRemoteSchema(\n client,\n workspaceId,\n namespacesWithMigrations,\n );\n const hasRemoteDrift = remoteVerificationResults.some((r) => r.hasDrift);\n\n if (hasRemoteDrift) {\n logger.error(\"Remote schema drift detected:\");\n logger.log(formatRemoteVerificationResults(remoteVerificationResults));\n logger.newline();\n logger.info(\"This may indicate:\");\n logger.info(\" - Another developer applied different migrations\", { mode: \"plain\" });\n logger.info(\" - Manual schema changes were made directly\", { mode: \"plain\" });\n logger.info(\" - Migration history is out of sync\", { mode: \"plain\" });\n logger.newline();\n logger.info(\"Use '--no-schema-check' to skip this check (not recommended).\");\n throw new Error(\"Remote schema verification failed\");\n }\n }\n\n // Detect pending migrations (migration scripts that haven't been executed yet)\n pendingMigrations = await detectPendingMigrations(\n client,\n workspaceId,\n namespacesWithMigrations,\n );\n\n if (pendingMigrations.length > 0) {\n logger.newline();\n\n // Classify migrations by whether they require migration scripts\n const withScripts = pendingMigrations.filter((m) => m.diff.requiresMigrationScript);\n const withoutScripts = 
pendingMigrations.filter((m) => !m.diff.requiresMigrationScript);\n\n logger.info(`Applying ${pendingMigrations.length} migration(s):`);\n if (withoutScripts.length > 0) {\n logger.info(\n ` • ${withoutScripts.length} schema change(s) (applied automatically with schema deployment)`,\n { mode: \"plain\" },\n );\n }\n if (withScripts.length > 0) {\n logger.info(\n ` • ${withScripts.length} data migration(s) (requires migration script execution)`,\n { mode: \"plain\" },\n );\n }\n }\n }\n\n return pendingMigrations;\n}\n\n/**\n * Build migration execution context for script-based migrations.\n * @param client - Operator client instance\n * @param migrationContext - Planned TailorDB context\n * @param migrationsRequiringScripts - Migrations that require scripts\n * @returns Migration context for script execution\n */\nfunction buildMigrationContextForScripts(\n client: OperatorClient,\n migrationContext: Awaited<ReturnType<typeof planTailorDB>>[\"context\"],\n migrationsRequiringScripts: PendingMigration[],\n): MigrationContext {\n const authService = migrationContext.application.authService;\n if (!authService) {\n throw new Error(\"Auth configuration is required to execute migration scripts.\");\n }\n\n const dbConfigMap: Record<string, TailorDBServiceConfig | undefined> = {};\n for (const migration of migrationsRequiringScripts) {\n if (!(migration.namespace in dbConfigMap)) {\n dbConfigMap[migration.namespace] = migrationContext.config.db?.[migration.namespace] as\n | TailorDBServiceConfig\n | undefined;\n }\n }\n\n return {\n client,\n workspaceId: migrationContext.workspaceId,\n authNamespace: authService.config.name,\n machineUsers: authService.config.machineUsers\n ? 
Object.keys(authService.config.machineUsers)\n : undefined,\n dbConfig: dbConfigMap,\n };\n}\n\n/**\n * Apply TailorDB-related changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned TailorDB changes\n * @param phase - Apply phase (defaults to \"create-update\")\n */\nexport async function applyTailorDB(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planTailorDB>>,\n phase: Exclude<ApplyPhase, \"delete\"> = \"create-update\",\n): Promise<void> {\n const { changeSet, context: migrationContext } = result;\n\n if (phase === \"create-update\") {\n let pendingMigrations: PendingMigration[] = [];\n\n // Validate and detect migrations\n // Build types by namespace map\n const typesByNamespace = new Map<string, Record<string, TailorDBType>>();\n for (const tailordb of migrationContext.application.tailorDBServices) {\n const types = tailordb.types;\n if (types) {\n typesByNamespace.set(tailordb.namespace, types);\n }\n }\n\n pendingMigrations = await validateAndDetectMigrations(\n client,\n migrationContext.workspaceId,\n typesByNamespace,\n migrationContext.config,\n migrationContext.noSchemaCheck,\n );\n\n if (pendingMigrations.length > 0) {\n // Migration flow: Execute each migration sequentially (pre -> script -> post)\n // This ensures intermediate states are properly handled when scripts depend on them\n\n // Reset tracking state for this migration run\n processedTypes.reset();\n deletedResources.reset();\n\n // Step 1: Create/update services once at the beginning (services don't need per-migration handling)\n await executeServicesCreation(client, changeSet);\n\n const migrationsRequiringScripts = pendingMigrations.filter(\n (m) => m.diff.requiresMigrationScript,\n );\n\n // Step 2: Build migration context for script execution (if any migrations require scripts)\n const migrationCtx =\n migrationsRequiringScripts.length > 0\n ? 
buildMigrationContextForScripts(client, migrationContext, migrationsRequiringScripts)\n : undefined;\n\n // Step 3: Execute each migration sequentially: pre -> script -> post\n if (migrationsRequiringScripts.length > 0) {\n logger.info(`Executing ${migrationsRequiringScripts.length} data migration(s)...`);\n logger.newline();\n }\n\n for (const migration of pendingMigrations) {\n // Pre-migration phase: Create/update types with breaking fields as optional\n await executeSingleMigrationPrePhase(client, changeSet, migration);\n\n // Script execution (only if this migration requires a script)\n if (migration.diff.requiresMigrationScript && migrationCtx) {\n await executeMigrations(migrationCtx, [migration]);\n }\n\n // Post-migration phase: Apply final types (required: true) and deletions\n await executeSingleMigrationPostPhase(client, changeSet, migration);\n\n // Update migration label only after all phases complete successfully\n await updateMigrationLabel(\n client,\n migrationContext.workspaceId,\n migration.namespace,\n migration.number,\n );\n }\n\n if (migrationsRequiringScripts.length > 0) {\n logger.newline();\n logger.success(`All data migrations completed successfully.`);\n }\n\n // Step 4: Delete remaining GQL permissions that weren't deleted with their types\n const remainingGqlPermissionDeletes = changeSet.gqlPermission.deletes.filter((del) => {\n const permKey = `${del.request.namespaceName}/${del.name}`;\n return !deletedResources.gqlPermissions.has(permKey);\n });\n if (remainingGqlPermissionDeletes.length > 0) {\n await Promise.all(\n remainingGqlPermissionDeletes.map((del) =>\n client.deleteTailorDBGQLPermission(del.request),\n ),\n );\n }\n } else {\n // Normal create-update flow without migrations\n // Services\n await Promise.all([\n ...changeSet.service.creates.map(async (create) => {\n await client.createTailorDBService(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.service.updates.map((update) => 
client.setMetadata(update.metaRequest)),\n ]);\n\n // Types\n try {\n await Promise.all([\n ...changeSet.type.creates.map((create) => client.createTailorDBType(create.request)),\n ...changeSet.type.updates.map((update) => client.updateTailorDBType(update.request)),\n ]);\n } catch (error) {\n handleOptionalToRequiredError(error, [\n \"Run 'tailor-sdk tailordb migration generate' to create migration files.\",\n \"Migration scripts allow you to handle existing data before applying the schema change.\",\n ]);\n }\n\n // GQLPermissions\n await Promise.all([\n ...changeSet.gqlPermission.creates.map((create) =>\n client.createTailorDBGQLPermission(create.request),\n ),\n ...changeSet.gqlPermission.updates.map((update) =>\n client.updateTailorDBGQLPermission(update.request),\n ),\n ]);\n\n // Delete resources (only when no migrations occurred)\n // Migrations already handle deletions in post-migration phase\n await Promise.all(\n changeSet.gqlPermission.deletes.map((del) =>\n client.deleteTailorDBGQLPermission(del.request),\n ),\n );\n await Promise.all(\n changeSet.type.deletes.map((del) => client.deleteTailorDBType(del.request)),\n );\n }\n } else if (phase === \"delete-resources\") {\n // Delete GQL permissions first, then types\n await Promise.all(\n changeSet.gqlPermission.deletes.map((del) => client.deleteTailorDBGQLPermission(del.request)),\n );\n await Promise.all(changeSet.type.deletes.map((del) => client.deleteTailorDBType(del.request)));\n } else if (phase === \"delete-services\") {\n // Services only\n await Promise.all(\n changeSet.service.deletes.map((del) => client.deleteTailorDBService(del.request)),\n );\n }\n}\n\n// ============================================================================\n// Error Handling Helpers\n// ============================================================================\n\n/**\n * Handle optional-to-required field change error with helpful message\n * @param {unknown} error - Error to handle\n * @param {string[]} messages - 
Additional messages to display\n */\nfunction handleOptionalToRequiredError(error: unknown, messages: string[]): never {\n if (\n error instanceof ConnectError &&\n error.code === Code.FailedPrecondition &&\n error.message.includes(\"cannot be updated from non-required to required when records exist\")\n ) {\n logger.error(\n \"Schema change failed: Cannot change field from optional to required when records exist.\",\n );\n logger.newline();\n for (const message of messages) {\n logger.info(message);\n }\n }\n throw error;\n}\n\n// ============================================================================\n// Pre-Migration Support\n// ============================================================================\n\n/**\n * Map of breaking changes: typeName -> fieldName -> change kind\n */\ntype BreakingChangesMap = Map<string, Map<string, DiffChange>>;\n\n/**\n * Build a map of breaking field changes from pending migrations\n * @param {PendingMigration[]} pendingMigrations - Pending migrations\n * @returns {BreakingChangesMap} Map of breaking changes\n */\nfunction buildBreakingChangesMap(pendingMigrations: PendingMigration[]): BreakingChangesMap {\n const map: BreakingChangesMap = new Map();\n\n for (const migration of pendingMigrations) {\n for (const change of migration.diff.changes) {\n // We care about field changes that affect required status\n if (\n change.kind === \"field_added\" ||\n change.kind === \"field_modified\" ||\n change.kind === \"field_removed\"\n ) {\n if (!change.fieldName) continue;\n\n if (!map.has(change.typeName)) {\n map.set(change.typeName, new Map());\n }\n map.get(change.typeName)!.set(change.fieldName, change);\n }\n }\n }\n\n return map;\n}\n\n/**\n * Field config type for breaking change detection\n */\ninterface FieldConfig {\n required?: boolean;\n unique?: boolean;\n allowedValues?: EnumValue[];\n}\n\n/**\n * Apply pre-migration schema adjustments to avoid breaking changes before scripts run.\n * @param fields - Field configs to 
adjust\n * @param typeChanges - Breaking changes for a type\n */\nfunction applyPreMigrationFieldAdjustments(\n fields: Record<string, MessageInitShape<typeof TailorDBType_FieldConfigSchema>>,\n typeChanges: Map<string, DiffChange>,\n): void {\n for (const [fieldName, change] of typeChanges) {\n const field = fields[fieldName];\n if (!field) continue;\n\n const before = change.before as FieldConfig | undefined;\n const after = change.after as FieldConfig | undefined;\n\n if (change.kind === \"field_added\" && after?.required) {\n field.required = false;\n }\n\n if (change.kind !== \"field_modified\") {\n continue;\n }\n\n // Optional to required\n if (!before?.required && after?.required) {\n field.required = false;\n }\n\n // Unique constraint added\n if (!(before?.unique ?? false) && (after?.unique ?? false)) {\n field.unique = false;\n }\n\n // Enum values removed: keep old values + add new values (union)\n if (before?.allowedValues && after?.allowedValues) {\n const afterValues = new Set(after.allowedValues.map((v) => v.value));\n const removedValues = before.allowedValues.filter((v) => !afterValues.has(v.value));\n if (removedValues.length > 0) {\n // Create union of all values, preserving descriptions where available\n const valueMap = new Map<string, string>();\n for (const v of before.allowedValues) {\n valueMap.set(v.value, v.description ?? \"\");\n }\n for (const v of after.allowedValues) {\n if (!valueMap.has(v.value)) {\n valueMap.set(v.value, v.description ?? 
\"\");\n }\n }\n field.allowedValues = Array.from(valueMap.entries()).map(([value, description]) => ({\n value,\n description,\n }));\n }\n }\n }\n}\n\n// ============================================================================\n// Migration Execution Helpers\n// ============================================================================\n\ntype TailorDBChangeSet = Awaited<ReturnType<typeof planTailorDB>>[\"changeSet\"];\n\n/**\n * Get the set of type names affected by a migration\n * @param {PendingMigration} migration - Pending migration\n * @returns {Set<string>} Set of affected type names\n */\nfunction getAffectedTypeNames(migration: PendingMigration): Set<string> {\n const typeNames = new Set<string>();\n for (const change of migration.diff.changes) {\n typeNames.add(change.typeName);\n }\n return typeNames;\n}\n\n/**\n * Get the set of type names to be deleted by a migration\n * @param {PendingMigration} migration - Pending migration\n * @returns {Set<string>} Set of type names to delete\n */\nfunction getDeletedTypeNames(migration: PendingMigration): Set<string> {\n const typeNames = new Set<string>();\n for (const change of migration.diff.changes) {\n if (change.kind === \"type_removed\") {\n typeNames.add(change.typeName);\n }\n }\n return typeNames;\n}\n\n/**\n * Execute services creation (called once at the beginning of migration flow)\n * @param {OperatorClient} client - Operator client instance\n * @param {TailorDBChangeSet} changeSet - TailorDB change set\n * @returns {Promise<void>} Promise that resolves when services are created\n */\nasync function executeServicesCreation(\n client: OperatorClient,\n changeSet: TailorDBChangeSet,\n): Promise<void> {\n await Promise.all([\n ...changeSet.service.creates.map(async (create) => {\n await client.createTailorDBService(create.request);\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.service.updates.map((update) => client.setMetadata(update.metaRequest)),\n ]);\n}\n\n/**\n * Track 
which types have been created/updated across migrations
 */
// Module-level mutable state: it accumulates across sequential calls to the
// pre/post-phase helpers and is only emptied via reset().
const processedTypes = {
  created: new Set<string>(),
  updated: new Set<string>(),
  gqlPermissionsProcessed: new Set<string>(),
  reset() {
    this.created.clear();
    this.updated.clear();
    this.gqlPermissionsProcessed.clear();
  },
};

/**
 * Execute pre-migration phase for a single migration
 * @param {OperatorClient} client - Operator client instance
 * @param {TailorDBChangeSet} changeSet - TailorDB change set
 * @param {PendingMigration} migration - Single pending migration
 * @returns {Promise<void>} Promise that resolves when pre-migration phase completes
 */
async function executeSingleMigrationPrePhase(
  client: OperatorClient,
  changeSet: TailorDBChangeSet,
  migration: PendingMigration,
): Promise<void> {
  // Build breaking changes map for this single migration
  const breakingChanges = buildBreakingChangesMap([migration]);
  const affectedTypes = getAffectedTypeNames(migration);
  // Snapshot taken BEFORE the .map callbacks below mutate processedTypes.created,
  // so "already created in a previous migration" stays stable within this call.
  const createdBeforeMigration = new Set(processedTypes.created);

  // Types - create/update only types affected by this migration
  // NOTE: the filter/map callbacks run synchronously, so processedTypes is
  // fully updated before any of the RPC promises settle.
  await Promise.all([
    // Create types that are affected by this migration and haven't been created yet
    ...changeSet.type.creates
      .filter((create) => {
        const typeName = create.request.tailordbType?.name;
        return typeName && affectedTypes.has(typeName) && !createdBeforeMigration.has(typeName);
      })
      .map((create) => {
        const typeName = create.request.tailordbType?.name;
        if (typeName) processedTypes.created.add(typeName);

        const typeChanges = typeName ? breakingChanges.get(typeName) : undefined;

        if (!typeChanges || typeChanges.size === 0) {
          return client.createTailorDBType(create.request);
        }

        // Clone request to avoid modifying the original changeSet
        const clonedRequest = structuredClone(create.request);
        if (clonedRequest.tailordbType?.schema?.fields) {
          applyPreMigrationFieldAdjustments(clonedRequest.tailordbType.schema.fields, typeChanges);
        }

        return client.createTailorDBType(clonedRequest);
      }),
    // Update types already created in previous migrations (from create list)
    ...changeSet.type.creates
      .filter((create) => {
        const typeName = create.request.tailordbType?.name;
        return typeName && affectedTypes.has(typeName) && createdBeforeMigration.has(typeName);
      })
      .map((create) => {
        const typeName = create.request.tailordbType?.name;
        if (typeName) processedTypes.updated.add(typeName);

        const typeChanges = typeName ? breakingChanges.get(typeName) : undefined;

        if (!typeChanges || typeChanges.size === 0) {
          return client.updateTailorDBType({
            workspaceId: create.request.workspaceId,
            namespaceName: create.request.namespaceName,
            tailordbType: create.request.tailordbType,
          });
        }

        // Clone request to avoid modifying the original changeSet
        const clonedRequest = structuredClone(create.request);
        if (clonedRequest.tailordbType?.schema?.fields) {
          applyPreMigrationFieldAdjustments(clonedRequest.tailordbType.schema.fields, typeChanges);
        }

        return client.updateTailorDBType({
          workspaceId: create.request.workspaceId,
          namespaceName: create.request.namespaceName,
          tailordbType: clonedRequest.tailordbType,
        });
      }),
    // Update types that are affected by this migration
    ...changeSet.type.updates
      .filter((update) => {
        const typeName = update.request.tailordbType?.name;
        return typeName && affectedTypes.has(typeName);
      })
      .map((update) => {
        const typeName = update.request.tailordbType?.name;
        if (typeName) processedTypes.updated.add(typeName);

        const typeChanges = typeName ? breakingChanges.get(typeName) : undefined;

        if (!typeChanges || typeChanges.size === 0) {
          return client.updateTailorDBType(update.request);
        }

        // Clone request to avoid modifying the original changeSet
        const clonedRequest = structuredClone(update.request);
        if (clonedRequest.tailordbType?.schema?.fields) {
          applyPreMigrationFieldAdjustments(clonedRequest.tailordbType.schema.fields, typeChanges);
        }

        return client.updateTailorDBType(clonedRequest);
      }),
  ]);

  // GQLPermissions - process once (on the first migration)
  // NOTE(review): the guard is actually per namespace (keyed by
  // migration.namespace), not strictly "the first migration" — confirm intent.
  if (!processedTypes.gqlPermissionsProcessed.has(migration.namespace)) {
    const gqlPermissionCreatesForNamespace = changeSet.gqlPermission.creates.filter(
      (create) => create.request.namespaceName === migration.namespace,
    );
    const gqlPermissionUpdatesForNamespace = changeSet.gqlPermission.updates.filter(
      (update) => update.request.namespaceName === migration.namespace,
    );
    const gqlPermissionTypeNames = new Set(
      gqlPermissionCreatesForNamespace.map((create) => create.name),
    );
    // Types referenced by new GQL permissions that have not been created yet;
    // they are created below before the permission RPCs run (presumably so the
    // permission has an existing target type — confirm with operator API).
    const missingTypeCreates = changeSet.type.creates.filter((create) => {
      const typeName = create.request.tailordbType?.name;
      const namespaceName = create.request.namespaceName;
      return (
        namespaceName === migration.namespace &&
        typeName &&
        gqlPermissionTypeNames.has(typeName) &&
        !processedTypes.created.has(typeName)
      );
    });
    if (missingTypeCreates.length > 0) {
      await Promise.all(
        missingTypeCreates.map((create) => {
          const typeName = create.request.tailordbType?.name;
          if (typeName) processedTypes.created.add(typeName);
          return client.createTailorDBType(create.request);
        }),
      );
    }
    processedTypes.gqlPermissionsProcessed.add(migration.namespace);
    await Promise.all([
      ...gqlPermissionCreatesForNamespace.map((create) =>
        client.createTailorDBGQLPermission(create.request),
      ),
      ...gqlPermissionUpdatesForNamespace.map((update) =>
        client.updateTailorDBGQLPermission(update.request),
      ),
    ]);
  }
}

/**
 * 
Track which types/permissions have been deleted across migrations
 */
// Module-level memo that prevents duplicate delete RPCs when several
// sequential migrations reference the same type/permission; cleared via reset().
const deletedResources = {
  types: new Set<string>(),
  gqlPermissions: new Set<string>(),
  reset() {
    this.types.clear();
    this.gqlPermissions.clear();
  },
};

/**
 * Execute post-migration phase for a single migration: Apply final types (with required: true) and deletions
 * @param {OperatorClient} client - Operator client instance
 * @param {TailorDBChangeSet} changeSet - TailorDB change set
 * @param {PendingMigration} migration - Single pending migration
 * @returns {Promise<void>} Promise that resolves when post-migration phase completes
 */
async function executeSingleMigrationPostPhase(
  client: OperatorClient,
  changeSet: TailorDBChangeSet,
  migration: PendingMigration,
): Promise<void> {
  // Build breaking changes map for this single migration
  const breakingChanges = buildBreakingChangesMap([migration]);
  const affectedTypes = getAffectedTypeNames(migration);
  const deletedTypeNames = getDeletedTypeNames(migration);

  // Types - apply final schema values for types affected by this migration
  // Pre-migration used cloned requests, so the original changeSet still has correct values
  try {
    await Promise.all([
      // For newly created types that had breaking changes in this migration, send update with final values
      ...changeSet.type.creates
        .filter((create) => {
          const typeName = create.request.tailordbType?.name;
          return typeName && affectedTypes.has(typeName) && breakingChanges.has(typeName);
        })
        .map((create) =>
          client.updateTailorDBType({
            workspaceId: create.request.workspaceId,
            namespaceName: create.request.namespaceName,
            tailordbType: create.request.tailordbType,
          }),
        ),
      // For updated types affected by this migration, send update with final values
      ...changeSet.type.updates
        .filter((update) => {
          const typeName = update.request.tailordbType?.name;
          return typeName && affectedTypes.has(typeName) && breakingChanges.has(typeName);
        })
        .map((update) => client.updateTailorDBType(update.request)),
    ]);
  } catch (error) {
    // handleOptionalToRequiredError logs context for the known precondition
    // failure and ALWAYS rethrows (return type `never`).
    handleOptionalToRequiredError(error, [
      "This error occurred during post-migration phase. Please check your migration script.",
      "Ensure all existing records have values for fields being changed to required.",
    ]);
  }

  // Delete types that are removed in this migration
  if (deletedTypeNames.size > 0) {
    // First delete GQL permissions for the types being deleted
    // (deletedResources.gqlPermissions dedupes across migrations).
    const gqlPermissionsToDelete = changeSet.gqlPermission.deletes.filter((del) => {
      const permKey = `${del.request.namespaceName}/${del.name}`;
      if (deletedResources.gqlPermissions.has(permKey)) return false;
      // Check if this permission is for a type being deleted in this migration
      // del.name and del.request.typeName both hold the type name
      const typeName = del.name;
      if (typeName && deletedTypeNames.has(typeName)) {
        deletedResources.gqlPermissions.add(permKey);
        return true;
      }
      return false;
    });
    await Promise.all(
      gqlPermissionsToDelete.map((del) => client.deleteTailorDBGQLPermission(del.request)),
    );

    // Then delete the types (after their GQL permissions are gone);
    // deletedResources.types dedupes across migrations.
    const typesToDelete = changeSet.type.deletes.filter((del) => {
      // del.name and del.request.tailordbTypeName both hold the type name
      const typeName = del.name;
      if (!typeName || deletedResources.types.has(typeName)) return false;
      if (deletedTypeNames.has(typeName)) {
        deletedResources.types.add(typeName);
        return true;
      }
      return false;
    });
    await Promise.all(typesToDelete.map((del) => client.deleteTailorDBType(del.request)));
  }
}

/**
 * Plan TailorDB-related changes based on current and desired state.
 * @param context - Planning context
 * @returns Planned changes
 */
export async function planTailorDB(context: PlanContext) {
  const { client, workspaceId, application, forRemoval, config, noSchemaCheck } = context;
  const tailordbs: TailorDBService[] = [];
  if (!forRemoval) {
    for (const 
tailordb of application.tailorDBServices) {\n await tailordb.loadTypes();\n tailordbs.push(tailordb);\n }\n }\n const executors = forRemoval\n ? []\n : Object.values((await application.executorService?.loadExecutors()) ?? {});\n\n const {\n changeSet: serviceChangeSet,\n conflicts,\n unmanaged,\n resourceOwners,\n } = await planServices(client, workspaceId, application.name, tailordbs);\n const deletedServices = serviceChangeSet.deletes.map((del) => del.name);\n const [typeChangeSet, gqlPermissionChangeSet] = await Promise.all([\n planTypes(client, workspaceId, tailordbs, executors, deletedServices),\n planGqlPermissions(client, workspaceId, tailordbs, deletedServices),\n ]);\n\n serviceChangeSet.print();\n typeChangeSet.print();\n gqlPermissionChangeSet.print();\n\n return {\n changeSet: {\n service: serviceChangeSet,\n type: typeChangeSet,\n gqlPermission: gqlPermissionChangeSet,\n },\n conflicts,\n unmanaged,\n resourceOwners,\n context: {\n workspaceId,\n application,\n config,\n noSchemaCheck: noSchemaCheck ?? 
false,\n },\n };\n}\n\ntype CreateService = {\n name: string;\n request: MessageInitShape<typeof CreateTailorDBServiceRequestSchema>;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateService = {\n name: string;\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteService = {\n name: string;\n request: MessageInitShape<typeof DeleteTailorDBServiceRequestSchema>;\n};\n\nfunction trn(workspaceId: string, name: string) {\n return `${trnPrefix(workspaceId)}:tailordb:${name}`;\n}\n\nasync function planServices(\n client: OperatorClient,\n workspaceId: string,\n appName: string,\n tailordbs: ReadonlyArray<TailorDBService>,\n) {\n const changeSet = createChangeSet<CreateService, UpdateService, DeleteService>(\n \"TailorDB services\",\n );\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { tailordbServices, nextPageToken } = await client.listTailorDBServices({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [tailordbServices, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n const existingServices: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n if (!resource.namespace?.name) {\n return;\n }\n const { metadata } = await client.getMetadata({\n trn: trn(workspaceId, resource.namespace.name),\n });\n existingServices[resource.namespace.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n allLabels: metadata?.labels,\n };\n }),\n );\n\n for (const tailordb of tailordbs) {\n const existing = existingServices[tailordb.namespace];\n const metaRequest = await buildMetaRequest(\n trn(workspaceId, tailordb.namespace),\n appName,\n 
existing?.allLabels,\n );\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"TailorDB service\",\n resourceName: tailordb.namespace,\n });\n } else if (existing.label !== appName) {\n conflicts.push({\n resourceType: \"TailorDB service\",\n resourceName: tailordb.namespace,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name: tailordb.namespace,\n metaRequest,\n });\n delete existingServices[tailordb.namespace];\n } else {\n changeSet.creates.push({\n name: tailordb.namespace,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n // Set UTC to match tailorctl/terraform\n defaultTimezone: \"UTC\",\n },\n metaRequest,\n });\n }\n }\n Object.entries(existingServices).forEach(([namespaceName]) => {\n const label = existingServices[namespaceName]?.label;\n if (label && label !== appName) {\n resourceOwners.add(label);\n }\n // Only delete services managed by this application\n if (label === appName) {\n changeSet.deletes.push({\n name: namespaceName,\n request: {\n workspaceId,\n namespaceName,\n },\n });\n }\n });\n\n return { changeSet, conflicts, unmanaged, resourceOwners };\n}\n\ntype CreateType = {\n name: string;\n request: MessageInitShape<typeof CreateTailorDBTypeRequestSchema>;\n};\n\ntype UpdateType = {\n name: string;\n request: MessageInitShape<typeof UpdateTailorDBTypeRequestSchema>;\n};\n\ntype DeleteType = {\n name: string;\n request: MessageInitShape<typeof DeleteTailorDBTypeRequestSchema>;\n};\n\nasync function planTypes(\n client: OperatorClient,\n workspaceId: string,\n tailordbs: ReadonlyArray<TailorDBService>,\n executors: ReadonlyArray<Executor>,\n deletedServices: ReadonlyArray<string>,\n filteredTypesByNamespace?: Map<string, Record<string, TailorDBType>>,\n) {\n const changeSet = createChangeSet<CreateType, UpdateType, DeleteType>(\"TailorDB types\");\n\n const fetchTypes = (namespaceName: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { 
tailordbTypes, nextPageToken } = await client.listTailorDBTypes({\n workspaceId,\n namespaceName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [tailordbTypes, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n const executorUsedTypes = new Set<string>();\n for (const executor of executors) {\n if (\n executor.trigger.kind === \"recordCreated\" ||\n executor.trigger.kind === \"recordUpdated\" ||\n executor.trigger.kind === \"recordDeleted\"\n ) {\n executorUsedTypes.add(executor.trigger.typeName);\n }\n }\n\n // Validate that types used by executors don't have publishEvents explicitly set to false\n for (const tailordb of tailordbs) {\n const types = filteredTypesByNamespace?.get(tailordb.namespace) ?? tailordb.types;\n for (const typeName of Object.keys(types)) {\n const type = types[typeName];\n if (executorUsedTypes.has(typeName) && type.settings?.publishEvents === false) {\n throw new Error(\n `Type \"${typeName}\" has publishEvents set to false, but it is used by an executor with a record trigger. ` +\n `Either remove the publishEvents: false setting or remove the executor trigger for this type.`,\n );\n }\n }\n }\n\n for (const tailordb of tailordbs) {\n const existingTypes = await fetchTypes(tailordb.namespace);\n const existingNameSet = new Set<string>();\n existingTypes.forEach((type) => existingNameSet.add(type.name));\n\n // Use filtered types if provided, otherwise use local types\n const types = filteredTypesByNamespace?.get(tailordb.namespace) ?? 
tailordb.types;\n\n for (const typeName of Object.keys(types)) {\n const tailordbType = generateTailorDBTypeManifest(\n types[typeName],\n executorUsedTypes,\n tailordb.config.gqlOperations,\n );\n if (existingNameSet.has(typeName)) {\n changeSet.updates.push({\n name: typeName,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n tailordbType,\n },\n });\n existingNameSet.delete(typeName);\n } else {\n changeSet.creates.push({\n name: typeName,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n tailordbType,\n },\n });\n }\n }\n existingNameSet.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n tailordbTypeName: name,\n },\n });\n });\n }\n for (const namespaceName of deletedServices) {\n const existingTypes = await fetchTypes(namespaceName);\n existingTypes.forEach((typ) => {\n changeSet.deletes.push({\n name: typ.name,\n request: {\n workspaceId,\n namespaceName,\n tailordbTypeName: typ.name,\n },\n });\n });\n }\n return changeSet;\n}\n\n// TODO(remiposo): Copied the type-processor / aggregator processing almost as-is.\n// This will need refactoring later.\n/**\n * Generate a TailorDB type manifest from parsed type\n * @param {TailorDBType} type - Parsed TailorDB type\n * @param {ReadonlySet<string>} executorUsedTypes - Set of types used by executors\n * @param {GqlOperations} [namespaceGqlOperations] - Default gqlOperations for the namespace (already normalized)\n * @returns {MessageInitShape<typeof TailorDBTypeSchema>} Type manifest\n */\nfunction generateTailorDBTypeManifest(\n type: TailorDBType,\n executorUsedTypes: ReadonlySet<string>,\n namespaceGqlOperations?: GqlOperations,\n): MessageInitShape<typeof TailorDBTypeSchema> {\n // This ensures that explicitly provided pluralForm like \"PurchaseOrderList\" becomes \"purchaseOrderList\"\n const pluralForm = inflection.camelize(type.pluralForm, true);\n\n const defaultSettings: {\n aggregation: boolean;\n 
bulkUpsert: boolean;\n draft: boolean;\n defaultQueryLimitSize: bigint;\n maxBulkUpsertSize: bigint;\n pluralForm: string;\n publishRecordEvents: boolean;\n disableGqlOperations?: {\n create: boolean;\n update: boolean;\n delete: boolean;\n read: boolean;\n };\n } = {\n aggregation: type.settings?.aggregation || false,\n bulkUpsert: type.settings?.bulkUpsert || false,\n draft: false,\n defaultQueryLimitSize: 100n,\n maxBulkUpsertSize: 1000n,\n pluralForm,\n publishRecordEvents: false,\n };\n\n // Determine publishRecordEvents (user-facing name: publishEvents):\n // - If user explicitly sets a value (true or false), respect that (validation already ensures no executor conflict)\n // - If not set, use executor detection (true if executor uses this type)\n if (type.settings?.publishEvents !== undefined) {\n defaultSettings.publishRecordEvents = type.settings.publishEvents;\n } else if (executorUsedTypes.has(type.name)) {\n defaultSettings.publishRecordEvents = true;\n }\n\n // Both type.settings.gqlOperations and namespaceGqlOperations are already normalized by schema\n const ops = type.settings?.gqlOperations ?? namespaceGqlOperations;\n if (ops) {\n defaultSettings.disableGqlOperations = {\n create: ops.create === false,\n update: ops.update === false,\n delete: ops.delete === false,\n read: ops.read === false,\n };\n }\n\n const fields: Record<string, MessageInitShape<typeof TailorDBType_FieldConfigSchema>> = {};\n\n Object.keys(type.fields)\n .filter((fieldName) => fieldName !== \"id\")\n .forEach((fieldName) => {\n const fieldConfig = type.fields[fieldName].config;\n const fieldType = fieldConfig.type;\n const fieldEntry: MessageInitShape<typeof TailorDBType_FieldConfigSchema> = {\n type: fieldType,\n allowedValues: fieldType === \"enum\" ? 
fieldConfig.allowedValues || [] : [],\n description: fieldConfig.description || \"\",\n validate: toProtoFieldValidate(fieldConfig),\n array: fieldConfig.array || false,\n index: fieldConfig.index || false,\n unique: fieldConfig.unique || false,\n foreignKey: fieldConfig.foreignKey || false,\n foreignKeyType: fieldConfig.foreignKeyType,\n foreignKeyField: fieldConfig.foreignKeyField,\n required: fieldConfig.required !== false,\n vector: fieldConfig.vector || false,\n ...toProtoFieldHooks(fieldConfig),\n ...(fieldConfig.serial && {\n serial: {\n start: fieldConfig.serial.start as unknown as bigint,\n ...(fieldConfig.serial.maxValue && {\n maxValue: fieldConfig.serial.maxValue as unknown as bigint,\n }),\n ...(fieldConfig.serial.format && {\n format: fieldConfig.serial.format,\n }),\n },\n }),\n ...(fieldConfig.scale !== undefined && { scale: fieldConfig.scale }),\n };\n\n // Handle nested fields\n if (fieldConfig.type === \"nested\" && fieldConfig.fields) {\n fieldEntry.fields = processNestedFields(fieldConfig.fields);\n }\n\n fields[fieldName] = fieldEntry;\n });\n\n const relationships: Record<\n string,\n MessageInitShape<typeof TailorDBType_RelationshipConfigSchema>\n > = {};\n\n for (const [relationName, rel] of Object.entries(type.forwardRelationships)) {\n relationships[relationName] = {\n refType: rel.targetType,\n refField: rel.sourceField,\n srcField: rel.targetField,\n array: rel.isArray,\n description: rel.description,\n };\n }\n\n for (const [relationName, rel] of Object.entries(type.backwardRelationships)) {\n relationships[relationName] = {\n refType: rel.targetType,\n refField: rel.targetField,\n srcField: rel.sourceField,\n array: rel.isArray,\n description: rel.description,\n };\n }\n\n // Process indexes from metadata\n const indexes: Record<string, MessageInitShape<typeof TailorDBType_IndexSchema>> = {};\n if (type.indexes) {\n Object.entries(type.indexes).forEach(([key, index]) => {\n indexes[key] = {\n fieldNames: index.fields,\n unique: 
index.unique || false,\n };\n });\n }\n\n // Process files from metadata\n const files: Record<string, MessageInitShape<typeof TailorDBType_FileConfigSchema>> = {};\n if (type.files) {\n Object.entries(type.files).forEach(([key, description]) => {\n files[key] = { description: description || \"\" };\n });\n }\n\n // To be secure by default, add Permission settings that reject everyone\n // when Permission/RecordPermission is not configured.\n const defaultPermission: MessageInitShape<typeof TailorDBType_PermissionSchema> = {\n create: [],\n read: [],\n update: [],\n delete: [],\n };\n const permission = type.permissions.record\n ? protoPermission(type.permissions.record)\n : defaultPermission;\n\n return {\n name: type.name,\n schema: {\n description: type.description || \"\",\n fields,\n relationships: relationships,\n settings: defaultSettings,\n extends: false,\n directives: [],\n indexes,\n files,\n permission,\n },\n };\n}\n\nfunction toProtoFieldValidate(\n fieldConfig: OperatorFieldConfig,\n): MessageInitShape<typeof TailorDBType_FieldConfigSchema>[\"validate\"] {\n return (fieldConfig.validate || []).map((val) => ({\n action: TailorDBType_PermitAction.DENY,\n errorMessage: val.errorMessage || \"\",\n ...(val.script && {\n script: {\n expr: val.script.expr ? `!${val.script.expr}` : \"\",\n },\n }),\n }));\n}\n\nfunction toProtoFieldHooks(\n fieldConfig: OperatorFieldConfig,\n): Pick<MessageInitShape<typeof TailorDBType_FieldConfigSchema>, \"hooks\"> | Record<never, never> {\n if (!fieldConfig.hooks) {\n return {};\n }\n return {\n hooks: {\n create: fieldConfig.hooks.create\n ? {\n expr: fieldConfig.hooks.create.expr || \"\",\n }\n : undefined,\n update: fieldConfig.hooks.update\n ? 
{\n expr: fieldConfig.hooks.update.expr || \"\",\n }\n : undefined,\n },\n };\n}\n\nfunction processNestedFields(\n fields: Record<string, OperatorFieldConfig>,\n): Record<string, MessageInitShape<typeof TailorDBType_FieldConfigSchema>> {\n const nestedFields: Record<string, MessageInitShape<typeof TailorDBType_FieldConfigSchema>> = {};\n\n Object.entries(fields).forEach(([nestedFieldName, nestedFieldConfig]) => {\n const nestedType = nestedFieldConfig.type;\n\n if (nestedType === \"nested\" && nestedFieldConfig.fields) {\n const deepNestedFields = processNestedFields(nestedFieldConfig.fields);\n nestedFields[nestedFieldName] = {\n type: \"nested\",\n allowedValues: nestedFieldConfig.allowedValues || [],\n description: nestedFieldConfig.description || \"\",\n validate: toProtoFieldValidate(nestedFieldConfig),\n required: nestedFieldConfig.required ?? true,\n array: nestedFieldConfig.array ?? false,\n index: false,\n unique: false,\n foreignKey: false,\n vector: false,\n ...toProtoFieldHooks(nestedFieldConfig),\n fields: deepNestedFields,\n ...(nestedFieldConfig.scale !== undefined && { scale: nestedFieldConfig.scale }),\n };\n } else {\n nestedFields[nestedFieldName] = {\n type: nestedType,\n allowedValues: nestedType === \"enum\" ? nestedFieldConfig.allowedValues || [] : [],\n description: nestedFieldConfig.description || \"\",\n validate: toProtoFieldValidate(nestedFieldConfig),\n required: nestedFieldConfig.required ?? true,\n array: nestedFieldConfig.array ?? 
false,\n index: false,\n unique: false,\n foreignKey: false,\n vector: false,\n ...toProtoFieldHooks(nestedFieldConfig),\n ...(nestedFieldConfig.serial && {\n serial: {\n start: nestedFieldConfig.serial.start as unknown as bigint,\n ...(nestedFieldConfig.serial.maxValue && {\n maxValue: nestedFieldConfig.serial.maxValue as unknown as bigint,\n }),\n ...(nestedFieldConfig.serial.format && {\n format: nestedFieldConfig.serial.format,\n }),\n },\n }),\n ...(nestedFieldConfig.scale !== undefined && { scale: nestedFieldConfig.scale }),\n };\n }\n });\n\n return nestedFields;\n}\n\nfunction protoPermission(\n permission: StandardTailorTypePermission,\n): MessageInitShape<typeof TailorDBType_PermissionSchema> {\n const ret: MessageInitShape<typeof TailorDBType_PermissionSchema> = {};\n for (const [key, policies] of Object.entries(permission)) {\n ret[key as keyof StandardTailorTypePermission] = policies.map((policy) => protoPolicy(policy));\n }\n return ret;\n}\n\nfunction protoPolicy(\n policy: StandardActionPermission<\"record\">,\n): MessageInitShape<typeof TailorDBType_Permission_PolicySchema> {\n let permit: TailorDBType_Permission_Permit;\n switch (policy.permit) {\n case \"allow\":\n permit = TailorDBType_Permission_Permit.ALLOW;\n break;\n case \"deny\":\n permit = TailorDBType_Permission_Permit.DENY;\n break;\n default:\n throw new Error(`Unknown permission: ${policy.permit satisfies never}`);\n }\n return {\n conditions: policy.conditions.map((cond) => protoCondition(cond)),\n permit,\n description: policy.description,\n };\n}\n\nfunction protoCondition(\n condition: StandardPermissionCondition<\"record\">,\n): MessageInitShape<typeof TailorDBType_Permission_ConditionSchema> {\n const [left, operator, right] = condition;\n\n const l = protoOperand(left);\n const r = protoOperand(right);\n let op: TailorDBType_Permission_Operator;\n switch (operator) {\n case \"eq\":\n op = TailorDBType_Permission_Operator.EQ;\n break;\n case \"ne\":\n op = 
TailorDBType_Permission_Operator.NE;\n break;\n case \"in\":\n op = TailorDBType_Permission_Operator.IN;\n break;\n case \"nin\":\n op = TailorDBType_Permission_Operator.NIN;\n break;\n case \"hasAny\":\n op = TailorDBType_Permission_Operator.HAS_ANY;\n break;\n case \"nhasAny\":\n op = TailorDBType_Permission_Operator.NHAS_ANY;\n break;\n default:\n throw new Error(`Unknown operator: ${operator satisfies never}`);\n }\n return {\n left: l,\n operator: op,\n right: r,\n };\n}\n\nfunction protoOperand(\n operand: PermissionOperand,\n): MessageInitShape<typeof TailorDBType_Permission_OperandSchema> {\n if (typeof operand === \"object\" && !Array.isArray(operand)) {\n if (\"user\" in operand) {\n return {\n kind: {\n case: \"userField\",\n value: operand.user,\n },\n };\n } else if (\"record\" in operand) {\n return {\n kind: {\n case: \"recordField\",\n value: operand.record,\n },\n };\n } else if (\"newRecord\" in operand) {\n return {\n kind: {\n case: \"newRecordField\",\n value: operand.newRecord,\n },\n };\n } else if (\"oldRecord\" in operand) {\n return {\n kind: {\n case: \"oldRecordField\",\n value: operand.oldRecord,\n },\n };\n } else {\n throw new Error(`Unknown operand: ${JSON.stringify(operand)}`);\n }\n }\n\n return {\n kind: {\n case: \"value\",\n value: fromJson(ValueSchema, operand),\n },\n };\n}\n\ntype CreateGqlPermission = {\n name: string;\n request: MessageInitShape<typeof CreateTailorDBGQLPermissionRequestSchema>;\n};\n\ntype UpdateGqlPermission = {\n name: string;\n request: MessageInitShape<typeof UpdateTailorDBGQLPermissionRequestSchema>;\n};\n\ntype DeleteGqlPermission = {\n name: string;\n request: MessageInitShape<typeof DeleteTailorDBGQLPermissionRequestSchema>;\n};\n\nasync function planGqlPermissions(\n client: OperatorClient,\n workspaceId: string,\n tailordbs: ReadonlyArray<TailorDBService>,\n deletedServices: ReadonlyArray<string>,\n) {\n const changeSet = createChangeSet<CreateGqlPermission, UpdateGqlPermission, 
DeleteGqlPermission>(\n \"TailorDB gqlPermissions\",\n );\n\n const fetchGqlPermissions = (namespaceName: string) => {\n return fetchAll(async (pageToken, maxPageSize) => {\n try {\n const { permissions, nextPageToken } = await client.listTailorDBGQLPermissions({\n workspaceId,\n namespaceName,\n pageToken,\n pageSize: maxPageSize,\n });\n return [permissions, nextPageToken];\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n return [[], \"\"];\n }\n throw error;\n }\n });\n };\n\n for (const tailordb of tailordbs) {\n const existingGqlPermissions = await fetchGqlPermissions(tailordb.namespace);\n const existingNameSet = new Set<string>();\n existingGqlPermissions.forEach((gqlPermission) => {\n existingNameSet.add(gqlPermission.typeName);\n });\n\n const types = tailordb.types;\n for (const typeName of Object.keys(types)) {\n const gqlPermission = types[typeName].permissions.gql;\n if (!gqlPermission) {\n continue;\n }\n if (existingNameSet.has(typeName)) {\n changeSet.updates.push({\n name: typeName,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n typeName: typeName,\n permission: protoGqlPermission(gqlPermission),\n },\n });\n existingNameSet.delete(typeName);\n } else {\n changeSet.creates.push({\n name: typeName,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n typeName: typeName,\n permission: protoGqlPermission(gqlPermission),\n },\n });\n }\n }\n existingNameSet.forEach((name) => {\n changeSet.deletes.push({\n name,\n request: {\n workspaceId,\n namespaceName: tailordb.namespace,\n typeName: name,\n },\n });\n });\n }\n for (const namespaceName of deletedServices) {\n const existingGqlPermissions = await fetchGqlPermissions(namespaceName);\n existingGqlPermissions.forEach((gqlPermission) => {\n changeSet.deletes.push({\n name: gqlPermission.typeName,\n request: {\n workspaceId,\n namespaceName,\n typeName: gqlPermission.typeName,\n },\n });\n });\n }\n return 
changeSet;\n}\n\nfunction protoGqlPermission(\n permission: StandardTailorTypeGqlPermission,\n): MessageInitShape<typeof TailorDBGQLPermissionSchema> {\n return {\n policies: permission.map((policy) => protoGqlPolicy(policy)),\n };\n}\n\nfunction protoGqlPolicy(\n policy: StandardGqlPermissionPolicy,\n): MessageInitShape<typeof TailorDBGQLPermission_PolicySchema> {\n const actions: TailorDBGQLPermission_Action[] = [];\n for (const action of policy.actions) {\n switch (action) {\n case \"all\":\n actions.push(TailorDBGQLPermission_Action.ALL);\n break;\n case \"create\":\n actions.push(TailorDBGQLPermission_Action.CREATE);\n break;\n case \"read\":\n actions.push(TailorDBGQLPermission_Action.READ);\n break;\n case \"update\":\n actions.push(TailorDBGQLPermission_Action.UPDATE);\n break;\n case \"delete\":\n actions.push(TailorDBGQLPermission_Action.DELETE);\n break;\n case \"aggregate\":\n actions.push(TailorDBGQLPermission_Action.AGGREGATE);\n break;\n case \"bulkUpsert\":\n actions.push(TailorDBGQLPermission_Action.BULK_UPSERT);\n break;\n default:\n throw new Error(`Unknown action: ${action satisfies never}`);\n }\n }\n let permit: TailorDBGQLPermission_Permit;\n switch (policy.permit) {\n case \"allow\":\n permit = TailorDBGQLPermission_Permit.ALLOW;\n break;\n case \"deny\":\n permit = TailorDBGQLPermission_Permit.DENY;\n break;\n default:\n throw new Error(`Unknown permission: ${policy.permit satisfies never}`);\n }\n return {\n conditions: policy.conditions.map((cond) => protoGqlCondition(cond)),\n actions,\n permit,\n description: policy.description,\n };\n}\n\nfunction protoGqlCondition(\n condition: StandardPermissionCondition<\"gql\">,\n): MessageInitShape<typeof TailorDBGQLPermission_ConditionSchema> {\n const [left, operator, right] = condition;\n\n const l = protoGqlOperand(left);\n const r = protoGqlOperand(right);\n let op: TailorDBGQLPermission_Operator;\n switch (operator) {\n case \"eq\":\n op = TailorDBGQLPermission_Operator.EQ;\n break;\n case 
\"ne\":\n op = TailorDBGQLPermission_Operator.NE;\n break;\n case \"in\":\n op = TailorDBGQLPermission_Operator.IN;\n break;\n case \"nin\":\n op = TailorDBGQLPermission_Operator.NIN;\n break;\n case \"hasAny\":\n op = TailorDBGQLPermission_Operator.HAS_ANY;\n break;\n case \"nhasAny\":\n op = TailorDBGQLPermission_Operator.NHAS_ANY;\n break;\n default:\n throw new Error(`Unknown operator: ${operator satisfies never}`);\n }\n return {\n left: l,\n operator: op,\n right: r,\n };\n}\n\nfunction protoGqlOperand(\n operand: PermissionOperand,\n): MessageInitShape<typeof TailorDBGQLPermission_OperandSchema> {\n if (typeof operand === \"object\" && !Array.isArray(operand)) {\n if (\"user\" in operand) {\n return {\n kind: {\n case: \"userField\",\n value: operand.user,\n },\n };\n }\n }\n\n return {\n kind: {\n case: \"value\",\n value: fromJson(ValueSchema, operand),\n },\n };\n}\n\n// ============================================================================\n// Migration Integration\n// ============================================================================\n\ninterface MigrationCheckResult {\n namespace: string;\n migrationsDir: string;\n hasDiff: boolean;\n diff?: MigrationDiff;\n}\n\n/**\n * Check if there are schema differences between migration snapshots and local definitions\n * @param {ReadonlyMap<string, Record<string, TailorDBType>>} typesByNamespace - Types by namespace\n * @param {NamespaceWithMigrations[]} namespacesWithMigrations - Namespaces with migrations config\n * @returns {Promise<MigrationCheckResult[]>} Results for each namespace\n */\nasync function checkMigrationDiffs(\n typesByNamespace: ReadonlyMap<string, Record<string, TailorDBType>>,\n namespacesWithMigrations: NamespaceWithMigrations[],\n): Promise<MigrationCheckResult[]> {\n const results: MigrationCheckResult[] = [];\n\n for (const { namespace, migrationsDir } of namespacesWithMigrations) {\n const localTypes = typesByNamespace.get(namespace);\n if (!localTypes) {\n continue;\n 
}\n\n // Try to reconstruct snapshot from migrations\n let previousSnapshot;\n try {\n previousSnapshot = reconstructSnapshotFromMigrations(migrationsDir);\n } catch {\n // No migrations directory - this is fine, no check needed\n results.push({\n namespace,\n migrationsDir,\n hasDiff: false,\n });\n continue;\n }\n\n if (!previousSnapshot) {\n // No snapshots yet - user should run migrate generate first\n results.push({\n namespace,\n migrationsDir,\n hasDiff: true,\n diff: undefined, // Indicates no snapshot exists\n });\n continue;\n }\n\n // Compare with local types\n const diff = compareLocalTypesWithSnapshot(previousSnapshot, localTypes, namespace);\n\n results.push({\n namespace,\n migrationsDir,\n hasDiff: hasChanges(diff),\n diff: hasChanges(diff) ? diff : undefined,\n });\n }\n\n return results;\n}\n\n/**\n * Format migration check results for display\n * @param {MigrationCheckResult[]} results - Migration check results\n * @returns {string} Formatted results string\n */\nfunction formatMigrationCheckResults(results: MigrationCheckResult[]): string {\n const lines: string[] = [];\n\n for (const result of results) {\n if (!result.hasDiff) {\n continue;\n }\n\n lines.push(`Namespace: ${result.namespace}`);\n\n if (!result.diff) {\n lines.push(\n \" No migration snapshot found. 
Run 'tailor-sdk tailordb migration generate' first.\",\n );\n } else {\n lines.push(` ${formatDiffSummary(result.diff)}`);\n lines.push(\"\");\n lines.push(formatMigrationDiff(result.diff));\n }\n lines.push(\"\");\n }\n\n return lines.join(\"\\n\");\n}\n","import { type ApplyPhase } from \"@/cli/commands/apply/apply\";\nimport { type OperatorClient, fetchAll } from \"@/cli/shared/client\";\nimport { createChangeSet, type ChangeSet } from \"./change-set\";\nimport { workflowJobFunctionName } from \"./function-registry\";\nimport { buildMetaRequest, sdkNameLabelKey, type WithLabel } from \"./label\";\nimport type { OwnerConflict, UnmanagedResource } from \"./confirm\";\nimport type { Workflow } from \"@/types/workflow.generated\";\nimport type { MessageInitShape } from \"@bufbuild/protobuf\";\nimport type { SetMetadataRequestSchema } from \"@tailor-proto/tailor/v1/metadata_pb\";\n\n/**\n * Apply workflow changes for the given phase.\n * @param client - Operator client instance\n * @param result - Planned workflow changes\n * @param phase - Apply phase\n * @returns Promise that resolves when workflows are applied\n */\nexport async function applyWorkflow(\n client: OperatorClient,\n result: Awaited<ReturnType<typeof planWorkflow>>,\n phase: Extract<ApplyPhase, \"create-update\" | \"delete\"> = \"create-update\",\n) {\n const { changeSet, appName } = result;\n if (phase === \"create-update\") {\n // Register job functions used by any workflow, returns map of job name to version\n const jobFunctionVersions = await registerJobFunctions(client, changeSet, appName);\n\n // Create and update workflows in parallel\n // Each workflow only gets the job function versions it actually uses\n await Promise.all([\n ...changeSet.creates.map(async (create) => {\n const filteredVersions = filterJobFunctionVersions(\n jobFunctionVersions,\n create.usedJobNames,\n );\n await client.createWorkflow({\n workspaceId: create.workspaceId,\n workflowName: create.workflow.name,\n 
mainJobFunctionName: create.workflow.mainJob.name,\n jobFunctions: filteredVersions,\n });\n await client.setMetadata(create.metaRequest);\n }),\n ...changeSet.updates.map(async (update) => {\n const filteredVersions = filterJobFunctionVersions(\n jobFunctionVersions,\n update.usedJobNames,\n );\n await client.updateWorkflow({\n workspaceId: update.workspaceId,\n workflowName: update.workflow.name,\n mainJobFunctionName: update.workflow.mainJob.name,\n jobFunctions: filteredVersions,\n });\n await client.setMetadata(update.metaRequest);\n }),\n ]);\n } else if (phase === \"delete\") {\n // Delete workflows\n await Promise.all(\n changeSet.deletes.map((del) =>\n client.deleteWorkflow({\n workspaceId: del.workspaceId,\n workflowId: del.workflowId,\n }),\n ),\n );\n }\n}\n\n/**\n * Filter job function versions to only include those used by a workflow\n * @param allVersions - Map of job function names to versions\n * @param usedJobNames - Job names used by the workflow\n * @returns Filtered job function versions\n */\nfunction filterJobFunctionVersions(\n allVersions: { [key: string]: bigint },\n usedJobNames: string[],\n): { [key: string]: bigint } {\n const filtered: { [key: string]: bigint } = {};\n for (const jobName of usedJobNames) {\n if (allVersions[jobName] !== undefined) {\n filtered[jobName] = allVersions[jobName];\n }\n }\n return filtered;\n}\n\n/**\n * Register job functions used by any workflow.\n * Only registers jobs that are actually used (based on usedJobNames in changeSet).\n * Uses create for new jobs and update for existing jobs.\n * Sets metadata on used JobFunctions and removes metadata from unused ones.\n * @param client - Operator client instance\n * @param changeSet - Workflow change set\n * @param appName - Application name\n * @returns Map of job function names to versions\n */\nasync function registerJobFunctions(\n client: OperatorClient,\n changeSet: ChangeSet<CreateWorkflow, UpdateWorkflow, DeleteWorkflow>,\n appName: string,\n): 
Promise<{ [key: string]: bigint }> {\n const jobFunctionVersions: { [key: string]: bigint } = {};\n\n // Get workspaceId from the first workflow\n const firstWorkflow = changeSet.creates[0] || changeSet.updates[0];\n if (!firstWorkflow) {\n return jobFunctionVersions;\n }\n\n const { workspaceId } = firstWorkflow;\n\n // Collect all job names used by any workflow\n const allUsedJobNames = new Set<string>();\n for (const item of [...changeSet.creates, ...changeSet.updates]) {\n for (const jobName of item.usedJobNames) {\n allUsedJobNames.add(jobName);\n }\n }\n\n // Fetch existing job functions with their names\n const existingJobFunctions = await fetchAll(async (pageToken, maxPageSize) => {\n const response = await client.listWorkflowJobFunctions({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [response.jobFunctions.map((j) => j.name), response.nextPageToken];\n });\n const existingJobNamesSet = new Set(existingJobFunctions);\n\n // Register job functions in parallel\n // Use create for new jobs, update for existing jobs\n const results = await Promise.all(\n Array.from(allUsedJobNames).map(async (jobName) => {\n const isExisting = existingJobNamesSet.has(jobName);\n const response = isExisting\n ? 
await client.updateWorkflowJobFunction({\n workspaceId,\n jobFunctionName: jobName,\n scriptRef: workflowJobFunctionName(jobName),\n })\n : await client.createWorkflowJobFunction({\n workspaceId,\n jobFunctionName: jobName,\n scriptRef: workflowJobFunctionName(jobName),\n });\n\n // Set metadata to mark this JobFunction as owned by this app\n await client.setMetadata(\n await buildMetaRequest(jobFunctionTrn(workspaceId, jobName), appName),\n );\n\n return { jobName, version: response.jobFunction?.version };\n }),\n );\n\n for (const { jobName, version } of results) {\n if (version) {\n jobFunctionVersions[jobName] = version;\n }\n }\n\n // Remove metadata from JobFunctions that are no longer used by this app\n const unusedJobFunctions = existingJobFunctions.filter(\n (jobName) => !allUsedJobNames.has(jobName),\n );\n await Promise.all(\n unusedJobFunctions.map(async (jobName) => {\n const { metadata } = await client.getMetadata({\n trn: jobFunctionTrn(workspaceId, jobName),\n });\n const label = metadata?.labels?.[sdkNameLabelKey];\n\n // Only remove metadata if owned by this app\n if (label === appName) {\n await client.setMetadata({\n trn: jobFunctionTrn(workspaceId, jobName),\n labels: { [sdkNameLabelKey]: \"\" }, // Remove ownership\n });\n }\n }),\n );\n\n return jobFunctionVersions;\n}\n\ntype CreateWorkflow = {\n name: string;\n workspaceId: string;\n workflow: Workflow;\n usedJobNames: string[];\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype UpdateWorkflow = {\n name: string;\n workspaceId: string;\n workflow: Workflow;\n usedJobNames: string[];\n metaRequest: MessageInitShape<typeof SetMetadataRequestSchema>;\n};\n\ntype DeleteWorkflow = {\n name: string;\n workspaceId: string;\n workflowId: string;\n};\n\nfunction workflowTrn(workspaceId: string, name: string) {\n return `trn:v1:workspace:${workspaceId}:workflow:${name}`;\n}\n\nfunction jobFunctionTrn(workspaceId: string, name: string) {\n return 
`trn:v1:workspace:${workspaceId}:workflow_job_function:${name}`;\n}\n\n/**\n * Plan workflow changes and job functions based on current and desired state.\n * @param client - Operator client instance\n * @param workspaceId - Workspace ID\n * @param appName - Application name\n * @param workflows - Parsed workflows\n * @param mainJobDeps - Main job dependencies by workflow\n * @returns Planned workflow changes\n */\nexport async function planWorkflow(\n client: OperatorClient,\n workspaceId: string,\n appName: string,\n workflows: Record<string, Workflow>,\n mainJobDeps: Record<string, string[]>,\n) {\n const changeSet = createChangeSet<CreateWorkflow, UpdateWorkflow, DeleteWorkflow>(\"Workflows\");\n const conflicts: OwnerConflict[] = [];\n const unmanaged: UnmanagedResource[] = [];\n const resourceOwners = new Set<string>();\n\n // Fetch existing workflows from API\n const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {\n const response = await client.listWorkflows({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [response.workflows.map((w) => ({ id: w.id, name: w.name })), response.nextPageToken];\n });\n const existingWorkflows: WithLabel<(typeof withoutLabel)[number]> = {};\n await Promise.all(\n withoutLabel.map(async (resource) => {\n const { metadata } = await client.getMetadata({\n trn: workflowTrn(workspaceId, resource.name),\n });\n existingWorkflows[resource.name] = {\n resource,\n label: metadata?.labels[sdkNameLabelKey],\n };\n }),\n );\n\n for (const workflow of Object.values(workflows)) {\n const existing = existingWorkflows[workflow.name];\n const metaRequest = await buildMetaRequest(workflowTrn(workspaceId, workflow.name), appName);\n // Get jobs used by this workflow from mainJobDeps\n const usedJobNames = mainJobDeps[workflow.mainJob.name];\n if (!usedJobNames) {\n throw new Error(\n `Job \"${workflow.mainJob.name}\" (mainJob of workflow \"${workflow.name}\") was not found.\\n\\n` +\n `Possible causes:\\n` 
+\n ` - The job is not exported as a named export\\n` +\n ` - The file containing the job is not included in workflow.files glob pattern\\n\\n` +\n `Solution:\\n` +\n ` export const ${workflow.mainJob.name} = createWorkflowJob({ name: \"${workflow.mainJob.name}\", ... })`,\n );\n }\n\n if (existing) {\n if (!existing.label) {\n unmanaged.push({\n resourceType: \"Workflow\",\n resourceName: workflow.name,\n });\n } else if (existing.label !== appName) {\n conflicts.push({\n resourceType: \"Workflow\",\n resourceName: workflow.name,\n currentOwner: existing.label,\n });\n }\n\n changeSet.updates.push({\n name: workflow.name,\n workspaceId,\n workflow,\n usedJobNames,\n metaRequest,\n });\n delete existingWorkflows[workflow.name];\n } else {\n changeSet.creates.push({\n name: workflow.name,\n workspaceId,\n workflow,\n usedJobNames,\n metaRequest,\n });\n }\n }\n\n Object.values(existingWorkflows).forEach((existing) => {\n const label = existing?.label;\n if (label && label !== appName) {\n resourceOwners.add(label);\n }\n // Only delete workflows managed by this application\n if (label === appName) {\n changeSet.deletes.push({\n name: existing!.resource.name,\n workspaceId,\n workflowId: existing!.resource.id,\n });\n }\n });\n\n changeSet.print();\n return { changeSet, conflicts, unmanaged, resourceOwners, appName };\n}\n","import * as fs from \"node:fs\";\nimport { findUpSync } from \"find-up-simple\";\nimport * as path from \"pathe\";\nimport { hashFile } from \"@/cli/cache/hasher\";\nimport { createCacheManager } from \"@/cli/cache/manager\";\nimport { loadApplication, type Application } from \"@/cli/services/application\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { readPackageJson } from 
\"@/cli/shared/package-json\";\nimport { generateUserTypes } from \"@/cli/shared/type-generator\";\nimport { withSpan } from \"@/cli/telemetry\";\nimport { PluginManager } from \"@/plugin/manager\";\nimport { applyApplication, planApplication } from \"./application\";\nimport { applyAuth, planAuth } from \"./auth\";\nimport {\n confirmImportantResourceDeletion,\n confirmOwnerConflict,\n confirmUnmanagedResources,\n type ImportantResourceDeletion,\n type OwnerConflict,\n type UnmanagedResource,\n} from \"./confirm\";\nimport { applyExecutor, planExecutor } from \"./executor\";\nimport {\n applyFunctionRegistry,\n collectFunctionEntries,\n planFunctionRegistry,\n} from \"./function-registry\";\nimport { applyIdP, planIdP } from \"./idp\";\nimport { applyPipeline, planPipeline } from \"./resolver\";\nimport { applySecretManager, planSecretManager } from \"./secret-manager\";\nimport { applyStaticWebsite, planStaticWebsite } from \"./staticwebsite\";\nimport { applyTailorDB, planTailorDB } from \"./tailordb\";\nimport { applyWorkflow, planWorkflow } from \"./workflow\";\nimport type { OperatorClient } from \"@/cli/shared/client\";\nimport type { LoadedConfig } from \"@/cli/shared/config-loader\";\n\nexport interface ApplyOptions {\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n dryRun?: boolean;\n yes?: boolean;\n noSchemaCheck?: boolean;\n noCache?: boolean;\n cleanCache?: boolean;\n // NOTE(remiposo): Provide an option to run build-only for testing purposes.\n // This could potentially be exposed as a CLI option.\n buildOnly?: boolean;\n}\n\nexport interface PlanContext {\n client: OperatorClient;\n workspaceId: string;\n application: Readonly<Application>;\n forRemoval: boolean;\n config: LoadedConfig;\n noSchemaCheck?: boolean;\n}\n\nexport type ApplyPhase = \"create-update\" | \"delete\" | \"delete-resources\" | \"delete-services\";\n\n/**\n * Apply the configured application to the Tailor platform.\n * @param options - Options for apply 
execution\n * @returns Promise that resolves when apply completes\n */\nexport async function apply(options?: ApplyOptions) {\n return withSpan(\"apply\", async (rootSpan) => {\n rootSpan.setAttribute(\"apply.dry_run\", options?.dryRun ?? false);\n\n // Phase 0: Build\n const { config, application, workflowBuildResult, buildOnly } = await withSpan(\n \"build\",\n async () => {\n const { config, plugins } = await withSpan(\"build.loadConfig\", () =>\n loadConfig(options?.configPath),\n );\n\n const dryRun = options?.dryRun ?? false;\n const buildOnly =\n options?.buildOnly ?? process.env.TAILOR_PLATFORM_SDK_BUILD_ONLY === \"true\";\n const noCache = options?.noCache ?? false;\n\n // Initialize cache manager\n const packageJson = await readPackageJson();\n const cacheDir = path.resolve(getDistDir(), \"cache\");\n if (options?.cleanCache) {\n fs.rmSync(cacheDir, { recursive: true, force: true });\n logger.info(\"Bundle cache cleaned\");\n }\n const configDir = path.dirname(config.path);\n const lockfilePath =\n findUpSync(\"pnpm-lock.yaml\", { cwd: configDir }) ??\n findUpSync(\"package-lock.json\", { cwd: configDir }) ??\n findUpSync(\"yarn.lock\", { cwd: configDir }) ??\n findUpSync(\"bun.lock\", { cwd: configDir });\n const cacheManager = createCacheManager({\n enabled: !noCache,\n cacheDir,\n sdkVersion: packageJson.version ?? \"unknown\",\n lockfileHash: lockfilePath ? 
hashFile(lockfilePath) : undefined,\n });\n\n let pluginManager: PluginManager | undefined;\n if (plugins.length > 0) {\n pluginManager = new PluginManager(plugins);\n }\n\n await withSpan(\"build.generateUserTypes\", () =>\n generateUserTypes({ config, configPath: config.path }),\n );\n\n let application: Application;\n let workflowBuildResult: Awaited<ReturnType<typeof loadApplication>>[\"workflowBuildResult\"];\n try {\n const result = await withSpan(\"build.loadApplication\", () =>\n loadApplication({ config, pluginManager, bundleCache: cacheManager.bundleCache }),\n );\n application = result.application;\n workflowBuildResult = result.workflowBuildResult;\n } finally {\n // Persist even on partial failure: successfully built bundles\n // are cached so the next run only rebuilds what failed.\n cacheManager.finalize();\n }\n\n return { config, plugins, application, workflowBuildResult, dryRun, buildOnly };\n },\n );\n if (buildOnly) return;\n\n // Initialize client\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n rootSpan.setAttribute(\"app.name\", application.name);\n rootSpan.setAttribute(\"workspace.id\", workspaceId);\n\n // Collect function entries from bundled scripts (after build, before plan)\n const workflowService = application.workflowService;\n const functionEntries = collectFunctionEntries(application, workflowService?.jobs ?? []);\n\n const dryRun = options?.dryRun ?? false;\n const yes = options?.yes ?? 
false;\n\n // Phase 1: Plan\n const {\n functionRegistry,\n tailorDB,\n staticWebsite,\n idp,\n auth,\n pipeline,\n app,\n executor,\n workflow,\n secretManager,\n } = await withSpan(\"plan\", async () => {\n const ctx: PlanContext = {\n client,\n workspaceId,\n application,\n forRemoval: false,\n config,\n noSchemaCheck: options?.noSchemaCheck,\n };\n const [\n functionRegistry,\n tailorDB,\n staticWebsite,\n idp,\n auth,\n pipeline,\n app,\n executor,\n workflow,\n secretManager,\n ] = await Promise.all([\n withSpan(\"plan.functionRegistry\", () =>\n planFunctionRegistry(client, workspaceId, application.name, functionEntries),\n ),\n withSpan(\"plan.tailorDB\", () => planTailorDB(ctx)),\n withSpan(\"plan.staticWebsite\", () => planStaticWebsite(ctx)),\n withSpan(\"plan.idp\", () => planIdP(ctx)),\n withSpan(\"plan.auth\", () => planAuth(ctx)),\n withSpan(\"plan.pipeline\", () => planPipeline(ctx)),\n withSpan(\"plan.application\", () => planApplication(ctx)),\n withSpan(\"plan.executor\", () => planExecutor(ctx)),\n withSpan(\"plan.workflow\", () =>\n planWorkflow(\n client,\n workspaceId,\n application.name,\n workflowService?.workflows ?? {},\n workflowBuildResult?.mainJobDeps ?? 
{},\n ),\n ),\n withSpan(\"plan.secretManager\", () => planSecretManager(ctx)),\n ]);\n return {\n functionRegistry,\n tailorDB,\n staticWebsite,\n idp,\n auth,\n pipeline,\n app,\n executor,\n workflow,\n secretManager,\n };\n });\n\n // Phase 1b: Confirm\n await withSpan(\"confirm\", async () => {\n const allConflicts: OwnerConflict[] = [\n ...functionRegistry.conflicts,\n ...tailorDB.conflicts,\n ...staticWebsite.conflicts,\n ...idp.conflicts,\n ...auth.conflicts,\n ...pipeline.conflicts,\n ...executor.conflicts,\n ...workflow.conflicts,\n ...secretManager.conflicts,\n ];\n await confirmOwnerConflict(allConflicts, application.name, yes);\n\n const allUnmanaged: UnmanagedResource[] = [\n ...functionRegistry.unmanaged,\n ...tailorDB.unmanaged,\n ...staticWebsite.unmanaged,\n ...idp.unmanaged,\n ...auth.unmanaged,\n ...pipeline.unmanaged,\n ...executor.unmanaged,\n ...workflow.unmanaged,\n ...secretManager.unmanaged,\n ];\n await confirmUnmanagedResources(allUnmanaged, application.name, yes);\n\n const importantDeletions: ImportantResourceDeletion[] = [];\n for (const del of tailorDB.changeSet.type.deletes) {\n importantDeletions.push({\n resourceType: \"TailorDB type\",\n resourceName: del.name,\n });\n }\n for (const del of staticWebsite.changeSet.deletes) {\n importantDeletions.push({\n resourceType: \"StaticWebsite\",\n resourceName: del.name,\n });\n }\n for (const del of auth.changeSet.oauth2Client.deletes) {\n importantDeletions.push({\n resourceType: \"OAuth2 client\",\n resourceName: del.name,\n });\n }\n for (const replace of auth.changeSet.oauth2Client.replaces) {\n importantDeletions.push({\n resourceType: \"OAuth2 client (client type change)\",\n resourceName: replace.name,\n });\n }\n for (const del of secretManager.vaultChangeSet.deletes) {\n importantDeletions.push({\n resourceType: \"Secret Manager vault\",\n resourceName: del.name,\n });\n }\n for (const del of secretManager.secretChangeSet.deletes) {\n importantDeletions.push({\n resourceType: 
\"Secret Manager secret\",\n resourceName: del.name,\n });\n }\n await confirmImportantResourceDeletion(importantDeletions, yes);\n\n // Delete renamed applications\n const resourceOwners = new Set([\n ...functionRegistry.resourceOwners,\n ...tailorDB.resourceOwners,\n ...staticWebsite.resourceOwners,\n ...idp.resourceOwners,\n ...auth.resourceOwners,\n ...pipeline.resourceOwners,\n ...executor.resourceOwners,\n ...workflow.resourceOwners,\n ...secretManager.resourceOwners,\n ]);\n const conflictOwners = new Set(allConflicts.map((c) => c.currentOwner));\n const emptyApps = [...conflictOwners].filter((owner) => !resourceOwners.has(owner));\n for (const emptyApp of emptyApps) {\n app.deletes.push({\n name: emptyApp,\n request: {\n workspaceId,\n applicationName: emptyApp,\n },\n });\n }\n });\n\n if (dryRun) {\n logger.info(\"Dry run enabled. No changes applied.\");\n return;\n }\n\n // Phase 2: Create/Update services that Application depends on\n await withSpan(\"apply.createUpdateServices\", async () => {\n await applySecretManager(client, secretManager, \"create-update\", application);\n await applyFunctionRegistry(client, workspaceId, functionRegistry, \"create-update\");\n await applyStaticWebsite(client, staticWebsite, \"create-update\");\n await applyIdP(client, idp, \"create-update\");\n await applyAuth(client, auth, \"create-update\");\n await applyTailorDB(client, tailorDB, \"create-update\");\n await applyPipeline(client, pipeline, \"create-update\");\n });\n\n // Phase 3: Delete subgraph resources (types, resolvers, etc.) 
before Application update\n await withSpan(\"apply.deleteSubgraphResources\", async () => {\n await applyPipeline(client, pipeline, \"delete-resources\");\n await applyAuth(client, auth, \"delete-resources\");\n await applyIdP(client, idp, \"delete-resources\");\n });\n\n // Phase 4: Create/Update Application\n await withSpan(\"apply.createUpdateApplication\", () =>\n applyApplication(client, app, \"create-update\"),\n );\n\n // Phase 5: Create/Update services that depend on Application\n await withSpan(\"apply.createUpdateDependentServices\", async () => {\n await applyExecutor(client, executor, \"create-update\");\n await applyWorkflow(client, workflow, \"create-update\");\n });\n\n // Phase 6: Delete services that depend on Application\n await withSpan(\"apply.deleteDependentServices\", async () => {\n await applyWorkflow(client, workflow, \"delete\");\n await applyExecutor(client, executor, \"delete\");\n await applyStaticWebsite(client, staticWebsite, \"delete\");\n await applySecretManager(client, secretManager, \"delete\");\n });\n\n // Phase 7: Delete Application\n await withSpan(\"apply.deleteApplication\", () => applyApplication(client, app, \"delete\"));\n\n // Phase 8: Delete subgraph services (after Application is deleted, no reference errors)\n await withSpan(\"apply.deleteSubgraphServices\", async () => {\n await applyPipeline(client, pipeline, \"delete-services\");\n await applyAuth(client, auth, \"delete-services\");\n await applyIdP(client, idp, \"delete-services\");\n await applyTailorDB(client, tailorDB, \"delete-services\");\n });\n\n // Phase 9: Delete unused function registry entries\n await withSpan(\"apply.cleanup\", () =>\n applyFunctionRegistry(client, workspaceId, functionRegistry, \"delete\"),\n );\n\n logger.success(\"Successfully applied changes.\");\n });\n}\n","import {\n ExecutorJobStatus,\n ExecutorTargetType,\n ExecutorTriggerType,\n} from \"@tailor-proto/tailor/v1/executor_resource_pb\";\nimport { FunctionExecution_Status } from 
\"@tailor-proto/tailor/v1/function_resource_pb\";\nimport { styles } from \"@/cli/shared/logger\";\n\n// ============================================================================\n// Executor Job Status\n// ============================================================================\n\n/**\n * Colorize executor job status string.\n * @param status - Executor job status string\n * @returns Colorized status string\n */\nexport function colorizeExecutorJobStatus(status: string): string {\n switch (status) {\n case \"PENDING\":\n return styles.dim(status);\n case \"RUNNING\":\n return styles.info(status);\n case \"SUCCESS\":\n return styles.success(status);\n case \"FAILED\":\n return styles.error(status);\n case \"CANCELED\":\n return styles.warning(status);\n default:\n return status;\n }\n}\n\n/**\n * Check if executor job status is terminal.\n * @param status - Executor job status enum value\n * @returns True if status is terminal\n */\nexport function isExecutorJobTerminalStatus(status: ExecutorJobStatus): boolean {\n return (\n status === ExecutorJobStatus.SUCCESS ||\n status === ExecutorJobStatus.FAILED ||\n status === ExecutorJobStatus.CANCELED\n );\n}\n\n/**\n * Parse executor job status string to enum.\n * @param status - Status string to parse\n * @returns ExecutorJobStatus enum value\n */\nexport function parseExecutorJobStatus(status: string): ExecutorJobStatus {\n const upperStatus = status.toUpperCase();\n switch (upperStatus) {\n case \"PENDING\":\n return ExecutorJobStatus.PENDING;\n case \"RUNNING\":\n return ExecutorJobStatus.RUNNING;\n case \"SUCCESS\":\n return ExecutorJobStatus.SUCCESS;\n case \"FAILED\":\n return ExecutorJobStatus.FAILED;\n case \"CANCELED\":\n return ExecutorJobStatus.CANCELED;\n default:\n throw new Error(\n `Invalid status: ${status}. 
Valid values: PENDING, RUNNING, SUCCESS, FAILED, CANCELED`,\n );\n }\n}\n\n// ============================================================================\n// Function Execution Status\n// ============================================================================\n\n/**\n * Colorize function execution status string.\n * @param status - Function execution status string\n * @returns Colorized status string\n */\nexport function colorizeFunctionExecutionStatus(status: string): string {\n switch (status) {\n case \"RUNNING\":\n return styles.info(status);\n case \"SUCCESS\":\n return styles.success(status);\n case \"FAILED\":\n return styles.error(status);\n default:\n return status;\n }\n}\n\n/**\n * Check if function execution status is terminal.\n * @param status - Function execution status enum value\n * @returns True if status is terminal\n */\nexport function isFunctionExecutionTerminalStatus(status: FunctionExecution_Status): boolean {\n return status === FunctionExecution_Status.SUCCESS || status === FunctionExecution_Status.FAILED;\n}\n\n// ============================================================================\n// Executor Target Type\n// ============================================================================\n\n/**\n * Convert executor target type enum to string.\n * @param targetType - Executor target type enum value\n * @returns Target type string representation\n */\nexport function executorTargetTypeToString(targetType: ExecutorTargetType): string {\n switch (targetType) {\n case ExecutorTargetType.WEBHOOK:\n return \"WEBHOOK\";\n case ExecutorTargetType.TAILOR_GRAPHQL:\n return \"GRAPHQL\";\n case ExecutorTargetType.FUNCTION:\n return \"FUNCTION\";\n case ExecutorTargetType.JOB_FUNCTION:\n return \"JOB_FUNCTION\";\n case ExecutorTargetType.WORKFLOW:\n return \"WORKFLOW\";\n default:\n return \"UNSPECIFIED\";\n }\n}\n\n/**\n * Convert executor trigger type enum to string.\n * @param triggerType - Executor trigger type enum value\n * @returns 
Trigger type string representation\n */\nexport function executorTriggerTypeToString(triggerType: ExecutorTriggerType): string {\n switch (triggerType) {\n case ExecutorTriggerType.SCHEDULE:\n return \"SCHEDULE\";\n case ExecutorTriggerType.EVENT:\n return \"EVENT\";\n case ExecutorTriggerType.INCOMING_WEBHOOK:\n return \"INCOMING_WEBHOOK\";\n default:\n return \"UNSPECIFIED\";\n }\n}\n","import { timestampDate } from \"@bufbuild/protobuf/wkt\";\nimport { ExecutorJobStatus } from \"@tailor-proto/tailor/v1/executor_resource_pb\";\nimport { executorTargetTypeToString, executorTriggerTypeToString } from \"./status\";\nimport type {\n ExecutorExecutor,\n ExecutorJob,\n ExecutorJobAttempt,\n} from \"@tailor-proto/tailor/v1/executor_resource_pb\";\n\nexport interface ExecutorJobListInfo {\n id: string;\n executorName: string;\n status: string;\n createdAt: string;\n}\n\nexport interface ExecutorJobInfo {\n id: string;\n executorName: string;\n status: string;\n scheduledAt: string;\n createdAt: string;\n updatedAt: string;\n}\n\nexport interface ExecutorJobAttemptInfo {\n id: string;\n jobId: string;\n status: string;\n error: string;\n startedAt: string;\n finishedAt: string;\n operationReference: string;\n}\n\nfunction executorJobStatusToString(status: ExecutorJobStatus): string {\n switch (status) {\n case ExecutorJobStatus.PENDING:\n return \"PENDING\";\n case ExecutorJobStatus.RUNNING:\n return \"RUNNING\";\n case ExecutorJobStatus.SUCCESS:\n return \"SUCCESS\";\n case ExecutorJobStatus.FAILED:\n return \"FAILED\";\n case ExecutorJobStatus.CANCELED:\n return \"CANCELED\";\n default:\n return \"UNSPECIFIED\";\n }\n}\n\n/**\n * Transform ExecutorJob to ExecutorJobListInfo for list display.\n * @param job - ExecutorJob from proto\n * @returns Executor job list info\n */\nexport function toExecutorJobListInfo(job: ExecutorJob): ExecutorJobListInfo {\n return {\n id: job.id,\n executorName: job.executorName,\n status: executorJobStatusToString(job.status),\n createdAt: 
job.createdAt ? timestampDate(job.createdAt).toISOString() : \"N/A\",\n };\n}\n\n/**\n * Transform ExecutorJob to ExecutorJobInfo for detail display.\n * @param job - ExecutorJob from proto\n * @returns Executor job info\n */\nexport function toExecutorJobInfo(job: ExecutorJob): ExecutorJobInfo {\n return {\n id: job.id,\n executorName: job.executorName,\n status: executorJobStatusToString(job.status),\n scheduledAt: job.scheduledAt ? timestampDate(job.scheduledAt).toISOString() : \"N/A\",\n createdAt: job.createdAt ? timestampDate(job.createdAt).toISOString() : \"N/A\",\n updatedAt: job.updatedAt ? timestampDate(job.updatedAt).toISOString() : \"N/A\",\n };\n}\n\n/**\n * Transform ExecutorJobAttempt to ExecutorJobAttemptInfo.\n * @param attempt - ExecutorJobAttempt from proto\n * @returns Executor job attempt info\n */\nexport function toExecutorJobAttemptInfo(attempt: ExecutorJobAttempt): ExecutorJobAttemptInfo {\n return {\n id: attempt.id,\n jobId: attempt.jobId,\n status: executorJobStatusToString(attempt.status),\n error: attempt.error || \"\",\n startedAt: attempt.startedAt ? timestampDate(attempt.startedAt).toISOString() : \"N/A\",\n finishedAt: attempt.finishedAt ? 
timestampDate(attempt.finishedAt).toISOString() : \"N/A\",\n operationReference: attempt.operationReference || \"\",\n };\n}\n\n// ============================================================================\n// Executor (ExecutorExecutor) Transform Functions\n// ============================================================================\n\nexport interface ExecutorListInfo {\n name: string;\n triggerType: string;\n targetType: string;\n disabled: boolean;\n}\n\nexport interface ExecutorInfo {\n name: string;\n description: string;\n triggerType: string;\n targetType: string;\n disabled: boolean;\n triggerConfig: string;\n targetConfig: string;\n}\n\n/**\n * Format trigger type for human-readable display.\n * Examples:\n * - event with typeName \"User\" and action \"created\" → \"event: User created\"\n * - event with resolverName \"myResolver\" → \"event: myResolver executed\"\n * - schedule with frequency \"0 12 * * *\" and timezone \"UTC\" → \"schedule: 0 12 * * * (UTC)\"\n * - incomingWebhook → \"webhook\"\n * @param executor - Executor from proto\n * @returns Formatted trigger type string\n */\nfunction formatTriggerType(executor: ExecutorExecutor): string {\n const config = executor.triggerConfig?.config;\n if (!config || config.case === undefined) {\n return executorTriggerTypeToString(executor.triggerType);\n }\n\n switch (config.case) {\n case \"schedule\":\n return `schedule: ${config.value.frequency} (${config.value.timezone})`;\n case \"event\":\n return formatEventTrigger(config.value.eventType, config.value.condition?.expr);\n case \"incomingWebhook\":\n return \"webhook\";\n default:\n return executorTriggerTypeToString(executor.triggerType);\n }\n}\n\n/**\n * Format event trigger for display by parsing condition to extract type/resolver name.\n * @param eventType - Event type string (e.g., \"tailordb.type_record.created\")\n * @param condition - Condition expression that may contain args.typeName or args.resolverName\n * @returns Formatted string 
(e.g., \"event: User created\")\n */\nfunction formatEventTrigger(eventType: string, condition?: string): string {\n const parts = eventType.split(\".\");\n if (parts.length < 3) {\n return `event: ${eventType}`;\n }\n\n const [service, resource, action] = parts;\n\n // Try to extract name from condition\n if (condition) {\n // Match args.typeName === \"User\" or args.typeName === 'User'\n const typeNameMatch = condition.match(/args\\.typeName\\s*===?\\s*[\"']([^\"']+)[\"']/);\n if (typeNameMatch) {\n return `event: ${typeNameMatch[1]} ${action}`;\n }\n\n // Match args.resolverName === \"myResolver\" or args.resolverName === 'myResolver'\n const resolverNameMatch = condition.match(/args\\.resolverName\\s*===?\\s*[\"']([^\"']+)[\"']/);\n if (resolverNameMatch) {\n return `event: ${resolverNameMatch[1]} ${action}`;\n }\n }\n\n // Fallback: use service, resource and action\n return `event: ${service} ${resource} ${action}`;\n}\n\n/**\n * Format trigger config for display.\n * @param executor - Executor from proto\n * @returns Formatted trigger config\n */\nfunction formatTriggerConfig(executor: ExecutorExecutor): Record<string, unknown> {\n const config = executor.triggerConfig?.config;\n if (!config || config.case === undefined) {\n return {};\n }\n\n switch (config.case) {\n case \"schedule\":\n return {\n timezone: config.value.timezone,\n frequency: config.value.frequency,\n };\n case \"event\":\n return {\n eventType: config.value.eventType,\n condition: config.value.condition?.expr || \"\",\n };\n case \"incomingWebhook\":\n return {\n secret: config.value.secret ? 
\"***\" : \"\",\n };\n default:\n return {};\n }\n}\n\n/**\n * Format target config for display.\n * @param executor - Executor from proto\n * @returns Formatted target config\n */\nfunction formatTargetConfig(executor: ExecutorExecutor): Record<string, unknown> {\n const config = executor.targetConfig?.config;\n if (!config || config.case === undefined) {\n return {};\n }\n\n switch (config.case) {\n case \"webhook\":\n return {\n url: config.value.url?.expr || \"\",\n headers: config.value.headers.length,\n };\n case \"tailorGraphql\":\n return {\n appName: config.value.appName,\n query: config.value.query,\n };\n case \"function\":\n return {\n name: config.value.name,\n };\n case \"workflow\":\n return {\n workflowName: config.value.workflowName,\n };\n default:\n return {};\n }\n}\n\n/**\n * Transform ExecutorExecutor to ExecutorListInfo for list display.\n * @param executor - Executor from proto\n * @returns Executor list info\n */\nexport function toExecutorListInfo(executor: ExecutorExecutor): ExecutorListInfo {\n return {\n name: executor.name,\n triggerType: formatTriggerType(executor),\n targetType: executorTargetTypeToString(executor.targetType),\n disabled: executor.disabled,\n };\n}\n\n/**\n * Transform ExecutorExecutor to ExecutorInfo for detail display.\n * @param executor - Executor from proto\n * @returns Executor info\n */\nexport function toExecutorInfo(executor: ExecutorExecutor): ExecutorInfo {\n return {\n name: executor.name,\n description: executor.description,\n triggerType: formatTriggerType(executor),\n targetType: executorTargetTypeToString(executor.targetType),\n disabled: executor.disabled,\n triggerConfig: JSON.stringify(formatTriggerConfig(executor), null, 2),\n targetConfig: JSON.stringify(formatTargetConfig(executor), null, 2),\n };\n}\n","import { Code, ConnectError } from \"@connectrpc/connect\";\nimport { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, 
workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { type ExecutorInfo, toExecutorInfo } from \"./transform\";\n\ntype ExecutorLike = {\n name: string;\n};\n\nconst nameArgs = {\n name: arg(z.string(), {\n positional: true,\n description: \"Executor name\",\n }),\n};\n\nexport type GetExecutorTypedOptions<E extends ExecutorLike = ExecutorLike> = {\n executor: E;\n workspaceId?: string;\n profile?: string;\n};\n\n/**\n * @deprecated Use GetExecutorTypedOptions instead.\n */\nexport interface GetExecutorOptions {\n name: string;\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * Resolve an executor by name.\n * @param client - Operator client\n * @param workspaceId - Workspace ID\n * @param name - Executor name\n * @returns Resolved executor\n */\nasync function resolveExecutor(\n client: Awaited<ReturnType<typeof initOperatorClient>>,\n workspaceId: string,\n name: string,\n) {\n const { executor } = await client.getExecutorExecutor({\n workspaceId,\n name,\n });\n if (!executor) {\n throw new Error(`Executor '${name}' not found.`);\n }\n return executor;\n}\n\n/**\n * Get an executor by name and return CLI-friendly info.\n * @param options - Executor lookup options\n * @returns Executor information\n */\nexport async function getExecutor<E extends ExecutorLike>(\n options: GetExecutorTypedOptions<E>,\n): Promise<ExecutorInfo>;\nexport async function getExecutor(options: GetExecutorOptions): Promise<ExecutorInfo>;\nexport async function getExecutor<E extends ExecutorLike>(\n options: GetExecutorOptions | GetExecutorTypedOptions<E>,\n): Promise<ExecutorInfo> {\n // Discriminant: legacy options have top-level 'name', typed options use 'executor'.\n const name = \"name\" in options ? 
options.name : options.executor.name;\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n try {\n const executor = await resolveExecutor(client, workspaceId, name);\n return toExecutorInfo(executor);\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`Executor '${name}' not found.`);\n }\n throw error;\n }\n}\n\nexport const getCommand = defineCommand({\n name: \"get\",\n description: \"Get executor details\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n ...nameArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const executor = await getExecutor({\n name: args.name,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n logger.out(executor, {\n display: {\n triggerConfig: null,\n targetConfig: null,\n },\n });\n }),\n});\n","import { timestampDate } from \"@bufbuild/protobuf/wkt\";\nimport { formatDistanceToNowStrict } from \"date-fns\";\n// eslint-disable-next-line no-restricted-imports\nimport { getBorderCharacters, table } from \"table\";\nimport type { Timestamp } from \"@bufbuild/protobuf/wkt\";\nimport type { TableUserConfig } from \"table\";\n\n/**\n * Format a protobuf Timestamp to ISO string.\n * @param timestamp - Protobuf timestamp\n * @returns Date object or null if invalid\n */\nexport function formatTimestamp(timestamp: Timestamp | undefined): Date | null {\n if (!timestamp) {\n return null;\n }\n const date = timestampDate(timestamp);\n if (Number.isNaN(date.getTime())) {\n return null;\n }\n return date;\n}\n\n/**\n * Formats a table with consistent single-line border style.\n * Use this instead of importing `table` directly.\n * @param data - Table data\n * @param config - Table configuration\n * @returns 
Formatted table string\n */\nexport function formatTable(data: unknown[][], config?: TableUserConfig): string {\n return table(data, {\n ...config,\n border: getBorderCharacters(\"norc\"),\n });\n}\n\n/**\n * Formats a key-value table with single-line border style.\n * @param data - Key-value pairs\n * @returns Formatted key-value table string\n */\nexport function formatKeyValueTable(data: [string, string][]): string {\n return formatTable(data, { singleLine: true });\n}\n\n/**\n * Formats a table with headers, using single-line border style.\n * Draws horizontal lines only at top, after header, and bottom.\n * @param headers - Table header labels\n * @param rows - Table rows\n * @returns Formatted table string with headers\n */\nexport function formatTableWithHeaders(headers: string[], rows: string[][]): string {\n return formatTable([headers, ...rows], {\n drawHorizontalLine: (lineIndex, rowCount) => {\n return lineIndex === 0 || lineIndex === 1 || lineIndex === rowCount;\n },\n });\n}\n\n/**\n * Format a 2D array of values into a table string.\n * @param value - Value to format\n * @returns Human-readable string representation\n */\nexport function formatValue(value: unknown): string {\n if (value === null || value === undefined) {\n return \"\";\n }\n if (Array.isArray(value)) {\n return value.map(String).join(\"\\n\");\n }\n if (typeof value === \"object\") {\n return JSON.stringify(value, null, 2);\n }\n return String(value);\n}\n\n/**\n * Format a Date or ISO timestamp string as a human-readable relative time.\n * @param value - Date object, ISO date string, or null\n * @returns Relative time (e.g., \"5 minutes ago\") or \"N/A\" for null/invalid\n */\nexport function humanizeRelativeTime(value: Date | string | null): string {\n if (value === null) {\n return \"N/A\";\n }\n const date = value instanceof Date ? value : new Date(value);\n if (Number.isNaN(date.getTime())) {\n return typeof value === \"string\" ? 
value : \"N/A\";\n }\n return formatDistanceToNowStrict(date, { addSuffix: true });\n}\n","import { FunctionExecution_Status } from \"@tailor-proto/tailor/v1/function_resource_pb\";\n\n/**\n * Convert function execution status enum to string.\n * @param status - Function execution status enum value\n * @returns Status string representation\n */\nexport function functionExecutionStatusToString(status: FunctionExecution_Status): string {\n switch (status) {\n case FunctionExecution_Status.RUNNING:\n return \"RUNNING\";\n case FunctionExecution_Status.SUCCESS:\n return \"SUCCESS\";\n case FunctionExecution_Status.FAILED:\n return \"FAILED\";\n default:\n return \"UNSPECIFIED\";\n }\n}\n","import { arg } from \"politty\";\nimport { z } from \"zod\";\nimport { durationArg } from \"@/cli/shared/args\";\n\ntype ArgsShape = Record<string, z.ZodType>;\n\nexport const nameArgs = {\n name: arg(z.string(), {\n positional: true,\n description: \"Workflow name\",\n }),\n} satisfies ArgsShape;\n\nexport const waitArgs = {\n wait: arg(z.boolean().default(false), {\n alias: \"W\",\n description: \"Wait for execution to complete\",\n }),\n interval: arg(durationArg.default(\"3s\"), {\n alias: \"i\",\n description: \"Polling interval when using --wait (e.g., '3s', '500ms', '1m')\",\n }),\n logs: arg(z.boolean().default(false), {\n alias: \"l\",\n description: \"Display job execution logs after completion (requires --wait)\",\n }),\n} satisfies ArgsShape;\n","import { WorkflowExecution_Status } from \"@tailor-proto/tailor/v1/workflow_resource_pb\";\n\n/**\n * Check if workflow execution status is terminal.\n * @param status - Workflow execution status enum value\n * @returns True if status is terminal\n */\nexport function isWorkflowExecutionTerminalStatus(status: WorkflowExecution_Status): boolean {\n return (\n status === WorkflowExecution_Status.SUCCESS ||\n status === WorkflowExecution_Status.FAILED ||\n status === WorkflowExecution_Status.PENDING_RESUME\n );\n}\n","import { 
timestampDate } from \"@bufbuild/protobuf/wkt\";\nimport {\n WorkflowExecution_Status,\n WorkflowJobExecution_Status,\n} from \"@tailor-proto/tailor/v1/workflow_resource_pb\";\nimport type {\n Workflow,\n WorkflowExecution,\n WorkflowJobExecution,\n} from \"@tailor-proto/tailor/v1/workflow_resource_pb\";\n\nexport interface WorkflowListInfo {\n name: string;\n mainJob: string;\n jobFunctions: number;\n updatedAt: Date | null;\n}\n\nexport interface WorkflowInfo {\n name: string;\n id: string;\n mainJob: string;\n jobFunctions: Record<string, string>;\n createdAt: Date | null;\n updatedAt: Date | null;\n}\n\nexport interface WorkflowJobExecutionInfo {\n id: string;\n stackedJobName: string;\n status: string;\n executionId: string;\n startedAt: Date | null;\n finishedAt: Date | null;\n}\n\nexport interface WorkflowExecutionInfo {\n id: string;\n workflowName: string;\n status: string;\n jobExecutions: number;\n startedAt: Date | null;\n finishedAt: Date | null;\n}\n\n/**\n * Convert a workflow execution status enum to a string.\n * @param status - Workflow execution status\n * @returns String representation of the status\n */\nfunction workflowExecutionStatusToString(status: WorkflowExecution_Status): string {\n switch (status) {\n case WorkflowExecution_Status.PENDING:\n return \"PENDING\";\n case WorkflowExecution_Status.PENDING_RESUME:\n return \"PENDING_RESUME\";\n case WorkflowExecution_Status.RUNNING:\n return \"RUNNING\";\n case WorkflowExecution_Status.SUCCESS:\n return \"SUCCESS\";\n case WorkflowExecution_Status.FAILED:\n return \"FAILED\";\n default:\n return \"UNSPECIFIED\";\n }\n}\n\n/**\n * Convert a workflow job execution status enum to a string.\n * @param status - Workflow job execution status\n * @returns String representation of the status\n */\nfunction workflowJobExecutionStatusToString(status: WorkflowJobExecution_Status): string {\n switch (status) {\n case WorkflowJobExecution_Status.RUNNING:\n return \"RUNNING\";\n case 
WorkflowJobExecution_Status.SUSPEND:\n return \"SUSPEND\";\n case WorkflowJobExecution_Status.SUCCESS:\n return \"SUCCESS\";\n case WorkflowJobExecution_Status.FAILED:\n return \"FAILED\";\n default:\n return \"UNSPECIFIED\";\n }\n}\n\n/**\n * Convert a Workflow proto to CLI-friendly list info.\n * @param workflow - Workflow resource\n * @returns Flattened workflow list info\n */\nexport function toWorkflowListInfo(workflow: Workflow): WorkflowListInfo {\n return {\n name: workflow.name,\n mainJob: workflow.mainJobFunctionName,\n jobFunctions: Object.keys(workflow.jobFunctions).length,\n updatedAt: workflow.updatedAt ? timestampDate(workflow.updatedAt) : null,\n };\n}\n\n/**\n * Convert a Workflow proto to detailed workflow info for CLI output.\n * @param workflow - Workflow resource\n * @returns Detailed workflow info\n */\nexport function toWorkflowInfo(workflow: Workflow): WorkflowInfo {\n const jobFunctions: Record<string, string> = {};\n for (const [name, version] of Object.entries(workflow.jobFunctions)) {\n jobFunctions[name] = version.toString();\n }\n\n return {\n name: workflow.name,\n id: workflow.id,\n mainJob: workflow.mainJobFunctionName,\n jobFunctions: jobFunctions,\n createdAt: workflow.createdAt ? timestampDate(workflow.createdAt) : null,\n updatedAt: workflow.updatedAt ? timestampDate(workflow.updatedAt) : null,\n };\n}\n\n/**\n * Convert a WorkflowJobExecution proto to CLI-friendly job execution info.\n * @param jobExecution - Workflow job execution resource\n * @returns Flattened job execution info\n */\nexport function toWorkflowJobExecutionInfo(\n jobExecution: WorkflowJobExecution,\n): WorkflowJobExecutionInfo {\n return {\n id: jobExecution.id,\n stackedJobName: jobExecution.stackedJobName,\n status: workflowJobExecutionStatusToString(jobExecution.status),\n executionId: jobExecution.executionId,\n startedAt: jobExecution.startedAt ? timestampDate(jobExecution.startedAt) : null,\n finishedAt: jobExecution.finishedAt ? 
timestampDate(jobExecution.finishedAt) : null,\n };\n}\n\n/**\n * Convert a WorkflowExecution proto to CLI-friendly execution info.\n * @param execution - Workflow execution resource\n * @returns Flattened execution info\n */\nexport function toWorkflowExecutionInfo(execution: WorkflowExecution): WorkflowExecutionInfo {\n return {\n id: execution.id,\n workflowName: execution.workflowName,\n status: workflowExecutionStatusToString(execution.status),\n jobExecutions: execution.jobExecutions.length,\n startedAt: execution.startedAt ? timestampDate(execution.startedAt) : null,\n finishedAt: execution.finishedAt ? timestampDate(execution.finishedAt) : null,\n };\n}\n","import { create } from \"@bufbuild/protobuf\";\nimport {\n Condition_Operator,\n ConditionSchema,\n FilterSchema,\n PageDirection,\n} from \"@tailor-proto/tailor/v1/resource_pb\";\nimport { WorkflowExecution_Status } from \"@tailor-proto/tailor/v1/workflow_resource_pb\";\nimport ora from \"ora\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n jsonArgs,\n parseDuration,\n withCommonArgs,\n workspaceArgs,\n} from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { formatKeyValueTable } from \"@/cli/shared/format\";\nimport { styles, logger } from \"@/cli/shared/logger\";\nimport { waitArgs } from \"./args\";\nimport { isWorkflowExecutionTerminalStatus } from \"./status\";\nimport {\n type WorkflowExecutionInfo,\n type WorkflowJobExecutionInfo,\n toWorkflowExecutionInfo,\n toWorkflowJobExecutionInfo,\n} from \"./transform\";\nimport type { FunctionExecution } from \"@tailor-proto/tailor/v1/function_resource_pb\";\n\ntype WorkflowLike = {\n name: string;\n};\n\nexport type ListWorkflowExecutionsTypedOptions<W extends WorkflowLike = WorkflowLike> = {\n workflow?: W;\n status?: string;\n workspaceId?: string;\n profile?: 
string;\n};\n\n/**\n * @deprecated Use ListWorkflowExecutionsTypedOptions instead.\n */\nexport interface ListWorkflowExecutionsOptions {\n workspaceId?: string;\n profile?: string;\n workflowName?: string;\n status?: string;\n}\n\nexport interface GetWorkflowExecutionOptions {\n executionId: string;\n workspaceId?: string;\n profile?: string;\n interval?: number;\n logs?: boolean;\n}\n\nexport interface WorkflowExecutionDetailInfo extends WorkflowExecutionInfo {\n jobDetails?: (WorkflowJobExecutionInfo & {\n logs?: string;\n result?: string;\n })[];\n}\n\nexport interface GetWorkflowExecutionResult {\n execution: WorkflowExecutionDetailInfo;\n wait: () => Promise<WorkflowExecutionDetailInfo>;\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\nfunction formatTime(date: Date): string {\n return date.toLocaleTimeString(\"en-US\", { hour12: false });\n}\n\nfunction colorizeStatus(status: WorkflowExecution_Status): string {\n const statusText = WorkflowExecution_Status[status];\n switch (status) {\n case WorkflowExecution_Status.PENDING:\n return styles.dim(statusText);\n case WorkflowExecution_Status.PENDING_RESUME:\n return styles.warning(statusText);\n case WorkflowExecution_Status.RUNNING:\n return styles.info(statusText);\n case WorkflowExecution_Status.SUCCESS:\n return styles.success(statusText);\n case WorkflowExecution_Status.FAILED:\n return styles.error(statusText);\n default:\n return statusText;\n }\n}\n\nfunction parseStatus(status: string): WorkflowExecution_Status {\n const upperStatus = status.toUpperCase();\n switch (upperStatus) {\n case \"PENDING\":\n return WorkflowExecution_Status.PENDING;\n case \"PENDING_RESUME\":\n return WorkflowExecution_Status.PENDING_RESUME;\n case \"RUNNING\":\n return WorkflowExecution_Status.RUNNING;\n case \"SUCCESS\":\n return WorkflowExecution_Status.SUCCESS;\n case \"FAILED\":\n return WorkflowExecution_Status.FAILED;\n default:\n throw new Error(\n 
`Invalid status: ${status}. Valid values: PENDING, PENDING_RESUME, RUNNING, SUCCESS, FAILED`,\n );\n }\n}\n\n/**\n * List workflow executions with optional filters.\n * @param options - Workflow execution listing options\n * @returns List of workflow executions\n */\nexport async function listWorkflowExecutions<W extends WorkflowLike>(\n options?: ListWorkflowExecutionsTypedOptions<W>,\n): Promise<WorkflowExecutionInfo[]>;\nexport async function listWorkflowExecutions(\n options?: ListWorkflowExecutionsOptions,\n): Promise<WorkflowExecutionInfo[]>;\nexport async function listWorkflowExecutions<W extends WorkflowLike>(\n options?: ListWorkflowExecutionsOptions | ListWorkflowExecutionsTypedOptions<W>,\n): Promise<WorkflowExecutionInfo[]> {\n // Discriminant: legacy options have 'workflowName', typed options use 'workflow'.\n // Note: since ListWorkflowExecutionsTypedOptions has all optional fields, TypeScript may\n // resolve a legacy-typed variable to the typed overload (skipping excess property checks).\n // Runtime behavior is correct regardless because the discriminant handles both shapes.\n const workflowName =\n options && \"workflowName\" in options\n ? options.workflowName\n : options && \"workflow\" in options\n ? 
options.workflow?.name\n : undefined;\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n const filters: ReturnType<typeof create<typeof FilterSchema>>[] = [];\n\n if (workflowName) {\n filters.push(\n create(FilterSchema, {\n condition: create(ConditionSchema, {\n field: \"workflow_name\",\n operator: Condition_Operator.EQ,\n value: { kind: { case: \"stringValue\", value: workflowName } },\n }),\n }),\n );\n }\n\n if (options?.status) {\n const statusValue = parseStatus(options.status);\n filters.push(\n create(FilterSchema, {\n condition: create(ConditionSchema, {\n field: \"status\",\n operator: Condition_Operator.EQ,\n value: { kind: { case: \"numberValue\", value: statusValue } },\n }),\n }),\n );\n }\n\n const filter =\n filters.length > 0\n ? create(FilterSchema, {\n and: filters,\n })\n : undefined;\n\n const executions = await fetchAll(async (pageToken, maxPageSize) => {\n const { executions, nextPageToken } = await client.listWorkflowExecutions({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n pageDirection: PageDirection.DESC,\n filter,\n });\n return [executions, nextPageToken];\n });\n\n return executions.map(toWorkflowExecutionInfo);\n}\n\n/**\n * Get a single workflow execution with optional logs.\n * @param options - Workflow execution lookup options\n * @returns Workflow execution with optional logs\n */\nexport async function getWorkflowExecution(\n options: GetWorkflowExecutionOptions,\n): Promise<GetWorkflowExecutionResult> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n async function 
fetchFunctionExecution(\n functionExecutionId: string,\n ): Promise<FunctionExecution | undefined> {\n try {\n const filter = create(FilterSchema, {\n condition: create(ConditionSchema, {\n field: \"id\",\n operator: Condition_Operator.EQ,\n value: { kind: { case: \"stringValue\", value: functionExecutionId } },\n }),\n });\n\n const response = await client.listFunctionExecutions({\n workspaceId,\n filter,\n pageSize: 1,\n });\n\n return response.executions[0];\n } catch {\n return undefined;\n }\n }\n\n async function fetchExecutionWithLogs(\n executionId: string,\n includeLogs: boolean,\n ): Promise<WorkflowExecutionDetailInfo> {\n const { execution } = await client.getWorkflowExecution({\n workspaceId,\n executionId,\n });\n\n if (!execution) {\n throw new Error(`Execution '${executionId}' not found.`);\n }\n\n const result: WorkflowExecutionDetailInfo = toWorkflowExecutionInfo(execution);\n\n if (includeLogs && execution.jobExecutions.length > 0) {\n result.jobDetails = await Promise.all(\n execution.jobExecutions.map(async (job) => {\n const jobInfo = toWorkflowJobExecutionInfo(job);\n if (job.executionId) {\n const functionExecution = await fetchFunctionExecution(job.executionId);\n if (functionExecution) {\n return {\n ...jobInfo,\n logs: functionExecution.logs || undefined,\n result: functionExecution.result || undefined,\n };\n }\n }\n return jobInfo;\n }),\n );\n }\n\n return result;\n }\n\n async function waitForCompletion(): Promise<WorkflowExecutionDetailInfo> {\n const interval = options.interval ?? 3000;\n\n while (true) {\n const { execution } = await client.getWorkflowExecution({\n workspaceId,\n executionId: options.executionId,\n });\n\n if (!execution) {\n throw new Error(`Execution '${options.executionId}' not found.`);\n }\n\n // Terminal states (SUCCESS, FAILED, PENDING_RESUME)\n if (isWorkflowExecutionTerminalStatus(execution.status)) {\n return await fetchExecutionWithLogs(options.executionId, options.logs ?? 
false);\n }\n\n await sleep(interval);\n }\n }\n\n const execution = await fetchExecutionWithLogs(options.executionId, options.logs ?? false);\n\n return {\n execution,\n wait: waitForCompletion,\n };\n}\n\nasync function waitWithSpinner(\n waitFn: () => Promise<WorkflowExecutionDetailInfo>,\n interval: number,\n json: boolean,\n): Promise<WorkflowExecutionDetailInfo> {\n const spinner = !json ? ora().start(\"Waiting for workflow to complete...\") : null;\n\n const updateInterval = setInterval(() => {\n if (spinner) {\n const now = formatTime(new Date());\n spinner.text = `Waiting for workflow to complete... (${now})`;\n }\n }, interval);\n\n try {\n const result = await waitFn();\n const coloredStatus = colorizeStatus(\n WorkflowExecution_Status[result.status as keyof typeof WorkflowExecution_Status],\n );\n if (result.status === \"SUCCESS\") {\n spinner?.succeed(`Completed: ${coloredStatus}`);\n } else {\n spinner?.fail(`Completed: ${coloredStatus}`);\n }\n return result;\n } finally {\n clearInterval(updateInterval);\n spinner?.stop();\n }\n}\n\n/**\n * Print a workflow execution and its logs in a human-readable format.\n * @param execution - Workflow execution detail info\n */\nexport function printExecutionWithLogs(execution: WorkflowExecutionDetailInfo): void {\n // Helper to format Date as ISO string or \"N/A\"\n const formatDate = (date: Date | null): string => (date ? 
date.toISOString() : \"N/A\");\n\n // Print execution summary\n const summaryData: [string, string][] = [\n [\"id\", execution.id],\n [\"workflowName\", execution.workflowName],\n [\"status\", execution.status],\n [\"jobExecutions\", execution.jobExecutions.toString()],\n [\"startedAt\", formatDate(execution.startedAt)],\n [\"finishedAt\", formatDate(execution.finishedAt)],\n ];\n logger.out(formatKeyValueTable(summaryData));\n\n // Print job details with logs\n if (execution.jobDetails && execution.jobDetails.length > 0) {\n logger.log(styles.bold(\"\\nJob Executions:\"));\n for (const job of execution.jobDetails) {\n logger.log(styles.info(`\\n--- ${job.stackedJobName} ---`));\n logger.log(` Status: ${job.status}`);\n logger.log(` Started: ${formatDate(job.startedAt)}`);\n logger.log(` Finished: ${formatDate(job.finishedAt)}`);\n\n if (job.logs) {\n logger.log(styles.warning(\"\\n Logs:\"));\n const logLines = job.logs.split(\"\\n\");\n for (const line of logLines) {\n logger.log(` ${line}`);\n }\n }\n\n if (job.result) {\n logger.log(styles.success(\"\\n Result:\"));\n try {\n const parsed = JSON.parse(job.result);\n logger.log(` ${JSON.stringify(parsed, null, 2).split(\"\\n\").join(\"\\n \")}`);\n } catch {\n logger.log(` ${job.result}`);\n }\n }\n }\n }\n}\n\nexport const executionsCommand = defineCommand({\n name: \"executions\",\n description: \"List or get workflow executions.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n executionId: arg(z.string().optional(), {\n positional: true,\n description: \"Execution ID (if provided, shows details)\",\n }),\n \"workflow-name\": arg(z.string().optional(), {\n alias: \"n\",\n description: \"Filter by workflow name (list mode only)\",\n }),\n status: arg(z.string().optional(), {\n alias: \"s\",\n description: \"Filter by status (list mode only)\",\n }),\n ...waitArgs,\n logs: arg(z.boolean().default(false), {\n description: \"Display job execution logs (detail mode only)\",\n }),\n 
})\n .strict(),\n run: withCommonArgs(async (args) => {\n if (args.executionId) {\n const interval = parseDuration(args.interval);\n const { execution, wait } = await getWorkflowExecution({\n executionId: args.executionId,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n interval,\n logs: args.logs,\n });\n\n if (!args.json) {\n logger.info(`Execution ID: ${execution.id}`, { mode: \"stream\" });\n }\n\n const result = args.wait ? await waitWithSpinner(wait, interval, args.json) : execution;\n\n if (args.logs && !args.json) {\n printExecutionWithLogs(result);\n } else {\n logger.out(result);\n }\n } else {\n const executions = await listWorkflowExecutions({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n workflowName: args[\"workflow-name\"],\n status: args.status,\n });\n logger.out(executions);\n }\n }),\n});\n","import { Code, ConnectError } from \"@connectrpc/connect\";\nimport { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { nameArgs } from \"./args\";\nimport { type WorkflowInfo, toWorkflowInfo } from \"./transform\";\n\ntype WorkflowLike = {\n name: string;\n};\n\nexport type GetWorkflowTypedOptions<W extends WorkflowLike = WorkflowLike> = {\n workflow: W;\n workspaceId?: string;\n profile?: string;\n};\n\n/**\n * @deprecated Use GetWorkflowTypedOptions instead.\n */\nexport interface GetWorkflowOptions {\n name: string;\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * Resolve a workflow definition by name.\n * @param client - Operator client\n * @param workspaceId - Workspace ID\n * @param name - Workflow name\n * @returns Resolved workflow\n */\nexport async function resolveWorkflow(\n client: 
Awaited<ReturnType<typeof initOperatorClient>>,\n workspaceId: string,\n name: string,\n) {\n const { workflow } = await client.getWorkflowByName({\n workspaceId,\n workflowName: name,\n });\n if (!workflow) {\n throw new Error(`Workflow '${name}' not found.`);\n }\n return workflow;\n}\n\n/**\n * Get a workflow by name and return CLI-friendly info.\n * @param options - Workflow lookup options\n * @returns Workflow information\n */\nexport async function getWorkflow<W extends WorkflowLike>(\n options: GetWorkflowTypedOptions<W>,\n): Promise<WorkflowInfo>;\nexport async function getWorkflow(options: GetWorkflowOptions): Promise<WorkflowInfo>;\nexport async function getWorkflow<W extends WorkflowLike>(\n options: GetWorkflowOptions | GetWorkflowTypedOptions<W>,\n): Promise<WorkflowInfo> {\n // Discriminant: legacy options have top-level 'name', typed options use 'workflow'.\n // Note: passing a workflow object directly (e.g., getWorkflow(myWorkflow)) would match\n // the legacy branch due to structural typing, but still works correctly since it reads .name.\n const name = \"name\" in options ? 
options.name : options.workflow.name;\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n try {\n const workflow = await resolveWorkflow(client, workspaceId, name);\n return toWorkflowInfo(workflow);\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`Workflow '${name}' not found.`);\n }\n throw error;\n }\n}\n\nexport const getCommand = defineCommand({\n name: \"get\",\n description: \"Get workflow details.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n ...nameArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const workflow = await getWorkflow({\n name: args.name,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n logger.out(workflow);\n }),\n});\n","import { create } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport { AuthInvokerSchema } from \"@tailor-proto/tailor/v1/auth_resource_pb\";\nimport {\n WorkflowExecution_Status,\n WorkflowJobExecution_Status,\n} from \"@tailor-proto/tailor/v1/workflow_resource_pb\";\nimport ora from \"ora\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n deploymentArgs,\n jsonArgs,\n parseDuration,\n withCommonArgs,\n} from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { nameArgs, waitArgs } from \"./args\";\nimport { getWorkflowExecution, printExecutionWithLogs } from \"./executions\";\nimport { resolveWorkflow } from \"./get\";\nimport { type 
WorkflowExecutionInfo, toWorkflowExecutionInfo } from \"./transform\";\nimport type { WorkflowExecution } from \"@tailor-proto/tailor/v1/workflow_resource_pb\";\nimport type { Jsonifiable } from \"type-fest\";\n\ntype WorkflowLike = {\n name: string;\n mainJob: {\n body: unknown;\n };\n};\n\ntype AuthInvoker<M extends string = string> = {\n namespace: string;\n machineUserName: M;\n};\n\ntype WorkflowInput<W extends WorkflowLike> = W extends WorkflowLike\n ? W[\"mainJob\"][\"body\"] extends (...args: infer Args) => unknown\n ? Args[0]\n : never\n : never;\n\ntype StartWorkflowArgOptionForSingleWorkflow<W extends WorkflowLike> = WorkflowLike extends W\n ? { arg?: Jsonifiable }\n : undefined extends WorkflowInput<W>\n ? { arg?: WorkflowInput<W> }\n : { arg: WorkflowInput<W> };\n\ntype StartWorkflowArgOption<W extends WorkflowLike> = W extends WorkflowLike\n ? StartWorkflowArgOptionForSingleWorkflow<W>\n : never;\n\n/**\n * @deprecated Use StartWorkflowTypedOptions instead.\n */\nexport interface StartWorkflowOptions {\n name: string;\n machineUser: string;\n arg?: Jsonifiable;\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n interval?: number;\n}\n\ntype StartWorkflowTypedBaseOptions<W extends WorkflowLike> = {\n workflow: W;\n authInvoker: AuthInvoker<string>;\n workspaceId?: string;\n profile?: string;\n interval?: number;\n};\n\nexport type StartWorkflowTypedOptions<W extends WorkflowLike = WorkflowLike> =\n W extends WorkflowLike ? 
StartWorkflowTypedBaseOptions<W> & StartWorkflowArgOption<W> : never;\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\nfunction formatTime(date: Date): string {\n return date.toLocaleTimeString(\"en-US\", { hour12: false });\n}\n\nfunction colorizeStatus(status: WorkflowExecution_Status): string {\n const statusText = WorkflowExecution_Status[status];\n switch (status) {\n case WorkflowExecution_Status.PENDING:\n return styles.dim(statusText);\n case WorkflowExecution_Status.PENDING_RESUME:\n return styles.warning(statusText);\n case WorkflowExecution_Status.RUNNING:\n return styles.info(statusText);\n case WorkflowExecution_Status.SUCCESS:\n return styles.success(statusText);\n case WorkflowExecution_Status.FAILED:\n return styles.error(statusText);\n default:\n return statusText;\n }\n}\n\nexport interface WaitForExecutionOptions {\n client: Awaited<ReturnType<typeof initOperatorClient>>;\n workspaceId: string;\n executionId: string;\n interval: number;\n showProgress?: boolean;\n trackJobs?: boolean;\n}\n\n/**\n * Wait for a workflow execution to reach a terminal state, optionally showing progress.\n * @param options - Wait options\n * @returns Final workflow execution info\n */\nexport async function waitForExecution(\n options: WaitForExecutionOptions,\n): Promise<WorkflowExecutionInfo> {\n const { client, workspaceId, executionId, interval, showProgress, trackJobs } = options;\n\n let lastStatus: WorkflowExecution_Status | undefined;\n let lastRunningJobs: string | undefined;\n const spinner = showProgress\n ? 
ora({\n indent: 2,\n }).start(\"Waiting for workflow to complete...\")\n : null;\n\n try {\n while (true) {\n const { execution } = await client.getWorkflowExecution({\n workspaceId,\n executionId,\n });\n\n if (!execution) {\n spinner?.fail(`Execution '${executionId}' not found.`);\n throw new Error(`Execution '${executionId}' not found.`);\n }\n\n const now = formatTime(new Date());\n const coloredStatus = colorizeStatus(execution.status);\n\n // Show workflow status change (persist previous line)\n if (execution.status !== lastStatus) {\n if (showProgress) {\n spinner?.stop();\n logger.info(`Status: ${coloredStatus}`, {\n mode: \"stream\",\n indent: 2,\n });\n spinner?.start(`Waiting for workflow to complete...`);\n }\n lastStatus = execution.status;\n }\n\n // Show job execution details when running (optional)\n if (trackJobs && execution.status === WorkflowExecution_Status.RUNNING) {\n const runningJobs = getRunningJobs(execution);\n if (runningJobs && runningJobs !== lastRunningJobs) {\n if (showProgress) {\n spinner?.stop();\n logger.info(`Job | ${runningJobs}: ${coloredStatus}`, {\n mode: \"stream\",\n indent: 2,\n });\n spinner?.start(`Waiting for workflow to complete...`);\n }\n lastRunningJobs = runningJobs;\n }\n }\n\n if (spinner) {\n spinner.text = `Waiting for workflow to complete... 
(${now})`;\n }\n\n // Terminal states: SUCCESS, FAILED, or PENDING_RESUME\n if (isTerminalStatus(execution.status)) {\n if (execution.status === WorkflowExecution_Status.SUCCESS) {\n spinner?.succeed(`Completed: ${coloredStatus}`);\n } else if (execution.status === WorkflowExecution_Status.FAILED) {\n spinner?.fail(`Completed: ${coloredStatus}`);\n } else {\n spinner?.warn(`Completed: ${coloredStatus}`);\n }\n return toWorkflowExecutionInfo(execution);\n }\n\n await sleep(interval);\n }\n } catch (error) {\n spinner?.stop();\n throw error;\n }\n}\n\nfunction getRunningJobs(execution: WorkflowExecution): string {\n return execution.jobExecutions\n .filter((job) => job.status === WorkflowJobExecution_Status.RUNNING)\n .map((job) => job.stackedJobName)\n .join(\", \");\n}\n\nfunction isTerminalStatus(status: WorkflowExecution_Status): boolean {\n return (\n status === WorkflowExecution_Status.SUCCESS ||\n status === WorkflowExecution_Status.FAILED ||\n status === WorkflowExecution_Status.PENDING_RESUME\n );\n}\n\nexport interface WaitOptions {\n showProgress?: boolean;\n}\n\nexport interface StartWorkflowResultWithWait {\n executionId: string;\n wait: (options?: WaitOptions) => Promise<WorkflowExecutionInfo>;\n}\n\ninterface StartWorkflowCoreOptions {\n client: Awaited<ReturnType<typeof initOperatorClient>>;\n workspaceId: string;\n workflowName: string;\n authInvoker: AuthInvoker<string>;\n arg?: unknown;\n interval?: number;\n}\n\nasync function startWorkflowCore(\n options: StartWorkflowCoreOptions,\n): Promise<StartWorkflowResultWithWait> {\n const { client, workspaceId, workflowName } = options;\n\n try {\n const workflow = await resolveWorkflow(client, workspaceId, workflowName);\n const authInvoker = create(AuthInvokerSchema, options.authInvoker);\n const arg =\n options.arg === undefined\n ? undefined\n : typeof options.arg === \"string\"\n ? 
options.arg\n : JSON.stringify(options.arg);\n\n const { executionId } = await client.testStartWorkflow({\n workspaceId,\n workflowId: workflow.id,\n authInvoker,\n arg,\n });\n\n return {\n executionId,\n wait: (waitOptions?: WaitOptions) =>\n waitForExecution({\n client,\n workspaceId,\n executionId,\n interval: options.interval ?? 3000,\n showProgress: waitOptions?.showProgress,\n trackJobs: true,\n }),\n };\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`Workflow '${workflowName}' not found.`);\n }\n throw error;\n }\n}\n\nasync function startWorkflowByName(\n options: StartWorkflowOptions,\n): Promise<StartWorkflowResultWithWait> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n const { config } = await loadConfig(options.configPath);\n const { application } = await client.getApplication({\n workspaceId,\n applicationName: config.name,\n });\n if (!application?.authNamespace) {\n throw new Error(`Application ${config.name} does not have an auth configuration.`);\n }\n\n return await startWorkflowCore({\n client,\n workspaceId,\n workflowName: options.name,\n authInvoker: {\n namespace: application.authNamespace,\n machineUserName: options.machineUser,\n },\n arg: options.arg,\n interval: options.interval,\n });\n}\n\n/**\n * Start a workflow and return a handle to wait for completion.\n * @param options - Start options\n * @returns Start result with wait helper\n */\nexport async function startWorkflow<W extends WorkflowLike>(\n options: StartWorkflowTypedOptions<W>,\n): Promise<StartWorkflowResultWithWait>;\nexport async function startWorkflow(\n options: StartWorkflowOptions,\n): Promise<StartWorkflowResultWithWait>;\nexport async function startWorkflow<W extends WorkflowLike>(\n 
options: StartWorkflowOptions | StartWorkflowTypedOptions<W>,\n): Promise<StartWorkflowResultWithWait> {\n // Keep backward compatibility: if both legacy and typed keys are present, prefer legacy shape.\n if (\"name\" in options) {\n return await startWorkflowByName(options);\n }\n\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n return await startWorkflowCore({\n client,\n workspaceId,\n workflowName: options.workflow.name,\n authInvoker: options.authInvoker,\n arg: options.arg,\n interval: options.interval,\n });\n}\n\nexport const startCommand = defineCommand({\n name: \"start\",\n description: \"Start a workflow execution.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n ...nameArgs,\n machineuser: arg(z.string(), {\n alias: \"m\",\n description: \"Machine user name\",\n }),\n arg: arg(z.string().optional(), {\n alias: \"a\",\n description: \"Workflow argument (JSON string)\",\n }),\n ...waitArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const { executionId, wait } = await startWorkflowByName({\n name: args.name,\n machineUser: args.machineuser,\n arg: args.arg,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n interval: parseDuration(args.interval),\n });\n\n logger.info(`Execution ID: ${executionId}`, { mode: \"stream\" });\n\n if (args.wait) {\n const result = await wait({ showProgress: true });\n if (args.logs && !args.json) {\n const { execution } = await getWorkflowExecution({\n executionId,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n logs: true,\n });\n printExecutionWithLogs(execution);\n } else {\n logger.out(result);\n }\n } else {\n logger.out({ executionId });\n }\n }),\n});\n","import { setTimeout } from 
\"timers/promises\";\nimport { create } from \"@bufbuild/protobuf\";\nimport { Code, ConnectError } from \"@connectrpc/connect\";\nimport {\n ExecutorJobStatus,\n ExecutorTargetType,\n} from \"@tailor-proto/tailor/v1/executor_resource_pb\";\nimport { FunctionExecution_Status } from \"@tailor-proto/tailor/v1/function_resource_pb\";\nimport {\n Condition_Operator,\n ConditionSchema,\n FilterSchema,\n PageDirection,\n} from \"@tailor-proto/tailor/v1/resource_pb\";\nimport ora from \"ora\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n durationArg,\n jsonArgs,\n parseDuration,\n positiveIntArg,\n withCommonArgs,\n workspaceArgs,\n} from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { formatKeyValueTable } from \"@/cli/shared/format\";\nimport { functionExecutionStatusToString } from \"@/cli/shared/function-execution\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { getWorkflowExecution } from \"../workflow/executions\";\nimport { waitForExecution } from \"../workflow/start\";\nimport {\n colorizeExecutorJobStatus,\n colorizeFunctionExecutionStatus,\n executorTargetTypeToString,\n isFunctionExecutionTerminalStatus,\n isExecutorJobTerminalStatus,\n parseExecutorJobStatus,\n} from \"./status\";\nimport {\n type ExecutorJobListInfo,\n type ExecutorJobInfo,\n type ExecutorJobAttemptInfo,\n toExecutorJobListInfo,\n toExecutorJobInfo,\n toExecutorJobAttemptInfo,\n} from \"./transform\";\n\ntype ExecutorLike = {\n name: string;\n};\n\nexport type ListExecutorJobsTypedOptions<E extends ExecutorLike = ExecutorLike> = {\n executor: E;\n status?: string;\n limit?: number;\n workspaceId?: string;\n profile?: string;\n};\n\nexport type GetExecutorJobTypedOptions<E extends ExecutorLike = ExecutorLike> = {\n executor: E;\n jobId: string;\n attempts?: boolean;\n workspaceId?: 
string;\n profile?: string;\n};\n\nexport type WatchExecutorJobTypedOptions<E extends ExecutorLike = ExecutorLike> = {\n executor: E;\n jobId: string;\n workspaceId?: string;\n profile?: string;\n interval?: number;\n logs?: boolean;\n};\n\n/**\n * @deprecated Use ListExecutorJobsTypedOptions instead.\n */\nexport interface ListExecutorJobsOptions {\n executorName: string;\n status?: string;\n limit?: number;\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * @deprecated Use GetExecutorJobTypedOptions instead.\n */\nexport interface GetExecutorJobOptions {\n executorName: string;\n jobId: string;\n attempts?: boolean;\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * @deprecated Use WatchExecutorJobTypedOptions instead.\n */\nexport interface WatchExecutorJobOptions {\n executorName: string;\n jobId: string;\n workspaceId?: string;\n profile?: string;\n interval?: number;\n logs?: boolean;\n}\n\nexport interface ExecutorJobDetailInfo extends ExecutorJobInfo {\n attempts?: ExecutorJobAttemptInfo[];\n}\n\nexport interface WorkflowJobLog {\n jobName: string;\n logs?: string;\n result?: string;\n}\n\nexport interface WatchExecutorJobResult {\n job: ExecutorJobDetailInfo;\n targetType: string;\n workflowExecutionId?: string;\n workflowStatus?: string;\n workflowJobLogs?: WorkflowJobLog[];\n functionExecutionId?: string;\n functionStatus?: string;\n functionLogs?: string;\n}\n\nfunction formatTime(date: Date): string {\n return date.toLocaleTimeString(\"en-US\", { hour12: false });\n}\n\n/**\n * List executor jobs for a given executor.\n * @param options - Options for listing executor jobs\n * @returns List of executor job information\n */\nexport async function listExecutorJobs<E extends ExecutorLike>(\n options: ListExecutorJobsTypedOptions<E>,\n): Promise<ExecutorJobListInfo[]>;\nexport async function listExecutorJobs(\n options: ListExecutorJobsOptions,\n): Promise<ExecutorJobListInfo[]>;\nexport async function listExecutorJobs<E extends 
ExecutorLike>(\n options: ListExecutorJobsOptions | ListExecutorJobsTypedOptions<E>,\n): Promise<ExecutorJobListInfo[]> {\n // Discriminant: legacy options have top-level 'executorName', typed options use 'executor'.\n const executorName = \"executorName\" in options ? options.executorName : options.executor.name;\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n const filters: ReturnType<typeof create<typeof FilterSchema>>[] = [];\n\n if (options.status) {\n const statusValue = parseExecutorJobStatus(options.status);\n filters.push(\n create(FilterSchema, {\n condition: create(ConditionSchema, {\n field: \"status\",\n operator: Condition_Operator.EQ,\n value: { kind: { case: \"numberValue\", value: statusValue } },\n }),\n }),\n );\n }\n\n const filter = filters.length > 0 ? 
create(FilterSchema, { and: filters }) : undefined;\n\n try {\n const { jobs } = await client.listExecutorJobs({\n workspaceId,\n executorName,\n pageSize: options.limit,\n pageDirection: PageDirection.DESC,\n filter,\n });\n\n return jobs.map(toExecutorJobListInfo);\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`Executor '${executorName}' not found.`);\n }\n throw error;\n }\n}\n\n/**\n * Get details of a specific executor job.\n * @param options - Options for getting executor job details\n * @returns Executor job detail information\n */\nexport async function getExecutorJob<E extends ExecutorLike>(\n options: GetExecutorJobTypedOptions<E>,\n): Promise<ExecutorJobDetailInfo>;\nexport async function getExecutorJob(\n options: GetExecutorJobOptions,\n): Promise<ExecutorJobDetailInfo>;\nexport async function getExecutorJob<E extends ExecutorLike>(\n options: GetExecutorJobOptions | GetExecutorJobTypedOptions<E>,\n): Promise<ExecutorJobDetailInfo> {\n // Discriminant: legacy options have top-level 'executorName', typed options use 'executor'.\n const executorName = \"executorName\" in options ? 
options.executorName : options.executor.name;\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n try {\n const { job } = await client.getExecutorJob({\n workspaceId,\n executorName,\n jobId: options.jobId,\n });\n\n if (!job) {\n throw new Error(`Job '${options.jobId}' not found.`);\n }\n\n const jobInfo = toExecutorJobInfo(job);\n\n if (options.attempts) {\n const attempts = await fetchAll(async (pageToken, maxPageSize) => {\n const { attempts, nextPageToken } = await client.listExecutorJobAttempts({\n workspaceId,\n jobId: options.jobId,\n pageToken,\n pageSize: maxPageSize,\n pageDirection: PageDirection.DESC,\n });\n return [attempts, nextPageToken];\n });\n\n return {\n ...jobInfo,\n attempts: attempts.map(toExecutorJobAttemptInfo),\n };\n }\n\n return jobInfo;\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`Job '${options.jobId}' not found for executor '${executorName}'.`);\n }\n throw error;\n }\n}\n\n/**\n * Watch an executor job until completion, including downstream executions.\n * @param options - Options for watching executor job\n * @returns Result including job details and downstream execution info\n */\nexport async function watchExecutorJob<E extends ExecutorLike>(\n options: WatchExecutorJobTypedOptions<E>,\n): Promise<WatchExecutorJobResult>;\nexport async function watchExecutorJob(\n options: WatchExecutorJobOptions,\n): Promise<WatchExecutorJobResult>;\nexport async function watchExecutorJob<E extends ExecutorLike>(\n options: WatchExecutorJobOptions | WatchExecutorJobTypedOptions<E>,\n): Promise<WatchExecutorJobResult> {\n // Discriminant: legacy options have top-level 'executorName', typed options use 'executor'.\n const executorName = \"executorName\" in options ? 
options.executorName : options.executor.name;\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n const interval = options.interval ?? 3000;\n const spinner = ora().start(\"Waiting for executor job to complete...\");\n\n try {\n // Get executor details to determine target type\n const { executor } = await client.getExecutorExecutor({\n workspaceId,\n name: executorName,\n });\n\n if (!executor) {\n throw new Error(`Executor '${executorName}' not found.`);\n }\n\n const targetType = executor.targetType;\n const targetTypeStr = executorTargetTypeToString(targetType);\n\n // Phase 1: Wait for executor job to complete\n let job: Awaited<ReturnType<typeof client.getExecutorJob>>[\"job\"];\n while (true) {\n const response = await client.getExecutorJob({\n workspaceId,\n executorName,\n jobId: options.jobId,\n });\n\n job = response.job;\n if (!job) {\n throw new Error(`Job '${options.jobId}' not found.`);\n }\n\n if (isExecutorJobTerminalStatus(job.status)) {\n break;\n }\n\n spinner.text = `Waiting for executor job... 
(${formatTime(new Date())})`;\n await setTimeout(interval);\n }\n\n const jobInfo = toExecutorJobInfo(job);\n const coloredStatus = colorizeExecutorJobStatus(jobInfo.status);\n\n if (job.status === ExecutorJobStatus.SUCCESS) {\n spinner.succeed(`Executor job completed: ${coloredStatus}`);\n } else {\n spinner.fail(`Executor job completed: ${coloredStatus}`);\n }\n\n // Get attempts to find operationReference\n const attempts = await fetchAll(async (pageToken, maxPageSize) => {\n const { attempts, nextPageToken } = await client.listExecutorJobAttempts({\n workspaceId,\n jobId: options.jobId,\n pageToken,\n pageSize: maxPageSize,\n pageDirection: PageDirection.DESC,\n });\n return [attempts, nextPageToken];\n });\n\n const attemptInfos = attempts.map(toExecutorJobAttemptInfo);\n const jobDetail: ExecutorJobDetailInfo = {\n ...jobInfo,\n attempts: attemptInfos,\n };\n\n const latestAttempt = attemptInfos[0];\n const operationReference = latestAttempt?.operationReference;\n\n // Phase 2: Based on target type, wait for the downstream execution\n if (operationReference) {\n switch (targetType) {\n case ExecutorTargetType.WORKFLOW: {\n // Wait for workflow execution with progress display\n spinner.stop();\n\n try {\n // Use waitForExecution with progress display (same as workflow start)\n const executionResult = await waitForExecution({\n client,\n workspaceId,\n executionId: operationReference,\n interval,\n showProgress: true,\n trackJobs: true,\n });\n\n // Fetch logs if requested\n let workflowJobLogs: WorkflowJobLog[] | undefined;\n if (options.logs) {\n const { execution: execWithLogs } = await getWorkflowExecution({\n executionId: operationReference,\n workspaceId: options.workspaceId,\n profile: options.profile,\n logs: true,\n });\n if (execWithLogs.jobDetails) {\n workflowJobLogs = execWithLogs.jobDetails\n .filter((job) => job.logs || job.result)\n .map((job) => ({\n jobName: job.stackedJobName || job.id,\n logs: job.logs,\n result: job.result,\n }));\n }\n 
}\n\n return {\n job: jobDetail,\n targetType: targetTypeStr,\n workflowExecutionId: operationReference,\n workflowStatus: executionResult.status,\n workflowJobLogs,\n };\n } catch (error) {\n logger.warn(\n `Could not track workflow execution: ${error instanceof Error ? error.message : error}`,\n );\n return {\n job: jobDetail,\n targetType: targetTypeStr,\n workflowExecutionId: operationReference,\n };\n }\n }\n\n case ExecutorTargetType.FUNCTION:\n case ExecutorTargetType.JOB_FUNCTION:\n {\n // Wait for function execution\n spinner.start(`Waiting for function execution ${operationReference}...`);\n\n try {\n while (true) {\n const { execution } = await client.getFunctionExecution({\n workspaceId,\n executionId: operationReference,\n });\n\n if (!execution) {\n throw new Error(`Function execution '${operationReference}' not found.`);\n }\n\n if (isFunctionExecutionTerminalStatus(execution.status)) {\n const statusStr = functionExecutionStatusToString(execution.status);\n const coloredFnStatus = colorizeFunctionExecutionStatus(statusStr);\n if (execution.status === FunctionExecution_Status.SUCCESS) {\n spinner.succeed(`Function execution completed: ${coloredFnStatus}`);\n } else {\n spinner.fail(`Function execution completed: ${coloredFnStatus}`);\n }\n return {\n job: jobDetail,\n targetType: targetTypeStr,\n functionExecutionId: operationReference,\n functionStatus: statusStr,\n functionLogs: options.logs ? execution.logs || undefined : undefined,\n };\n }\n\n spinner.text = `Waiting for function execution... (${formatTime(new Date())})`;\n await setTimeout(interval);\n }\n } catch (error) {\n spinner.warn(\n `Could not track function execution: ${error instanceof Error ? 
error.message : error}`,\n );\n return {\n job: jobDetail,\n targetType: targetTypeStr,\n functionExecutionId: operationReference,\n };\n }\n }\n break;\n default:\n // WEBHOOK, TAILOR_GRAPHQL, or unknown - no downstream execution to track\n break;\n }\n }\n\n return { job: jobDetail, targetType: targetTypeStr };\n } finally {\n spinner.stop();\n }\n}\n\nfunction printJobWithAttempts(job: ExecutorJobDetailInfo): void {\n // Print job summary\n const summaryData: [string, string][] = [\n [\"id\", job.id],\n [\"executorName\", job.executorName],\n [\"status\", job.status],\n [\"scheduledAt\", job.scheduledAt],\n [\"createdAt\", job.createdAt],\n [\"updatedAt\", job.updatedAt],\n ];\n logger.log(formatKeyValueTable(summaryData));\n\n // Print attempts\n if (job.attempts && job.attempts.length > 0) {\n logger.log(styles.bold(\"\\nAttempts:\"));\n for (const attempt of job.attempts) {\n logger.log(styles.info(`\\n--- Attempt ${attempt.id} ---`));\n logger.log(` Status: ${attempt.status}`);\n logger.log(` Started: ${attempt.startedAt}`);\n logger.log(` Finished: ${attempt.finishedAt}`);\n\n if (attempt.error) {\n logger.log(styles.error(\"\\n Error:\"));\n const errorLines = attempt.error.split(\"\\n\");\n for (const line of errorLines) {\n logger.log(` ${line}`);\n }\n }\n }\n }\n}\n\nexport const jobsCommand = defineCommand({\n name: \"jobs\",\n description: \"List or get executor jobs.\",\n examples: [\n {\n cmd: \"my-executor\",\n desc: \"List jobs for an executor (default: 50 jobs)\",\n },\n { cmd: \"my-executor --limit 10\", desc: \"Limit the number of jobs\" },\n { cmd: \"my-executor -s RUNNING\", desc: \"Filter by status\" },\n { cmd: \"my-executor <job-id>\", desc: \"Get job details\" },\n {\n cmd: \"my-executor <job-id> --attempts\",\n desc: \"Get job details with attempts\",\n },\n { cmd: \"my-executor <job-id> -W\", desc: \"Wait for job to complete\" },\n {\n cmd: \"my-executor <job-id> -W -l\",\n desc: \"Wait for job with logs\",\n },\n ],\n args: z\n 
.object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n executorName: arg(z.string(), {\n positional: true,\n description: \"Executor name\",\n }),\n jobId: arg(z.string().optional(), {\n positional: true,\n description: \"Job ID (if provided, shows job details)\",\n }),\n status: arg(z.string().optional(), {\n alias: \"s\",\n description:\n \"Filter by status (PENDING, RUNNING, SUCCESS, FAILED, CANCELED) (list mode only)\",\n }),\n attempts: arg(z.boolean().default(false), {\n description: \"Show job attempts (only with job ID) (detail mode only)\",\n }),\n wait: arg(z.boolean().default(false), {\n alias: \"W\",\n description:\n \"Wait for job completion and downstream execution (workflow/function) if applicable (detail mode only)\",\n }),\n interval: arg(durationArg.default(\"3s\"), {\n alias: \"i\",\n description: \"Polling interval when using --wait (e.g., '3s', '500ms', '1m')\",\n }),\n logs: arg(z.boolean().default(false), {\n alias: \"l\",\n description: \"Display function execution logs after completion (requires --wait)\",\n }),\n limit: arg(positiveIntArg.optional(), {\n description: \"Maximum number of jobs to list (default: 50, max: 1000) (list mode only)\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n if (args.jobId) {\n if (args.wait) {\n const result = await watchExecutorJob({\n executorName: args.executorName,\n jobId: args.jobId,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n interval: parseDuration(args.interval),\n logs: args.logs,\n });\n\n // Print result\n if (!args.json) {\n logger.log(styles.bold(`Target Type: ${result.targetType}\\n`));\n printJobWithAttempts(result.job);\n if (result.workflowExecutionId) {\n logger.log(styles.bold(\"\\nWorkflow Execution:\"));\n logger.log(` ID: ${result.workflowExecutionId}`);\n if (result.workflowStatus) {\n logger.log(` Status: ${result.workflowStatus}`);\n }\n if (result.workflowJobLogs && result.workflowJobLogs.length > 0) {\n for (const jobLog of 
result.workflowJobLogs) {\n logger.log(styles.bold(`\\n Job: ${jobLog.jobName}`));\n if (jobLog.logs) {\n logger.log(styles.dim(\" Logs:\"));\n for (const line of jobLog.logs.split(\"\\n\")) {\n logger.log(` ${line}`);\n }\n }\n if (jobLog.result) {\n logger.log(styles.dim(\" Result:\"));\n try {\n const parsed = JSON.parse(jobLog.result);\n const formatted = JSON.stringify(parsed, null, 2);\n for (const line of formatted.split(\"\\n\")) {\n logger.log(` ${line}`);\n }\n } catch {\n logger.log(` ${jobLog.result}`);\n }\n }\n }\n }\n }\n if (result.functionExecutionId) {\n logger.log(styles.bold(\"\\nFunction Execution:\"));\n logger.log(` ID: ${result.functionExecutionId}`);\n if (result.functionStatus) {\n logger.log(` Status: ${result.functionStatus}`);\n }\n if (result.functionLogs) {\n logger.log(styles.dim(\" Logs:\"));\n for (const line of result.functionLogs.split(\"\\n\")) {\n logger.log(` ${line}`);\n }\n }\n }\n } else {\n logger.out(result);\n }\n return;\n }\n\n const job = await getExecutorJob({\n executorName: args.executorName,\n jobId: args.jobId,\n attempts: args.attempts,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n if (args.attempts && !args.json) {\n printJobWithAttempts(job);\n } else {\n logger.out(job);\n }\n } else {\n if (args.wait) {\n logger.warn(\"--wait flag is ignored in list mode. 
Specify a job ID to wait.\");\n }\n const jobs = await listExecutorJobs({\n executorName: args.executorName,\n status: args.status,\n limit: args.limit,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n logger.out(jobs);\n }\n }),\n});\n","import { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { type ExecutorListInfo, toExecutorListInfo } from \"./transform\";\n\nexport interface ListExecutorsOptions {\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * List executors in the workspace and return CLI-friendly info.\n * @param options - Executor listing options\n * @returns List of executors\n */\nexport async function listExecutors(options?: ListExecutorsOptions): Promise<ExecutorListInfo[]> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n const executors = await fetchAll(async (pageToken, maxPageSize) => {\n const { executors, nextPageToken } = await client.listExecutorExecutors({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [executors, nextPageToken];\n });\n\n return executors.map((e) => toExecutorListInfo(e));\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List all executors\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const executors = await listExecutors({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n if 
(executors.length === 0) {\n logger.info(\"No executors found.\");\n return;\n }\n\n logger.out(executors, {\n display: {\n disabled: (v) => (v ? styles.warning(\"true\") : styles.dim(\"false\")),\n },\n });\n\n // Show hint if there are webhook executors (non-JSON mode only)\n if (!args.json) {\n const hasWebhook = executors.some((e) => e.triggerType === \"webhook\");\n if (hasWebhook) {\n logger.info(\"To see webhook URLs, run: tailor-sdk executor webhook list\");\n }\n }\n }),\n});\n","import { Code, ConnectError } from \"@connectrpc/connect\";\nimport { ExecutorTriggerType } from \"@tailor-proto/tailor/v1/executor_resource_pb\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n durationArg,\n jsonArgs,\n parseDuration,\n withCommonArgs,\n workspaceArgs,\n} from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { watchExecutorJob } from \"./jobs\";\nimport { executorTriggerTypeToString } from \"./status\";\nimport type { IncomingWebhookTrigger, ScheduleTriggerInput } from \"@/types/executor.generated\";\nimport type { JsonObject } from \"@bufbuild/protobuf\";\n\n/**\n * Schema for JSON string validation (object only)\n * Transforms the string to a parsed object\n */\nconst jsonDataArg = z\n .string()\n .transform((val) => {\n try {\n return JSON.parse(val) as unknown;\n } catch {\n throw new Error(`Invalid JSON data: ${val}. 
Please provide a valid JSON string.`);\n }\n })\n .refine((v): v is JsonObject => typeof v === \"object\" && v !== null && !Array.isArray(v), {\n message: \"JSON data must be an object, not an array or primitive value\",\n });\n\n/**\n * Schema for header string validation (format: \"Key: Value\")\n * Transforms the string to an object with key and value properties\n */\nconst headerArg = z\n .string()\n .superRefine((val, ctx) => {\n if (!val.includes(\":\")) {\n ctx.addIssue({\n code: z.ZodIssueCode.custom,\n message: `Invalid header format: '${val}'. Expected format: 'Key: Value'`,\n });\n }\n })\n .transform((val) => {\n const colonIndex = val.indexOf(\":\");\n return {\n key: val.slice(0, colonIndex).trim(),\n value: val.slice(colonIndex + 1).trim(),\n };\n })\n .refine((h) => h.key.length > 0, {\n message: \"Header name cannot be empty\",\n });\n\ntype ManualTrigger = IncomingWebhookTrigger | ScheduleTriggerInput;\n\ntype ManualTriggerExecutor<T extends ManualTrigger = ManualTrigger> = T extends ManualTrigger\n ? {\n name: string;\n trigger: T;\n }\n : never;\n\ntype TriggerExecutorBaseOptions<E extends ManualTriggerExecutor> = {\n executor: E;\n workspaceId?: string;\n profile?: string;\n};\n\n/**\n * @deprecated Use TriggerExecutorTypedOptions instead.\n */\nexport interface TriggerExecutorOptions {\n executorName: string;\n payload?: JsonObject;\n workspaceId?: string;\n profile?: string;\n}\n\nexport type TriggerExecutorTypedOptions<E extends ManualTriggerExecutor = ManualTriggerExecutor> =\n E extends ManualTriggerExecutor<IncomingWebhookTrigger>\n ? 
TriggerExecutorBaseOptions<E> & { payload?: JsonObject }\n : TriggerExecutorBaseOptions<E> & { payload?: never };\n\nexport interface TriggerExecutorResult {\n jobId?: string;\n}\n\nasync function triggerExecutorByName(\n options: TriggerExecutorOptions,\n): Promise<TriggerExecutorResult> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n try {\n const response = await client.triggerExecutor({\n workspaceId,\n executorName: options.executorName,\n payload: options.payload,\n });\n\n return { jobId: response.jobId };\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`Executor '${options.executorName}' not found.`);\n }\n if (error instanceof ConnectError && error.code === Code.InvalidArgument) {\n throw new Error(`Invalid argument: ${error.message}`);\n }\n throw error;\n }\n}\n\n/**\n * Trigger an executor and return the job ID.\n * @param options - Options for triggering executor\n * @returns Result containing the job ID if available\n */\nexport async function triggerExecutor<E extends ManualTriggerExecutor>(\n options: TriggerExecutorTypedOptions<E>,\n): Promise<TriggerExecutorResult>;\nexport async function triggerExecutor(\n options: TriggerExecutorOptions,\n): Promise<TriggerExecutorResult>;\nexport async function triggerExecutor<E extends ManualTriggerExecutor>(\n options: TriggerExecutorOptions | TriggerExecutorTypedOptions<E>,\n): Promise<TriggerExecutorResult> {\n // Keep backward compatibility: if both legacy and typed keys are present, prefer legacy shape.\n if (\"executorName\" in options) {\n return await triggerExecutorByName(options);\n }\n\n if (options.executor.trigger.kind !== \"incomingWebhook\" && options.payload !== undefined) {\n throw new Error(\n `Executor 
'${options.executor.name}' has '${options.executor.trigger.kind}' trigger type. ` +\n `The payload is only available for 'incomingWebhook' trigger type.`,\n );\n }\n\n return await triggerExecutorByName({\n executorName: options.executor.name,\n payload: options.payload,\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n}\n\nexport const triggerCommand = defineCommand({\n name: \"trigger\",\n description: \"Trigger an executor manually.\",\n notes: `Only executors with \\`INCOMING_WEBHOOK\\` or \\`SCHEDULE\\` trigger types can be triggered manually.\nExecutors with \\`EVENT\\` trigger types (such as \\`recordCreated\\`, \\`recordUpdated\\`, \\`recordDeleted\\`) cannot be triggered manually.\n\nThe \\`--data\\` and \\`--header\\` options are only available for \\`INCOMING_WEBHOOK\\` trigger type.\n\n**Downstream Execution Tracking**\n\nWhen using \\`--wait\\`, the CLI tracks not only the executor job but also any downstream executions:\n\n- **Workflow targets**: Waits for the workflow execution to complete (SUCCESS, FAILED, or PENDING_RESUME). 
Shows real-time status changes and currently running job names during execution (same output as \\`workflow start --wait\\`).\n- **Function targets**: Waits for the function execution to complete\n- **Webhook/GraphQL targets**: Only waits for the executor job itself\n\nThe \\`--logs\\` option displays logs from the downstream execution when available.`,\n examples: [\n { cmd: \"my-executor\", desc: \"Trigger an executor\" },\n {\n cmd: 'my-executor -d \\'{\"message\": \"hello\"}\\'',\n desc: \"Trigger with data\",\n },\n {\n cmd: 'my-executor -d \\'{\"message\": \"hello\"}\\' -H \"X-Custom: value\" -H \"X-Another: value2\"',\n desc: \"Trigger with data and headers\",\n },\n { cmd: \"my-executor -W\", desc: \"Trigger and wait for completion\" },\n { cmd: \"my-executor -W -l\", desc: \"Trigger, wait, and show logs\" },\n ],\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n executorName: arg(z.string(), {\n positional: true,\n description: \"Executor name\",\n }),\n data: arg(jsonDataArg.optional(), {\n alias: \"d\",\n description: \"Request body (JSON string)\",\n }),\n header: arg(headerArg.array().optional(), {\n alias: \"H\",\n overrideBuiltinAlias: true,\n description: \"Request header (format: 'Key: Value', can be specified multiple times)\",\n }),\n wait: arg(z.boolean().default(false), {\n alias: \"W\",\n description:\n \"Wait for job completion and downstream execution (workflow/function) if applicable\",\n }),\n interval: arg(durationArg.default(\"3s\"), {\n alias: \"i\",\n description: \"Polling interval when using --wait (e.g., '3s', '500ms', '1m')\",\n }),\n logs: arg(z.boolean().default(false), {\n alias: \"l\",\n description: \"Display function execution logs after completion (requires --wait)\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n // Validate trigger type before processing\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: args.profile,\n });\n const client = await 
initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n const { executor } = await client.getExecutorExecutor({\n workspaceId,\n name: args.executorName,\n });\n\n if (!executor) {\n throw new Error(`Executor '${args.executorName}' not found.`);\n }\n\n // EVENT trigger type cannot be triggered manually\n if (executor.triggerType === ExecutorTriggerType.EVENT) {\n throw new Error(\n `Executor '${args.executorName}' has '${executorTriggerTypeToString(executor.triggerType)}' trigger type and cannot be triggered manually. ` +\n `Only executors with 'INCOMING_WEBHOOK' or 'SCHEDULE' triggers can be triggered manually.`,\n );\n }\n\n // SCHEDULE trigger type does not accept --data or --header options\n if (executor.triggerType === ExecutorTriggerType.SCHEDULE && (args.data || args.header)) {\n throw new Error(\n `Executor '${args.executorName}' has 'SCHEDULE' trigger type. ` +\n `The --data and --header options are only available for 'INCOMING_WEBHOOK' trigger type.`,\n );\n }\n\n let payload: JsonObject | undefined;\n\n // Build payload if data or headers are provided\n const body: JsonObject | undefined = args.data;\n const headers: Record<string, string> = {};\n if (args.header) {\n for (const h of args.header) {\n headers[h.key] = h.value;\n }\n }\n\n if (body !== undefined || Object.keys(headers).length > 0) {\n payload = {\n body: body ?? {},\n headers,\n };\n }\n\n const result = await triggerExecutorByName({\n executorName: args.executorName,\n payload,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n if (!result.jobId) {\n logger.success(`Executor '${args.executorName}' triggered successfully.`);\n if (args.wait) {\n logger.warn(\"Cannot watch: job ID not available. The API may need to be updated.\");\n }\n return;\n }\n\n logger.success(\n `Executor '${args.executorName}' triggered successfully. 
Job ID: ${result.jobId}`,\n );\n\n if (args.wait) {\n const watchResult = await watchExecutorJob({\n executorName: args.executorName,\n jobId: result.jobId,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n interval: parseDuration(args.interval),\n logs: args.logs,\n });\n\n // Print result\n if (!args.json) {\n logger.log(styles.bold(`\\nTarget Type: ${watchResult.targetType}`));\n logger.log(`Job Status: ${watchResult.job.status}`);\n\n if (watchResult.workflowExecutionId) {\n logger.log(styles.bold(\"\\nWorkflow Execution:\"));\n logger.log(` ID: ${watchResult.workflowExecutionId}`);\n if (watchResult.workflowStatus) {\n logger.log(` Status: ${watchResult.workflowStatus}`);\n }\n if (watchResult.workflowJobLogs && watchResult.workflowJobLogs.length > 0) {\n for (const jobLog of watchResult.workflowJobLogs) {\n logger.log(styles.bold(`\\n Job: ${jobLog.jobName}`));\n if (jobLog.logs) {\n logger.log(styles.dim(\" Logs:\"));\n for (const line of jobLog.logs.split(\"\\n\")) {\n logger.log(` ${line}`);\n }\n }\n if (jobLog.result) {\n logger.log(styles.dim(\" Result:\"));\n try {\n const parsed = JSON.parse(jobLog.result);\n const formatted = JSON.stringify(parsed, null, 2);\n for (const line of formatted.split(\"\\n\")) {\n logger.log(` ${line}`);\n }\n } catch {\n logger.log(` ${jobLog.result}`);\n }\n }\n }\n }\n }\n if (watchResult.functionExecutionId) {\n logger.log(styles.bold(\"\\nFunction Execution:\"));\n logger.log(` ID: ${watchResult.functionExecutionId}`);\n if (watchResult.functionStatus) {\n logger.log(` Status: ${watchResult.functionStatus}`);\n }\n if (watchResult.functionLogs) {\n logger.log(styles.dim(\" Logs:\"));\n for (const line of watchResult.functionLogs.split(\"\\n\")) {\n logger.log(` ${line}`);\n }\n }\n }\n } else {\n logger.out(watchResult);\n }\n }\n }),\n});\n","import { ExecutorTriggerType } from \"@tailor-proto/tailor/v1/executor_resource_pb\";\nimport { defineCommand, runCommand } from \"politty\";\nimport { z } from 
\"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient, platformBaseUrl } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger, styles } from \"@/cli/shared/logger\";\n\nexport interface WebhookExecutorInfo {\n name: string;\n webhookUrl: string;\n disabled: boolean;\n}\n\nexport interface ListWebhookExecutorsOptions {\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * Build the webhook URL for an executor.\n * @param workspaceId - Workspace ID\n * @param executorName - Executor name\n * @returns Webhook URL\n */\nfunction buildWebhookUrl(workspaceId: string, executorName: string): string {\n return `${platformBaseUrl}/webhook/v1/${workspaceId}/executor/${executorName}`;\n}\n\n/**\n * List executors with incoming webhook triggers and return CLI-friendly info.\n * @param options - Listing options\n * @returns List of webhook executors with URLs\n */\nexport async function listWebhookExecutors(\n options?: ListWebhookExecutorsOptions,\n): Promise<WebhookExecutorInfo[]> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n const executors = await fetchAll(async (pageToken, maxPageSize) => {\n const { executors, nextPageToken } = await client.listExecutorExecutors({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [executors, nextPageToken];\n });\n\n // Filter only incoming webhook triggers\n const webhookExecutors = executors.filter(\n (e) => e.triggerType === ExecutorTriggerType.INCOMING_WEBHOOK,\n );\n\n return webhookExecutors.map((e) => ({\n name: e.name,\n webhookUrl: buildWebhookUrl(workspaceId, e.name),\n disabled: e.disabled,\n }));\n}\n\nconst listWebhookCommand = 
defineCommand({\n name: \"list\",\n description: \"List executors with incoming webhook triggers\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const executors = await listWebhookExecutors({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n if (executors.length === 0) {\n logger.info(\"No webhook executors found.\");\n return;\n }\n\n logger.out(executors, {\n display: {\n disabled: (v) => (v ? styles.warning(\"true\") : styles.dim(\"false\")),\n },\n });\n\n if (!args.json) {\n logger.info(\n 'To test a webhook, run: tailor-sdk executor trigger <name> -d \\'{\"key\":\"value\"}\\'',\n );\n }\n }),\n});\n\nexport const webhookCommand = defineCommand({\n name: \"webhook\",\n description: \"Manage executor webhooks\",\n subCommands: {\n list: listWebhookCommand,\n },\n async run() {\n await runCommand(listWebhookCommand, []);\n },\n});\n","import type { IdProvider as IdProviderConfig, OAuth2ClientInput } from \"@/types/auth.generated\";\nimport type { Executor } from \"@/types/executor.generated\";\nimport type { PluginAttachment } from \"@/types/plugin\";\nimport type { Resolver } from \"@/types/resolver.generated\";\nimport type { TailorDBType, TypeSourceInfoEntry } from \"@/types/tailordb\";\n\nexport type { PluginAttachment } from \"@/types/plugin\";\n\n// ========================================\n// Basic types\n// ========================================\n\ninterface GeneratedFile {\n path: string;\n content: string;\n skipIfExists?: boolean; // default: false\n executable?: boolean; // default: false - if true, sets chmod +x\n}\n\nexport interface GeneratorResult {\n files: GeneratedFile[];\n errors?: string[];\n}\n\n// Namespace results for TailorDB\nexport interface TailorDBNamespaceResult<T> {\n namespace: string;\n types: T;\n}\n\n// Namespace results for Resolver\nexport interface ResolverNamespaceResult<R> {\n namespace: string;\n resolvers: 
R;\n}\n\n// Auth configuration for generators\nexport interface GeneratorAuthInput {\n name: string;\n userProfile?: {\n typeName: string;\n namespace: string;\n usernameField: string;\n };\n machineUsers?: Record<string, { attributes: Record<string, unknown> }>;\n oauth2Clients?: Record<string, OAuth2ClientInput>;\n idProvider?: IdProviderConfig;\n}\n\n// ========================================\n// Dependency types\n// ========================================\n\nexport type DependencyKind = \"tailordb\" | \"resolver\" | \"executor\";\n\n// Check if array includes a specific element\ntype ArrayIncludes<T extends readonly unknown[], U> = T extends readonly [\n infer First,\n ...infer Rest,\n]\n ? First extends U\n ? true\n : ArrayIncludes<Rest, U>\n : false;\n\n// Check if dependencies array includes a specific dependency\nexport type HasDependency<\n Deps extends readonly DependencyKind[],\n D extends DependencyKind,\n> = ArrayIncludes<Deps, D>;\n\n// ========================================\n// Source info type for TailorDB types\n// Re-exported from parser module\n// ========================================\n\nexport type {\n UserDefinedTypeSource,\n PluginGeneratedTypeSource,\n TypeSourceInfoEntry,\n} from \"@/types/tailordb\";\n\n// ========================================\n// Method interfaces for each dependency\n// ========================================\n\nexport interface TailorDBProcessMethods<T, Ts> {\n processType(args: {\n type: TailorDBType;\n namespace: string;\n source: TypeSourceInfoEntry;\n /** Plugin attachments configured on this type via .plugin() method */\n plugins: readonly PluginAttachment[];\n }): T | Promise<T>;\n\n processTailorDBNamespace?(args: {\n namespace: string;\n types: Record<string, T>;\n }): Ts | Promise<Ts>;\n}\n\nexport interface ResolverProcessMethods<R, Rs> {\n processResolver(args: { resolver: Resolver; namespace: string }): R | Promise<R>;\n\n processResolverNamespace?(args: {\n namespace: string;\n resolvers: 
Record<string, R>;\n }): Rs | Promise<Rs>;\n}\n\nexport interface ExecutorProcessMethods<E> {\n processExecutor(executor: Executor): E | Promise<E>;\n}\n\n// ========================================\n// Conditional method selection\n// ========================================\n\ntype SelectMethods<Deps extends readonly DependencyKind[], T, R, E, Ts, Rs> = (HasDependency<\n Deps,\n \"tailordb\"\n> extends true\n ? TailorDBProcessMethods<T, Ts>\n : object) &\n (HasDependency<Deps, \"resolver\"> extends true ? ResolverProcessMethods<R, Rs> : object) &\n (HasDependency<Deps, \"executor\"> extends true ? ExecutorProcessMethods<E> : object);\n\n// ========================================\n// Conditional input selection for aggregate\n// ========================================\n\ninterface TailorDBInputPart<Ts> {\n tailordb: TailorDBNamespaceResult<Ts>[];\n}\n\ninterface ResolverInputPart<Rs> {\n resolver: ResolverNamespaceResult<Rs>[];\n}\n\ninterface ExecutorInputPart<E> {\n executor: E[];\n}\n\n// Auth is always available (resolved after TailorDB, before generators)\ninterface AuthPart {\n auth?: GeneratorAuthInput;\n}\n\ntype SelectInput<Deps extends readonly DependencyKind[], Ts, Rs, E> = (HasDependency<\n Deps,\n \"tailordb\"\n> extends true\n ? TailorDBInputPart<Ts>\n : object) &\n (HasDependency<Deps, \"resolver\"> extends true ? ResolverInputPart<Rs> : object) &\n (HasDependency<Deps, \"executor\"> extends true ? 
ExecutorInputPart<E> : object) &\n AuthPart;\n\n/** Input type for TailorDB-only generators */\nexport type TailorDBInput<Ts> = TailorDBInputPart<Ts> & AuthPart;\n\n/** Input type for Resolver-only generators */\nexport type ResolverInput<Rs> = ResolverInputPart<Rs> & AuthPart;\n\n/** Input type for Executor-only generators */\nexport type ExecutorInput<E> = ExecutorInputPart<E> & AuthPart;\n\n/** Input type for full generators (TailorDB + Resolver + Executor) */\nexport type FullInput<Ts, Rs, E> = TailorDBInputPart<Ts> &\n ResolverInputPart<Rs> &\n ExecutorInputPart<E> &\n AuthPart;\n\n/** Arguments type for aggregate method */\nexport interface AggregateArgs<Input> {\n input: Input;\n baseDir: string;\n configPath: string;\n}\n\n// ========================================\n// CodeGenerator type definition\n// ========================================\n\ninterface CodeGeneratorCore {\n readonly id: string;\n readonly description: string;\n}\n\n/**\n * Generator interface with dependencies-based conditional methods.\n * @template Deps - Dependencies array (e.g., ['tailordb'], ['tailordb', 'resolver'])\n * @template T - Return type of processType\n * @template R - Return type of processResolver\n * @template E - Return type of processExecutor\n * @template Ts - Return type of processTailorDBNamespace (default: Record<string, T>)\n * @template Rs - Return type of processResolverNamespace (default: Record<string, R>)\n */\nexport type CodeGenerator<\n Deps extends readonly DependencyKind[],\n T = unknown,\n R = unknown,\n E = unknown,\n Ts = Record<string, T>,\n Rs = Record<string, R>,\n> = CodeGeneratorCore &\n SelectMethods<Deps, T, R, E, Ts, Rs> & {\n readonly dependencies: Deps;\n\n aggregate(args: {\n input: SelectInput<Deps, Ts, Rs, E>;\n baseDir: string;\n configPath: string;\n }): GeneratorResult | Promise<GeneratorResult>;\n };\n\n// ========================================\n// Helper types for common generator patterns\n// 
========================================\n\n/** TailorDB-only generator */\nexport type TailorDBGenerator<T = unknown, Ts = Record<string, T>> = CodeGenerator<\n readonly [\"tailordb\"],\n T,\n never,\n never,\n Ts,\n never\n>;\n\n/** Resolver-only generator */\nexport type ResolverGenerator<R = unknown, Rs = Record<string, R>> = CodeGenerator<\n readonly [\"resolver\"],\n never,\n R,\n never,\n never,\n Rs\n>;\n\n/** Executor-only generator */\nexport type ExecutorGenerator<E = unknown> = CodeGenerator<\n readonly [\"executor\"],\n never,\n never,\n E,\n never,\n never\n>;\n\n/** TailorDB + Resolver generator */\nexport type TailorDBResolverGenerator<\n T = unknown,\n R = unknown,\n Ts = Record<string, T>,\n Rs = Record<string, R>,\n> = CodeGenerator<readonly [\"tailordb\", \"resolver\"], T, R, never, Ts, Rs>;\n\n/** Full generator (all dependencies) */\nexport type FullCodeGenerator<\n T = unknown,\n R = unknown,\n E = unknown,\n Ts = Record<string, T>,\n Rs = Record<string, R>,\n> = CodeGenerator<readonly [\"tailordb\", \"resolver\", \"executor\"], T, R, E, Ts, Rs>;\n\n// ========================================\n// Runtime utility\n// ========================================\n\ninterface DependencyCarrier {\n dependencies: readonly DependencyKind[];\n}\n\n/**\n * Type guard to check if a generator has a specific dependency.\n * @template D\n * @param generator - Code generator instance\n * @param dependency - Dependency kind to check\n * @returns True if the generator has the dependency\n */\nexport function hasDependency<D extends DependencyKind>(\n generator: DependencyCarrier,\n dependency: D,\n): boolean {\n return generator.dependencies.includes(dependency);\n}\n\n// Type for any generator (used in GenerationManager)\n// This is a more permissive type that includes all possible methods\nexport interface AnyCodeGenerator {\n readonly id: string;\n readonly description: string;\n readonly dependencies: readonly DependencyKind[];\n\n processType?(args: {\n 
type: TailorDBType;\n namespace: string;\n source: TypeSourceInfoEntry;\n plugins: readonly PluginAttachment[];\n }): unknown | Promise<unknown>;\n\n processTailorDBNamespace?(args: {\n namespace: string;\n types: Record<string, unknown>;\n }): unknown | Promise<unknown>;\n\n processResolver?(args: { resolver: Resolver; namespace: string }): unknown | Promise<unknown>;\n\n processResolverNamespace?(args: {\n namespace: string;\n resolvers: Record<string, unknown>;\n }): unknown | Promise<unknown>;\n\n processExecutor?(executor: Executor): unknown | Promise<unknown>;\n\n aggregate(args: {\n input: Record<string, unknown>;\n baseDir: string;\n configPath: string;\n }): GeneratorResult | Promise<GeneratorResult>;\n}\n","import { glob } from \"node:fs/promises\";\nimport { watch } from \"chokidar\";\nimport * as madgeModule from \"madge\";\nimport * as path from \"pathe\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport type { MadgeLoader } from \"./types\";\n\n/**\n * Types of file change events.\n */\ntype FileChangeEvent = \"add\" | \"change\" | \"unlink\";\n\n/**\n * Definition of a watch group.\n */\ninterface WatchGroup {\n /** Unique identifier of the group. */\n id: string;\n /** File patterns to watch (glob format). */\n patterns: string[];\n /** List of absolute file paths in the group. */\n files: Set<string>;\n}\n\n/**\n * Node in the dependency graph.\n */\ninterface DependencyNode {\n /** Absolute path of the file. */\n filePath: string;\n /** List of files this file depends on. */\n dependencies: Set<string>;\n /** List of files that depend on this file. */\n dependents: Set<string>;\n}\n\n/**\n * Impact analysis result.\n */\ninterface ImpactAnalysisResult {\n /** Changed file. */\n changedFile: string;\n /** List of affected files (all files depending on the changed file). */\n affectedFiles: string[];\n /** List of affected watch groups. 
*/\n affectedGroups: string[];\n}\n\n/**\n * Type of the error handling callback.\n */\ntype ErrorCallback = (error: WatcherError) => void;\n\n/**\n * Options for the watcher system.\n */\ninterface WatcherOptions {\n /** Options for chokidar. */\n chokidarOptions?: Parameters<typeof watch>[1];\n /** Options for madge. */\n madgeOptions?: Parameters<MadgeLoader>[1];\n /** Update interval for the dependency graph (milliseconds). */\n dependencyUpdateInterval?: number;\n /** Debounce duration (milliseconds). */\n debounceTime?: number;\n /** Whether to enable circular dependency detection. */\n detectCircularDependencies?: boolean;\n}\n\n/**\n * Watcher status.\n */\ninterface WatchStatus {\n /** Whether watching is active. */\n isWatching: boolean;\n /** Number of watch groups. */\n groupCount: number;\n /** Number of watched files. */\n fileCount: number;\n /** Number of nodes in the dependency graph. */\n dependencyNodeCount: number;\n}\n\n/**\n * Graph statistics.\n */\ninterface GraphStats {\n /** Number of nodes. */\n nodeCount: number;\n /** Number of edges. */\n edgeCount: number;\n /** Number of circular dependencies. 
*/\n circularDependencyCount: number;\n}\n\n/**\n * Error codes.\n */\nconst WatcherErrorCode = {\n DEPENDENCY_ANALYSIS_FAILED: \"DEPENDENCY_ANALYSIS_FAILED\",\n FILE_WATCH_FAILED: \"FILE_WATCH_FAILED\",\n CIRCULAR_DEPENDENCY_DETECTED: \"CIRCULAR_DEPENDENCY_DETECTED\",\n INVALID_WATCH_GROUP: \"INVALID_WATCH_GROUP\",\n MADGE_INITIALIZATION_FAILED: \"MADGE_INITIALIZATION_FAILED\",\n} as const;\ntype WatcherErrorCode = (typeof WatcherErrorCode)[keyof typeof WatcherErrorCode];\n\n/**\n * Watcher-specific error.\n */\nexport class WatcherError extends Error {\n constructor(\n message: string,\n public readonly code: WatcherErrorCode,\n public readonly filePath?: string,\n public readonly originalError?: Error,\n ) {\n super(message);\n this.name = \"WatcherError\";\n }\n}\n\n/**\n * Dependency graph manager type.\n */\nexport type DependencyGraphManager = {\n buildGraph: (filePaths: string[]) => Promise<void>;\n getDependents: (filePath: string) => string[];\n getDependencies: (filePath: string) => string[];\n findCircularDependencies: () => string[][];\n addNode: (filePath: string) => void;\n removeNode: (filePath: string) => void;\n getGraphStats: () => GraphStats;\n};\n\n/**\n * Creates a dependency graph manager.\n * @param options - Options for madge\n * @returns DependencyGraphManager instance\n */\nexport function createDependencyGraphManager(\n options: Parameters<MadgeLoader>[1] = {},\n): DependencyGraphManager {\n const graph: Map<string, DependencyNode> = new Map();\n let madgeInstance: Awaited<ReturnType<MadgeLoader>> | null = null;\n let madgeLoader: MadgeLoader | null = null;\n\n function getMadgeLoader(): MadgeLoader {\n if (madgeLoader) {\n return madgeLoader;\n }\n\n const defaultExport = (madgeModule as { default?: unknown }).default;\n if (typeof defaultExport === \"function\") {\n madgeLoader = defaultExport as MadgeLoader;\n return madgeLoader;\n }\n\n if (typeof (madgeModule as unknown) === \"function\") {\n madgeLoader = madgeModule as unknown as 
MadgeLoader;\n return madgeLoader;\n }\n\n throw new WatcherError(\n \"Failed to initialize madge analyzer: module did not export a callable function.\",\n WatcherErrorCode.MADGE_INITIALIZATION_FAILED,\n );\n }\n\n function traverseDependents(filePath: string, visited: Set<string>): string[] {\n if (visited.has(filePath)) return [];\n visited.add(filePath);\n\n const node = graph.get(filePath);\n if (!node) return [];\n\n const result: string[] = [];\n for (const dependent of node.dependents) {\n result.push(dependent);\n result.push(...traverseDependents(dependent, visited));\n }\n\n return result;\n }\n\n function traverseDependencies(filePath: string, visited: Set<string>): string[] {\n if (visited.has(filePath)) return [];\n visited.add(filePath);\n\n const node = graph.get(filePath);\n if (!node) return [];\n\n const result: string[] = [];\n for (const dependency of node.dependencies) {\n result.push(dependency);\n result.push(...traverseDependencies(dependency, visited));\n }\n\n return result;\n }\n\n function addNode(filePath: string): void {\n const absolutePath = path.resolve(filePath);\n if (!graph.has(absolutePath)) {\n graph.set(absolutePath, {\n filePath: absolutePath,\n dependencies: new Set(),\n dependents: new Set(),\n });\n }\n }\n\n function removeNode(filePath: string): void {\n const absolutePath = path.resolve(filePath);\n const node = graph.get(absolutePath);\n if (!node) return;\n\n for (const dep of node.dependencies) {\n const depNode = graph.get(dep);\n if (depNode) {\n depNode.dependents.delete(absolutePath);\n }\n }\n\n for (const dependent of node.dependents) {\n const dependentNode = graph.get(dependent);\n if (dependentNode) {\n dependentNode.dependencies.delete(absolutePath);\n }\n }\n\n graph.delete(absolutePath);\n }\n\n function findCircularDependencies(): string[][] {\n if (!madgeInstance) return [];\n try {\n return madgeInstance.circular();\n } catch (error) {\n logger.warn(`Failed to detect circular dependencies: 
${String(error)}`);\n return [];\n }\n }\n\n return {\n async buildGraph(filePaths: string[]): Promise<void> {\n try {\n if (filePaths.length === 0) return;\n\n const madge = getMadgeLoader();\n\n madgeInstance = await madge(filePaths, {\n fileExtensions: [\"ts\", \"js\"],\n excludeRegExp: [/node_modules/],\n baseDir: \".\",\n ...options,\n });\n\n const dependencyObj = madgeInstance.obj() as Record<string, string[]>;\n graph.clear();\n\n for (const filePath of filePaths) {\n addNode(filePath);\n }\n\n for (const [filePath, dependencies] of Object.entries(dependencyObj)) {\n const absoluteFilePath = path.resolve(\".\", filePath);\n const node = graph.get(absoluteFilePath);\n if (!node) continue;\n\n for (const dep of dependencies) {\n const absoluteDepPath = path.resolve(\".\", dep);\n node.dependencies.add(absoluteDepPath);\n\n const depNode = graph.get(absoluteDepPath);\n if (depNode) {\n depNode.dependents.add(absoluteFilePath);\n }\n }\n }\n } catch (error) {\n if (error instanceof WatcherError) {\n throw error;\n }\n throw new WatcherError(\n `Failed to build dependency graph: ${error instanceof Error ? error.message : String(error)}`,\n WatcherErrorCode.DEPENDENCY_ANALYSIS_FAILED,\n undefined,\n error instanceof Error ? 
error : undefined,\n );\n }\n },\n\n getDependents(filePath: string): string[] {\n const visited = new Set<string>();\n return traverseDependents(path.resolve(filePath), visited);\n },\n\n getDependencies(filePath: string): string[] {\n const visited = new Set<string>();\n return traverseDependencies(path.resolve(filePath), visited);\n },\n\n findCircularDependencies,\n addNode,\n removeNode,\n\n getGraphStats(): GraphStats {\n let edgeCount = 0;\n for (const node of graph.values()) {\n edgeCount += node.dependencies.size;\n }\n\n return {\n nodeCount: graph.size,\n edgeCount,\n circularDependencyCount: findCircularDependencies().length,\n };\n },\n };\n}\n\n/**\n * Dependency watcher type.\n */\nexport type DependencyWatcher = {\n initialize: () => Promise<void>;\n addWatchGroup: (groupId: string, patterns: string[]) => Promise<void>;\n removeWatchGroup: (groupId: string) => Promise<void>;\n start: () => Promise<void>;\n stop: () => Promise<void>;\n onError: (callback: ErrorCallback) => void;\n updateDependencyGraph: () => Promise<void>;\n calculateImpact: (filePath: string) => ImpactAnalysisResult;\n detectCircularDependencies: () => string[][];\n getWatchStatus: () => WatchStatus;\n setRestartCallback: (callback: () => void) => void;\n};\n\n/**\n * Creates a dependency watcher.\n * @param options - Watcher options\n * @returns DependencyWatcher instance\n */\nexport function createDependencyWatcher(options: WatcherOptions = {}): DependencyWatcher {\n let chokidarWatcher: ReturnType<typeof watch> | null = null;\n const watchGroups: Map<string, WatchGroup> = new Map();\n const dependencyGraphManager = createDependencyGraphManager(options.madgeOptions);\n let errorCallback: ErrorCallback | null = null;\n const debounceTimers: Map<string, NodeJS.Timeout> = new Map();\n let isInitialized = false;\n const dependencyCache: Map<string, string[]> = new Map();\n const maxCacheSize = 1000;\n let signalHandlersRegistered = false;\n let restartCallback: (() => void) | null = 
null;\n\n function validateWatchGroup(groupId: string, patterns: string[]): void {\n if (!groupId || typeof groupId !== \"string\") {\n throw new WatcherError(\n \"Group ID must be a non-empty string\",\n WatcherErrorCode.INVALID_WATCH_GROUP,\n );\n }\n\n if (!Array.isArray(patterns) || patterns.length === 0) {\n throw new WatcherError(\n \"Patterns must be a non-empty array\",\n WatcherErrorCode.INVALID_WATCH_GROUP,\n );\n }\n\n if (watchGroups.has(groupId)) {\n throw new WatcherError(\n `Watch group with ID '${groupId}' already exists`,\n WatcherErrorCode.INVALID_WATCH_GROUP,\n );\n }\n }\n\n function handleError(error: WatcherError): void {\n logger.error(\n `[DependencyWatcher] ${error.message} (code: ${error.code}, filePath: ${error.filePath})`,\n );\n\n if (errorCallback) {\n errorCallback(error);\n }\n }\n\n function setCacheValue(key: string, value: string[]): void {\n if (dependencyCache.size >= maxCacheSize) {\n const firstKey = dependencyCache.keys().next().value;\n if (firstKey) {\n dependencyCache.delete(firstKey);\n }\n }\n dependencyCache.set(key, value);\n }\n\n function findAffectedFiles(changedFile: string): string[] {\n return dependencyGraphManager.getDependents(changedFile);\n }\n\n function findAffectedGroups(affectedFiles: string[]): string[] {\n logger.debug(`Finding affected groups for files: ${affectedFiles.join(\", \")}`);\n const affectedGroupsSet = new Set<string>();\n\n for (const [groupId, group] of watchGroups) {\n for (const affectedFile of affectedFiles) {\n if (group.files.has(affectedFile)) {\n logger.debug(`Group ${groupId} is affected by file: ${affectedFile}`);\n affectedGroupsSet.add(groupId);\n break;\n }\n }\n }\n\n return Array.from(affectedGroupsSet);\n }\n\n function calculateImpact(filePath: string): ImpactAnalysisResult {\n const cacheKey = `impact:${filePath}`;\n let affectedFiles = dependencyCache.get(cacheKey);\n\n if (!affectedFiles) {\n affectedFiles = findAffectedFiles(filePath);\n setCacheValue(cacheKey, 
affectedFiles);\n }\n\n // Include the changed file itself in the affected files\n const allAffectedFiles = [filePath, ...affectedFiles];\n const affectedGroups = findAffectedGroups(allAffectedFiles);\n\n return {\n changedFile: filePath,\n affectedFiles: allAffectedFiles,\n affectedGroups,\n };\n }\n\n async function updateDependencyGraph(): Promise<void> {\n const allFiles: string[] = [];\n for (const group of watchGroups.values()) {\n allFiles.push(...Array.from(group.files));\n }\n\n await dependencyGraphManager.buildGraph(allFiles);\n dependencyCache.clear();\n\n if (options.detectCircularDependencies) {\n const circularDeps = dependencyGraphManager.findCircularDependencies();\n if (circularDeps.length > 0) {\n logger.warn(`Circular dependencies detected: ${JSON.stringify(circularDeps)}`);\n }\n }\n }\n\n async function handleFileChange(event: FileChangeEvent, filePath: string): Promise<void> {\n try {\n const absolutePath = path.resolve(filePath);\n\n if (event === \"unlink\") {\n dependencyGraphManager.removeNode(absolutePath);\n } else {\n dependencyGraphManager.addNode(absolutePath);\n if (event === \"change\") {\n await updateDependencyGraph();\n }\n }\n\n dependencyCache.clear();\n\n const impactResult = calculateImpact(absolutePath);\n\n // If any groups are affected, trigger restart instead of calling callbacks\n if (impactResult.affectedGroups.length > 0) {\n logger.info(\"File change detected, restarting watch process...\", {\n mode: \"stream\",\n });\n logger.info(`Changed file: ${absolutePath}`, { mode: \"stream\" });\n logger.info(`Affected groups: ${impactResult.affectedGroups.join(\", \")}`, {\n mode: \"stream\",\n });\n\n if (restartCallback) {\n restartCallback();\n }\n } else {\n logger.debug(`No affected groups found for file: ${absolutePath}`);\n }\n } catch (error) {\n handleError(\n new WatcherError(\n `Failed to handle file change: ${error instanceof Error ? 
error.message : String(error)}`,\n WatcherErrorCode.DEPENDENCY_ANALYSIS_FAILED,\n filePath,\n error instanceof Error ? error : undefined,\n ),\n );\n }\n }\n\n function debounceFileChange(event: FileChangeEvent, filePath: string): void {\n const key = `${event}:${filePath}`;\n\n if (debounceTimers.has(key)) {\n clearTimeout(debounceTimers.get(key));\n }\n\n const timer = setTimeout(() => {\n handleFileChange(event, filePath);\n debounceTimers.delete(key);\n }, options.debounceTime || 100);\n\n debounceTimers.set(key, timer);\n }\n\n async function stop(): Promise<void> {\n if (chokidarWatcher) {\n await chokidarWatcher.close();\n chokidarWatcher = null;\n }\n\n for (const timer of debounceTimers.values()) {\n clearTimeout(timer);\n }\n debounceTimers.clear();\n\n removeSignalHandlers();\n isInitialized = false;\n }\n\n function setupSignalHandlers(): void {\n if (signalHandlersRegistered) return;\n\n const handleSignal = async () => {\n try {\n await stop();\n logger.info(\"Watcher stopped successfully\");\n process.exit(0);\n } catch (error) {\n logger.error(`Error during shutdown: ${String(error)}`);\n process.exit(0);\n }\n };\n\n process.on(\"SIGINT\", () => handleSignal());\n process.on(\"SIGTERM\", () => handleSignal());\n signalHandlersRegistered = true;\n }\n\n function removeSignalHandlers(): void {\n if (!signalHandlersRegistered) return;\n\n process.removeAllListeners(\"SIGINT\");\n process.removeAllListeners(\"SIGTERM\");\n signalHandlersRegistered = false;\n }\n\n async function initialize(): Promise<void> {\n if (isInitialized) return;\n\n try {\n chokidarWatcher = watch([], {\n ignored: /node_modules/,\n persistent: true,\n ignoreInitial: true,\n usePolling: false,\n awaitWriteFinish: {\n stabilityThreshold: 100,\n pollInterval: 100,\n },\n ...options.chokidarOptions,\n });\n\n chokidarWatcher.on(\"add\", (filePath: string) => {\n logger.debug(`File added: ${filePath}`);\n debounceFileChange(\"add\", filePath);\n });\n\n 
chokidarWatcher.on(\"change\", (filePath: string) => {\n logger.debug(`File changed: ${filePath}`);\n debounceFileChange(\"change\", filePath);\n });\n\n chokidarWatcher.on(\"unlink\", (filePath: string) => {\n logger.debug(`File removed: ${filePath}`);\n debounceFileChange(\"unlink\", filePath);\n });\n\n chokidarWatcher.on(\"error\", (error: unknown) => {\n logger.error(`Watcher error: ${error instanceof Error ? error.message : String(error)}`, {\n mode: \"stream\",\n });\n handleError(\n new WatcherError(\n `File watcher error: ${error instanceof Error ? error.message : String(error)}`,\n WatcherErrorCode.FILE_WATCH_FAILED,\n undefined,\n error instanceof Error ? error : undefined,\n ),\n );\n });\n\n setupSignalHandlers();\n isInitialized = true;\n } catch (error) {\n throw new WatcherError(\n `Failed to initialize watcher: ${error instanceof Error ? error.message : String(error)}`,\n WatcherErrorCode.FILE_WATCH_FAILED,\n undefined,\n error instanceof Error ? error : undefined,\n );\n }\n }\n\n return {\n initialize,\n\n async addWatchGroup(groupId: string, patterns: string[]): Promise<void> {\n validateWatchGroup(groupId, patterns);\n\n if (!isInitialized) {\n await initialize();\n }\n\n const files = new Set<string>();\n for (const pattern of patterns) {\n logger.log(\n `${styles.dim(`Watch pattern for`)} ${styles.dim(groupId + \":\")} ${path.relative(process.cwd(), pattern)}`,\n );\n for await (const file of glob(pattern)) {\n files.add(path.resolve(file));\n }\n }\n\n const watchGroup: WatchGroup = {\n id: groupId,\n patterns,\n files,\n };\n\n watchGroups.set(groupId, watchGroup);\n\n if (chokidarWatcher) {\n const filePaths = Array.from(files);\n chokidarWatcher.add(filePaths);\n }\n\n await updateDependencyGraph();\n },\n\n async removeWatchGroup(groupId: string): Promise<void> {\n const watchGroup = watchGroups.get(groupId);\n if (!watchGroup) return;\n\n if (chokidarWatcher) {\n chokidarWatcher.unwatch(watchGroup.patterns);\n }\n\n for (const filePath 
of watchGroup.files) {\n dependencyGraphManager.removeNode(filePath);\n }\n\n watchGroups.delete(groupId);\n dependencyCache.clear();\n },\n\n async start(): Promise<void> {\n if (!isInitialized) {\n await initialize();\n }\n await updateDependencyGraph();\n },\n\n stop,\n\n onError(callback: ErrorCallback): void {\n errorCallback = callback;\n },\n\n updateDependencyGraph,\n calculateImpact,\n\n detectCircularDependencies(): string[][] {\n return dependencyGraphManager.findCircularDependencies();\n },\n\n getWatchStatus(): WatchStatus {\n let fileCount = 0;\n for (const group of watchGroups.values()) {\n fileCount += group.files.size;\n }\n\n const stats = dependencyGraphManager.getGraphStats();\n\n return {\n isWatching: isInitialized && chokidarWatcher !== null,\n groupCount: watchGroups.size,\n fileCount,\n dependencyNodeCount: stats.nodeCount,\n };\n },\n\n setRestartCallback(callback: () => void): void {\n restartCallback = callback;\n },\n };\n}\n\nexport { WatcherErrorCode };\n","import { spawn } from \"node:child_process\";\nimport * as fs from \"node:fs\";\nimport * as path from \"pathe\";\nimport {\n type AnyCodeGenerator,\n type TailorDBNamespaceResult,\n type ResolverNamespaceResult,\n type GeneratorAuthInput,\n type GeneratorResult,\n type DependencyKind,\n hasDependency,\n} from \"@/cli/commands/generate/types\";\nimport {\n defineApplication,\n generatePluginFilesIfNeeded,\n type Application,\n} from \"@/cli/services/application\";\nimport { createExecutorService } from \"@/cli/services/executor/service\";\nimport { loadConfig, type LoadedConfig, type Generator } from \"@/cli/shared/config-loader\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { generateUserTypes } from \"@/cli/shared/type-generator\";\nimport { withSpan } from \"@/cli/telemetry\";\nimport { PluginManager } from \"@/plugin/manager\";\nimport { type TailorDBNamespaceData, type ResolverNamespaceData } from 
\"@/types/plugin-generation\";\nimport { createDependencyWatcher, type DependencyWatcher } from \"./watch\";\nimport type { GenerateOptions } from \"./options\";\nimport type { Executor } from \"@/types/executor.generated\";\nimport type { Plugin, PluginAttachment } from \"@/types/plugin\";\nimport type { Resolver } from \"@/types/resolver.generated\";\nimport type { TypeSourceInfo, TailorDBType } from \"@/types/tailordb\";\n\nexport type { CodeGenerator } from \"@/cli/commands/generate/types\";\n\ntype TypeInfo = {\n types: Record<string, TailorDBType>;\n sourceInfo: TypeSourceInfo;\n pluginAttachments: ReadonlyMap<string, readonly PluginAttachment[]>;\n};\n\n/**\n * Generation manager type.\n */\nexport type GenerationManager = {\n readonly application: Application;\n readonly baseDir: string;\n readonly generators: Generator[];\n readonly services: {\n tailordb: Record<string, TypeInfo>;\n resolver: Record<string, Record<string, Resolver>>;\n executor: Record<string, Executor>;\n };\n readonly generatorResults: GeneratorResults;\n processGenerator: (gen: AnyCodeGenerator) => Promise<void>;\n processTailorDBNamespace: (\n gen: AnyCodeGenerator,\n namespace: string,\n typeInfo: TypeInfo,\n ) => Promise<void>;\n processResolverNamespace: (\n gen: AnyCodeGenerator,\n namespace: string,\n resolvers: Record<string, Resolver>,\n ) => Promise<void>;\n processExecutors: (gen: AnyCodeGenerator) => Promise<void>;\n aggregate: (gen: AnyCodeGenerator) => Promise<void>;\n generate: (watch: boolean) => Promise<void>;\n watch: () => Promise<void>;\n};\n\ntype GeneratorResults = Record<\n /* generator */ string,\n {\n tailordbResults: Record</* namespace */ string, Record</* type */ string, unknown>>;\n resolverResults: Record</* namespace */ string, Record</* resolver */ string, unknown>>;\n tailordbNamespaceResults: Record</* namespace */ string, unknown>;\n resolverNamespaceResults: Record</* namespace */ string, unknown>;\n executorResults: Record</* executor */ string, 
unknown>;\n }\n>;\n\n/**\n * Creates a generation manager.\n * @param params - Parameters for creating the generation manager\n * @param params.application - Application instance to generate code for\n * @param params.config - Loaded configuration\n * @param params.generators - Code generators to run\n * @param params.pluginManager - Plugin manager for processing plugins\n * @returns GenerationManager instance\n */\nexport function createGenerationManager(params: {\n application: Application;\n config: LoadedConfig;\n generators?: Generator[];\n pluginManager?: PluginManager;\n}): GenerationManager {\n const { application, config, generators = [], pluginManager } = params;\n const baseDir = path.join(getDistDir(), \"generated\");\n fs.mkdirSync(baseDir, { recursive: true });\n\n const services: {\n tailordb: Record<string, TypeInfo>;\n resolver: Record<string, Record<string, Resolver>>;\n executor: Record<string, Executor>;\n } = { tailordb: {}, resolver: {}, executor: {} };\n\n let watcher: DependencyWatcher | null = null;\n const generatorResults: GeneratorResults = {};\n\n // Get plugins that have generation hooks\n const generationPlugins = pluginManager?.getPluginsWithGenerationHooks() ?? [];\n\n // Returns generators that subscribe to the given dependency phase\n function getReadyGenerators(dep: DependencyKind): Generator[] {\n return generators.filter((g) => (g as AnyCodeGenerator).dependencies.includes(dep));\n }\n\n function getAuthInput(): GeneratorAuthInput | undefined {\n const authService = application.authService;\n if (!authService) return undefined;\n\n const authConfig = authService.parsedConfig;\n const userProfile = authService.userProfile;\n return {\n name: authConfig.name,\n userProfile: userProfile\n ? 
{\n typeName: userProfile.type.name,\n namespace: userProfile.namespace,\n usernameField: userProfile.usernameField,\n }\n : undefined,\n machineUsers: authConfig.machineUsers,\n oauth2Clients: authConfig.oauth2Clients,\n idProvider: authConfig.idProvider,\n };\n }\n\n // =========================================================================\n // Generator processing (unchanged - per-type/perNS/aggregate pipeline)\n // =========================================================================\n\n async function processTailorDBNamespace(\n gen: AnyCodeGenerator,\n namespace: string,\n typeInfo: TypeInfo,\n ): Promise<void> {\n const results = generatorResults[gen.id];\n results.tailordbResults[namespace] = {};\n\n // Check if generator has processType method\n if (!gen.processType) {\n return;\n }\n\n const processType = gen.processType;\n await Promise.allSettled(\n Object.entries(typeInfo.types).map(async ([typeName, type]) => {\n try {\n results.tailordbResults[namespace][typeName] = await processType({\n type,\n namespace,\n source: typeInfo.sourceInfo[typeName],\n plugins: typeInfo.pluginAttachments.get(typeName) ?? 
[],\n });\n } catch (error) {\n logger.error(\n `Error processing type ${styles.bold(typeName)} in ${namespace} with generator ${gen.id}`,\n );\n logger.error(String(error));\n }\n }),\n );\n\n // Process namespace summary if available\n if (\"processTailorDBNamespace\" in gen && typeof gen.processTailorDBNamespace === \"function\") {\n try {\n results.tailordbNamespaceResults[namespace] = await gen.processTailorDBNamespace({\n namespace,\n types: results.tailordbResults[namespace],\n });\n } catch (error) {\n logger.error(\n `Error processing TailorDB namespace ${styles.bold(namespace)} with generator ${gen.id}`,\n );\n logger.error(String(error));\n }\n } else {\n results.tailordbNamespaceResults[namespace] = results.tailordbResults[namespace];\n }\n }\n\n async function processResolverNamespace(\n gen: AnyCodeGenerator,\n namespace: string,\n resolvers: Record<string, Resolver>,\n ): Promise<void> {\n const results = generatorResults[gen.id];\n results.resolverResults[namespace] = {};\n\n // Check if generator has processResolver method\n if (!gen.processResolver) {\n return;\n }\n\n const processResolver = gen.processResolver;\n // Process individual resolvers\n await Promise.allSettled(\n Object.entries(resolvers).map(async ([resolverName, resolver]) => {\n try {\n results.resolverResults[namespace][resolverName] = await processResolver({\n resolver,\n namespace,\n });\n } catch (error) {\n logger.error(\n `Error processing resolver ${styles.bold(resolverName)} in ${namespace} with generator ${gen.id}`,\n );\n logger.error(String(error));\n }\n }),\n );\n\n // Process namespace summary if available\n if (\"processResolverNamespace\" in gen && typeof gen.processResolverNamespace === \"function\") {\n try {\n results.resolverNamespaceResults[namespace] = await gen.processResolverNamespace({\n namespace,\n resolvers: results.resolverResults[namespace],\n });\n } catch (error) {\n logger.error(\n `Error processing Resolver namespace ${styles.bold(namespace)} with 
generator ${gen.id}`,\n );\n logger.error(String(error));\n }\n } else {\n results.resolverNamespaceResults[namespace] = results.resolverResults[namespace];\n }\n }\n\n async function processExecutors(gen: AnyCodeGenerator): Promise<void> {\n const results = generatorResults[gen.id];\n\n // Check if generator has processExecutor method\n if (!gen.processExecutor) {\n return;\n }\n\n const processExecutor = gen.processExecutor;\n // Process individual executors\n await Promise.allSettled(\n Object.entries(services.executor).map(async ([executorId, executor]) => {\n try {\n results.executorResults[executorId] = await processExecutor(executor);\n } catch (error) {\n logger.error(\n `Error processing executor ${styles.bold(executor.name)} with generator ${gen.id}`,\n );\n logger.error(String(error));\n }\n }),\n );\n }\n\n async function aggregate(gen: AnyCodeGenerator): Promise<void> {\n const results = generatorResults[gen.id];\n\n const tailordbResults: TailorDBNamespaceResult<unknown>[] = [];\n const resolverResults: ResolverNamespaceResult<unknown>[] = [];\n\n // Collect TailorDB namespace results\n for (const [namespace, types] of Object.entries(results.tailordbNamespaceResults)) {\n tailordbResults.push({\n namespace,\n types,\n });\n }\n\n // Collect Resolver namespace results\n for (const [namespace, resolvers] of Object.entries(results.resolverNamespaceResults)) {\n resolverResults.push({\n namespace,\n resolvers,\n });\n }\n\n // Build input based on generator dependencies\n const input: Record<string, unknown> = {\n auth: getAuthInput(),\n };\n\n if (hasDependency(gen, \"tailordb\")) {\n input.tailordb = tailordbResults;\n }\n if (hasDependency(gen, \"resolver\")) {\n input.resolver = resolverResults;\n }\n if (hasDependency(gen, \"executor\")) {\n input.executor = Object.values(results.executorResults);\n }\n\n // Call generator's aggregate method\n const result = await gen.aggregate({\n input: input as Parameters<typeof gen.aggregate>[0][\"input\"],\n 
baseDir: path.join(baseDir, gen.id),\n configPath: config.path,\n });\n\n // Write generated files\n await writeGeneratedFiles(gen.id, result);\n }\n\n // =========================================================================\n // Plugin phase-complete hook runner\n // =========================================================================\n\n /**\n * Build TailorDB namespace data array from loaded services.\n * @returns Array of TailorDB namespace data\n */\n function buildTailorDBData(): TailorDBNamespaceData[] {\n return Object.entries(services.tailordb).map(([namespace, info]) => ({\n namespace,\n types: info.types,\n sourceInfo: new Map(Object.entries(info.sourceInfo)),\n pluginAttachments: info.pluginAttachments,\n }));\n }\n\n /**\n * Build resolver namespace data array from loaded services.\n * @returns Array of resolver namespace data\n */\n function buildResolverData(): ResolverNamespaceData[] {\n return Object.entries(services.resolver).map(([namespace, resolvers]) => ({\n namespace,\n resolvers,\n }));\n }\n\n /**\n * Run a plugin's phase-complete hook and write any generated files.\n * @param plugin - Plugin to run the hook on\n * @param hookName - Name of the hook to call\n * @returns Promise that resolves when hook completes\n */\n async function runPluginPhaseHook(\n plugin: Plugin,\n hookName: \"onTailorDBReady\" | \"onResolverReady\" | \"onExecutorReady\",\n ): Promise<void> {\n const hook = plugin[hookName];\n if (!hook) return;\n\n const pluginBaseDir = path.join(baseDir, plugin.id);\n const auth = getAuthInput();\n const tailordb = buildTailorDBData();\n\n let result: GeneratorResult;\n\n switch (hookName) {\n case \"onTailorDBReady\":\n result = await plugin.onTailorDBReady!({\n tailordb,\n auth,\n baseDir: pluginBaseDir,\n configPath: config.path,\n pluginConfig: plugin.pluginConfig,\n });\n break;\n case \"onResolverReady\":\n result = await plugin.onResolverReady!({\n tailordb,\n resolvers: buildResolverData(),\n auth,\n baseDir: 
pluginBaseDir,\n configPath: config.path,\n pluginConfig: plugin.pluginConfig,\n });\n break;\n case \"onExecutorReady\":\n result = await plugin.onExecutorReady!({\n tailordb,\n resolvers: buildResolverData(),\n executors: { ...services.executor },\n auth,\n baseDir: pluginBaseDir,\n configPath: config.path,\n pluginConfig: plugin.pluginConfig,\n });\n break;\n }\n\n await writeGeneratedFiles(plugin.id, result);\n }\n\n /**\n * Run a specific generation-time hook for all plugins that implement it.\n * Each hook runs at its natural pipeline phase, ensuring outputs from earlier\n * phases are available when later phases load resolvers/executors.\n * @param hookName - Name of the hook to call\n * @param watch - Whether running in watch mode (suppresses throws)\n */\n async function runPluginHook(\n hookName: \"onTailorDBReady\" | \"onResolverReady\" | \"onExecutorReady\",\n watch: boolean,\n ): Promise<void> {\n const plugins = generationPlugins.filter((p) => p[hookName] != null);\n if (plugins.length === 0) return;\n const results = await Promise.allSettled(\n plugins.map(async (plugin) => {\n try {\n await runPluginPhaseHook(plugin, hookName);\n } catch (error) {\n logger.error(`Error processing plugin ${styles.bold(plugin.id)} (${hookName})`);\n logger.error(String(error));\n if (!watch) {\n throw error;\n }\n }\n }),\n );\n if (!watch) {\n const failures = results.filter((r): r is PromiseRejectedResult => r.status === \"rejected\");\n if (failures.length > 0) {\n throw new AggregateError(failures.map((f) => f.reason));\n }\n }\n }\n\n // =========================================================================\n // Shared file writing\n // =========================================================================\n\n /**\n * Write generated files to disk.\n * @param sourceId - Generator or plugin ID for logging\n * @param result - Generator result containing files to write\n */\n async function writeGeneratedFiles(sourceId: string, result: GeneratorResult): 
Promise<void> {\n await Promise.all(\n result.files.map(async (file) => {\n fs.mkdirSync(path.dirname(file.path), { recursive: true });\n return new Promise<void>((resolve, reject) => {\n if (file.skipIfExists && fs.existsSync(file.path)) {\n const relativePath = path.relative(process.cwd(), file.path);\n logger.debug(`${sourceId} | skip existing: ${relativePath}`);\n return resolve();\n }\n\n fs.writeFile(file.path, file.content, (err) => {\n if (err) {\n const relativePath = path.relative(process.cwd(), file.path);\n logger.error(`Error writing file ${styles.bold(relativePath)}`);\n logger.error(String(err));\n reject(err);\n } else {\n const relativePath = path.relative(process.cwd(), file.path);\n logger.log(`${sourceId} | generate: ${styles.success(relativePath)}`);\n // Set executable permission if requested\n if (file.executable) {\n fs.chmod(file.path, 0o755, (chmodErr) => {\n if (chmodErr) {\n const relativePath = path.relative(process.cwd(), file.path);\n logger.error(\n `Error setting executable permission on ${styles.bold(relativePath)}`,\n );\n logger.error(String(chmodErr));\n reject(chmodErr);\n } else {\n resolve();\n }\n });\n } else {\n resolve();\n }\n }\n });\n });\n }),\n );\n }\n\n // =========================================================================\n // Generator orchestration\n // =========================================================================\n\n async function processGenerator(gen: AnyCodeGenerator): Promise<void> {\n generatorResults[gen.id] = {\n tailordbResults: {},\n resolverResults: {},\n tailordbNamespaceResults: {},\n resolverNamespaceResults: {},\n executorResults: {},\n };\n\n // Process TailorDB if generator has tailordb dependency\n if (hasDependency(gen, \"tailordb\")) {\n for (const [namespace, types] of Object.entries(services.tailordb)) {\n await processTailorDBNamespace(gen, namespace, types);\n }\n }\n\n // Process Resolver if generator has resolver dependency\n if (hasDependency(gen, \"resolver\")) {\n 
for (const [namespace, resolvers] of Object.entries(services.resolver)) {\n await processResolverNamespace(gen, namespace, resolvers);\n }\n }\n\n // Process Executors if generator has executor dependency\n if (hasDependency(gen, \"executor\")) {\n await processExecutors(gen);\n }\n\n // Aggregate all results\n await aggregate(gen);\n }\n\n async function runGenerators(gens: Generator[], watch: boolean): Promise<void> {\n const results = await Promise.allSettled(\n gens.map(async (gen) => {\n await withSpan(`generate.generator.${gen.id}`, async () => {\n try {\n await processGenerator(gen as AnyCodeGenerator);\n } catch (error) {\n logger.error(`Error processing generator ${styles.bold(gen.id)}`);\n logger.error(String(error));\n if (!watch) {\n throw error;\n }\n }\n });\n }),\n );\n if (!watch) {\n const failures = results.filter((r): r is PromiseRejectedResult => r.status === \"rejected\");\n if (failures.length > 0) {\n throw new AggregateError(failures.map((f) => f.reason));\n }\n }\n }\n\n async function restartWatchProcess(): Promise<void> {\n logger.newline();\n logger.info(\"Restarting watch process to clear module cache...\", {\n mode: \"stream\",\n });\n logger.newline();\n\n // Clean up watcher first\n if (watcher) {\n await watcher.stop();\n }\n\n // Spawn a new process with the same arguments\n const args = process.argv.slice(2);\n const env = {\n ...process.env,\n TAILOR_WATCH_GENERATION: (\n parseInt(process.env.TAILOR_WATCH_GENERATION || \"0\", 10) + 1\n ).toString(),\n };\n\n const child = spawn(process.argv[0], [process.argv[1], ...args], {\n stdio: \"inherit\",\n env,\n detached: false,\n });\n\n // Forward signals to child\n const forwardSignal = (signal: NodeJS.Signals) => {\n child.kill(signal);\n };\n\n process.on(\"SIGINT\", forwardSignal);\n process.on(\"SIGTERM\", forwardSignal);\n\n // Wait for child to exit, then exit parent\n child.on(\"exit\", (code) => {\n process.exit(code || 0);\n });\n\n // Don't exit immediately - let child 
handle everything\n }\n\n return {\n application,\n baseDir,\n generators,\n services,\n generatorResults,\n processGenerator,\n processTailorDBNamespace,\n processResolverNamespace,\n processExecutors,\n aggregate,\n\n async generate(watch: boolean): Promise<void> {\n logger.newline();\n logger.log(`Generation for application: ${styles.highlight(application.config.name)}`);\n\n const app = application;\n\n // Load TailorDB types (includes plugin-generated types)\n await withSpan(\"generate.loadTailorDBTypes\", async (span) => {\n span.setAttribute(\"generate.namespace_count\", app.tailorDBServices.length);\n for (const db of app.tailorDBServices) {\n const namespace = db.namespace;\n await withSpan(`generate.loadTypes.${namespace}`, async () => {\n try {\n await db.loadTypes();\n\n // Process namespace plugins after loading types\n // These plugins generate types without requiring a source type\n await db.processNamespacePlugins();\n\n services.tailordb[namespace] = {\n types: db.types,\n sourceInfo: db.typeSourceInfo,\n pluginAttachments: db.pluginAttachments,\n };\n } catch (error) {\n logger.error(`Error loading types for TailorDB service ${styles.bold(namespace)}`);\n logger.error(String(error));\n if (!watch) {\n throw error;\n }\n }\n });\n }\n });\n\n // Generate plugin type and executor files\n // This must happen after TailorDB types are loaded since plugins process during type loading\n const { pluginExecutorFiles, executorService } = await withSpan(\n \"generate.pluginFiles\",\n async () => {\n const pluginExecutorFiles = generatePluginFilesIfNeeded(\n pluginManager,\n app.tailorDBServices,\n config.path,\n );\n const executorService =\n app.executorService ??\n (pluginExecutorFiles.length > 0\n ? 
createExecutorService({ config: { files: [] } })\n : undefined);\n return { pluginExecutorFiles, executorService };\n },\n );\n\n // Resolve Auth namespaces (depends on TailorDB)\n if (app.authService) {\n await withSpan(\"generate.resolveAuthNamespaces\", async () =>\n app.authService!.resolveNamespaces(),\n );\n }\n\n // Add blank line after TailorDB types loaded\n if (app.tailorDBServices.length > 0 || pluginExecutorFiles.length > 0) {\n logger.newline();\n }\n\n // Run generators + plugin hooks for onTailorDBReady\n const readyAfterTailorDB = getReadyGenerators(\"tailordb\");\n const hasOnTailorDBReady = generationPlugins.some((p) => p.onTailorDBReady != null);\n if (readyAfterTailorDB.length > 0 || hasOnTailorDBReady) {\n await withSpan(\"generate.onTailorDBReady\", async () => {\n await Promise.all([\n runGenerators(readyAfterTailorDB, watch),\n runPluginHook(\"onTailorDBReady\", watch),\n ]);\n });\n logger.newline();\n }\n\n // Load Resolvers (can now import generated files)\n await withSpan(\"generate.loadResolvers\", async () => {\n for (const resolverService of app.resolverServices) {\n const namespace = resolverService.namespace;\n await withSpan(`generate.loadResolvers.${namespace}`, async () => {\n try {\n await resolverService.loadResolvers();\n services.resolver[namespace] = {};\n Object.entries(resolverService.resolvers).forEach(([_, resolver]) => {\n services.resolver[namespace][resolver.name] = resolver;\n });\n } catch (error) {\n logger.error(\n `Error loading resolvers for Resolver service ${styles.bold(namespace)}`,\n );\n logger.error(String(error));\n if (!watch) {\n throw error;\n }\n }\n });\n }\n });\n\n // Run generators + plugin hooks for onResolverReady\n const readyAfterResolvers = getReadyGenerators(\"resolver\");\n const hasOnResolverReady = generationPlugins.some((p) => p.onResolverReady != null);\n if (readyAfterResolvers.length > 0 || hasOnResolverReady) {\n await withSpan(\"generate.onResolversReady\", async () => {\n await 
Promise.all([\n runGenerators(readyAfterResolvers, watch),\n runPluginHook(\"onResolverReady\", watch),\n ]);\n });\n logger.newline();\n }\n\n // Load Executors (can now import generated files)\n await withSpan(\"generate.loadExecutors\", async () => {\n if (executorService) {\n await executorService.loadExecutors();\n // Load plugin-generated executors from generated TypeScript files\n if (pluginExecutorFiles.length > 0) {\n await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);\n }\n }\n // Get all executors (file-based and plugin-generated)\n const allExecutors = executorService?.executors ?? {};\n Object.entries(allExecutors).forEach(([key, executor]) => {\n services.executor[key] = executor as Executor;\n });\n });\n\n // Run generators + plugin hooks for onExecutorReady\n const readyAfterExecutors = getReadyGenerators(\"executor\");\n const hasOnExecutorReady = generationPlugins.some((p) => p.onExecutorReady != null);\n if (readyAfterExecutors.length > 0 || hasOnExecutorReady) {\n await withSpan(\"generate.onExecutorsReady\", async () => {\n await Promise.all([\n runGenerators(readyAfterExecutors, watch),\n runPluginHook(\"onExecutorReady\", watch),\n ]);\n });\n logger.newline();\n }\n },\n\n async watch(): Promise<void> {\n watcher = createDependencyWatcher();\n\n // Set up restart callback\n watcher.setRestartCallback(() => {\n restartWatchProcess();\n });\n\n // Watch config file\n await watcher.addWatchGroup(\"Config\", [config.path]);\n\n // Watch application services\n const app = application;\n\n // Watch TailorDB services\n for (const db of app.tailorDBServices) {\n const dbNamespace = db.namespace;\n await watcher?.addWatchGroup(`TailorDB/${dbNamespace}`, db.config.files);\n }\n\n // Watch Resolver services\n for (const resolverService of app.resolverServices) {\n const resolverNamespace = resolverService.namespace;\n await watcher?.addWatchGroup(\n `Resolver/${resolverNamespace}`,\n resolverService[\"config\"].files,\n );\n }\n\n 
// Keep the process running\n await new Promise(() => {});\n },\n };\n}\n\n/**\n * Run code generation using the Tailor configuration and generators.\n * @param options - Generation options\n * @returns Promise that resolves when generation (and watch, if enabled) completes\n */\nexport async function generate(options?: GenerateOptions) {\n return withSpan(\"generate\", async (rootSpan) => {\n // Load and validate options\n const { config, generators, plugins } = await withSpan(\"generate.loadConfig\", async () =>\n loadConfig(options?.configPath),\n );\n const watch = options?.watch ?? false;\n\n rootSpan.setAttribute(\"generate.watch\", watch);\n rootSpan.setAttribute(\"generate.generators.count\", generators.length);\n\n // Generate user types from loaded config\n await withSpan(\"generate.generateUserTypes\", async () =>\n generateUserTypes({ config, configPath: config.path }),\n );\n\n // Initialize plugin manager if plugins are provided\n let pluginManager: PluginManager | undefined;\n if (plugins.length > 0) {\n pluginManager = new PluginManager(plugins);\n }\n\n // Create a lightweight application (types not yet loaded)\n const application = defineApplication({ config, pluginManager });\n\n rootSpan.setAttribute(\"app.name\", application.config.name);\n\n const manager = createGenerationManager({ application, config, generators, pluginManager });\n await manager.generate(watch);\n if (watch) {\n await manager.watch();\n }\n });\n}\n","import { toJson } from \"@bufbuild/protobuf\";\nimport { timestampDate, ValueSchema } from \"@bufbuild/protobuf/wkt\";\nimport { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, deploymentArgs, jsonArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from 
\"@/cli/shared/logger\";\nimport type { MachineUser } from \"@tailor-proto/tailor/v1/auth_resource_pb\";\n\nexport interface ListMachineUsersOptions {\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n}\n\nexport interface MachineUserInfo {\n name: string;\n clientId: string;\n clientSecret: string;\n createdAt: Date | null;\n updatedAt: Date | null;\n attributes: Record<string, unknown>;\n}\n\n/**\n * Map a MachineUser protobuf message to CLI-friendly info.\n * @param user - Machine user resource\n * @returns Flattened machine user info\n */\nfunction machineUserInfo(user: MachineUser): MachineUserInfo {\n return {\n name: user.name,\n clientId: user.clientId,\n clientSecret: user.clientSecret,\n createdAt: user.createdAt ? timestampDate(user.createdAt) : null,\n updatedAt: user.updatedAt ? timestampDate(user.updatedAt) : null,\n attributes: Object.fromEntries(\n Object.entries(user.attributeMap).map(([key, value]) => [key, toJson(ValueSchema, value)]),\n ),\n };\n}\n\n/**\n * List machine users for the current application.\n * @param options - Machine user listing options\n * @returns List of machine users\n */\nexport async function listMachineUsers(\n options?: ListMachineUsersOptions,\n): Promise<MachineUserInfo[]> {\n // Load and validate options\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n // Get application\n const { config } = await loadConfig(options?.configPath);\n const { application } = await client.getApplication({\n workspaceId,\n applicationName: config.name,\n });\n if (!application?.authNamespace) {\n throw new Error(`Application ${config.name} does not have an auth configuration.`);\n }\n\n // Fetch all machine users\n const machineUsers = await fetchAll(async (pageToken, maxPageSize) => {\n const { 
machineUsers, nextPageToken } = await client.listAuthMachineUsers({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n authNamespace: application.authNamespace,\n });\n return [machineUsers, nextPageToken];\n });\n\n return machineUsers.map(machineUserInfo);\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List all machine users in the application.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n // Execute machineuser list logic\n const machineUsers = await listMachineUsers({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n });\n\n // Show machine users info\n logger.out(machineUsers, { display: { createdAt: null, updatedAt: null } });\n }),\n});\n","import { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, deploymentArgs, jsonArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { fetchMachineUserToken, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\n\nexport interface GetMachineUserTokenOptions {\n name: string;\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n}\n\nexport interface MachineUserTokenInfo {\n accessToken: string;\n tokenType: string;\n expiresAt: string;\n}\n\n/**\n * Get a machine user access token for the current application.\n * @param options - Token retrieval options\n * @returns Machine user token info\n */\nexport async function getMachineUserToken(\n options: GetMachineUserTokenOptions,\n): Promise<MachineUserTokenInfo> {\n // Load and validate options\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const 
workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n // Get application\n const { config } = await loadConfig(options.configPath);\n const { application } = await client.getApplication({\n workspaceId,\n applicationName: config.name,\n });\n if (!application?.authNamespace) {\n throw new Error(`Application ${config.name} does not have an auth configuration.`);\n }\n\n // Get machine user\n const { machineUser } = await client.getAuthMachineUser({\n workspaceId,\n authNamespace: application.authNamespace,\n name: options.name,\n });\n if (!machineUser) {\n throw new Error(`Machine user ${options.name} not found.`);\n }\n\n // Fetch machine user token\n const resp = await fetchMachineUserToken(\n application.url,\n machineUser.clientId,\n machineUser.clientSecret,\n );\n const expiresAt = new Date();\n expiresAt.setSeconds(expiresAt.getSeconds() + resp.expires_in);\n\n return {\n accessToken: resp.access_token,\n tokenType: resp.token_type,\n expiresAt: expiresAt.toISOString(),\n };\n}\n\nexport const tokenCommand = defineCommand({\n name: \"token\",\n description: \"Get an access token for a machine user.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n name: arg(z.string(), {\n positional: true,\n description: \"Machine user name\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n // Execute machineuser token logic\n const token = await getMachineUserToken({\n name: args.name,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n });\n\n // Show machine user token info\n // TODO: remove this transformation\n const tokenInfo = {\n access_token: token.accessToken,\n token_type: token.tokenType,\n expires_at: token.expiresAt,\n };\n logger.out(tokenInfo);\n }),\n});\n","import { timestampDate } from \"@bufbuild/protobuf/wkt\";\nimport {\n type AuthOAuth2Client,\n AuthOAuth2Client_GrantType,\n} from 
\"@tailor-proto/tailor/v1/auth_resource_pb\";\n\nconst grantTypeToString = (grantType: AuthOAuth2Client_GrantType): string => {\n switch (grantType) {\n case AuthOAuth2Client_GrantType.AUTHORIZATION_CODE:\n return \"authorization_code\";\n case AuthOAuth2Client_GrantType.REFRESH_TOKEN:\n return \"refresh_token\";\n default:\n return \"unknown\";\n }\n};\n\nexport interface OAuth2ClientInfo {\n name: string;\n description: string;\n clientId: string;\n grantTypes: string[];\n redirectUris: string[];\n createdAt: Date | null;\n}\n\nexport interface OAuth2ClientCredentials {\n name: string;\n description: string;\n clientId: string;\n clientSecret: string;\n grantTypes: string[];\n redirectUris: string[];\n createdAt: Date | null;\n}\n\n/**\n * Transform an AuthOAuth2Client into CLI-friendly OAuth2 client info.\n * @param client - OAuth2 client resource\n * @returns Flattened OAuth2 client info\n */\nexport function toOAuth2ClientInfo(client: AuthOAuth2Client): OAuth2ClientInfo {\n return {\n name: client.name,\n description: client.description,\n clientId: client.clientId,\n grantTypes: client.grantTypes.map(grantTypeToString),\n redirectUris: client.redirectUris,\n createdAt: client.createdAt ? timestampDate(client.createdAt) : null,\n };\n}\n\n/**\n * Transform an AuthOAuth2Client into OAuth2 client credentials info.\n * @param client - OAuth2 client resource\n * @returns OAuth2 client credentials\n */\nexport function toOAuth2ClientCredentials(client: AuthOAuth2Client): OAuth2ClientCredentials {\n return {\n name: client.name,\n description: client.description,\n clientId: client.clientId,\n clientSecret: client.clientSecret,\n grantTypes: client.grantTypes.map(grantTypeToString),\n redirectUris: client.redirectUris,\n createdAt: client.createdAt ? 
timestampDate(client.createdAt) : null,\n };\n}\n","import { Code, ConnectError } from \"@connectrpc/connect\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, deploymentArgs, jsonArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { type OAuth2ClientCredentials, toOAuth2ClientCredentials } from \"./transform\";\n\nexport interface GetOAuth2ClientOptions {\n name: string;\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n}\n\n/**\n * Get OAuth2 client credentials for the current application.\n * @param options - OAuth2 client lookup options\n * @returns OAuth2 client credentials\n */\nexport async function getOAuth2Client(\n options: GetOAuth2ClientOptions,\n): Promise<OAuth2ClientCredentials> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n const { config } = await loadConfig(options.configPath);\n const { application } = await client.getApplication({\n workspaceId,\n applicationName: config.name,\n });\n if (!application?.authNamespace) {\n throw new Error(`Application ${config.name} does not have an auth configuration.`);\n }\n\n try {\n const { oauth2Client } = await client.getAuthOAuth2Client({\n workspaceId,\n namespaceName: application.authNamespace,\n name: options.name,\n });\n\n return toOAuth2ClientCredentials(oauth2Client!);\n } catch (error) {\n if (error instanceof ConnectError && error.code === Code.NotFound) {\n throw new Error(`OAuth2 client '${options.name}' not found.`);\n }\n throw error;\n }\n}\n\nexport const 
getCommand = defineCommand({\n name: \"get\",\n description: \"Get OAuth2 client credentials (including client secret).\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n name: arg(z.string(), {\n positional: true,\n description: \"OAuth2 client name\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const credentials = await getOAuth2Client({\n name: args.name,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n });\n\n logger.out(credentials);\n }),\n});\n","import { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, deploymentArgs, jsonArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { type OAuth2ClientInfo, toOAuth2ClientInfo } from \"./transform\";\n\nexport interface ListOAuth2ClientsOptions {\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n}\n\n/**\n * List OAuth2 clients for the current application.\n * @param options - OAuth2 client listing options\n * @returns List of OAuth2 clients\n */\nexport async function listOAuth2Clients(\n options?: ListOAuth2ClientsOptions,\n): Promise<OAuth2ClientInfo[]> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n const { config } = await loadConfig(options?.configPath);\n const { application } = await client.getApplication({\n workspaceId,\n applicationName: config.name,\n });\n if (!application?.authNamespace) {\n throw new Error(`Application ${config.name} does not have an auth configuration.`);\n 
}\n\n const oauth2Clients = await fetchAll(async (pageToken, maxPageSize) => {\n const { oauth2Clients, nextPageToken } = await client.listAuthOAuth2Clients({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n namespaceName: application.authNamespace,\n });\n return [oauth2Clients, nextPageToken];\n });\n\n return oauth2Clients.map(toOAuth2ClientInfo);\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List all OAuth2 clients in the application.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const oauth2Clients = await listOAuth2Clients({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n });\n\n logger.out(oauth2Clients);\n }),\n});\n","import ml from \"multiline-ts\";\nimport { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { applyApplication, planApplication } from \"@/cli/commands/apply/application\";\nimport { type PlanContext } from \"@/cli/commands/apply/apply\";\nimport { applyAuth, planAuth } from \"@/cli/commands/apply/auth\";\nimport { applyExecutor, planExecutor } from \"@/cli/commands/apply/executor\";\nimport {\n applyFunctionRegistry,\n planFunctionRegistry,\n} from \"@/cli/commands/apply/function-registry\";\nimport { applyIdP, planIdP } from \"@/cli/commands/apply/idp\";\nimport { applyPipeline, planPipeline } from \"@/cli/commands/apply/resolver\";\nimport { applySecretManager, planSecretManager } from \"@/cli/commands/apply/secret-manager\";\nimport { applyStaticWebsite, planStaticWebsite } from \"@/cli/commands/apply/staticwebsite\";\nimport { applyTailorDB, planTailorDB } from \"@/cli/commands/apply/tailordb\";\nimport { applyWorkflow, planWorkflow } from \"@/cli/commands/apply/workflow\";\nimport { type Application, defineApplication } from \"@/cli/services/application\";\nimport { commonArgs, confirmationArgs, deploymentArgs, withCommonArgs } from 
\"@/cli/shared/args\";\nimport { initOperatorClient, type OperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig, type LoadedConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\n\nexport interface RemoveOptions {\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n}\n\nasync function loadOptions(options?: RemoveOptions) {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n const { config } = await loadConfig(options?.configPath);\n const application = defineApplication({ config });\n return {\n client,\n workspaceId,\n application,\n config,\n };\n}\n\nasync function execRemove(\n client: OperatorClient,\n workspaceId: string,\n application: Application,\n config: LoadedConfig,\n confirm?: () => Promise<void>,\n) {\n // Plan all resources with forRemoval=true\n const ctx: PlanContext = {\n client,\n workspaceId,\n application,\n forRemoval: true,\n config,\n };\n const tailorDB = await planTailorDB(ctx);\n const staticWebsite = await planStaticWebsite(ctx);\n const idp = await planIdP(ctx);\n const auth = await planAuth(ctx);\n const pipeline = await planPipeline(ctx);\n const app = await planApplication(ctx);\n const executor = await planExecutor(ctx);\n const workflow = await planWorkflow(client, workspaceId, application.name, {}, {});\n const functionRegistry = await planFunctionRegistry(client, workspaceId, application.name, []);\n const secretManager = await planSecretManager(ctx);\n\n if (\n tailorDB.changeSet.service.deletes.length === 0 &&\n staticWebsite.changeSet.deletes.length === 0 &&\n idp.changeSet.service.deletes.length === 0 &&\n auth.changeSet.service.deletes.length === 0 &&\n 
pipeline.changeSet.service.deletes.length === 0 &&\n app.deletes.length === 0 &&\n executor.changeSet.deletes.length === 0 &&\n workflow.changeSet.deletes.length === 0 &&\n functionRegistry.changeSet.deletes.length === 0 &&\n secretManager.vaultChangeSet.deletes.length === 0 &&\n secretManager.secretChangeSet.deletes.length === 0\n ) {\n return;\n }\n\n // Confirm deletion\n if (confirm) {\n await confirm();\n }\n\n // Apply deletions in reverse order of dependencies\n await applyWorkflow(client, workflow, \"delete\");\n await applyExecutor(client, executor, \"delete\");\n await applyStaticWebsite(client, staticWebsite, \"delete\");\n await applyApplication(client, app, \"delete\");\n await applyPipeline(client, pipeline, \"delete-resources\");\n await applyPipeline(client, pipeline, \"delete-services\");\n await applyAuth(client, auth, \"delete-resources\");\n await applyAuth(client, auth, \"delete-services\");\n await applyIdP(client, idp, \"delete-resources\");\n await applyIdP(client, idp, \"delete-services\");\n await applyTailorDB(client, tailorDB, \"delete-resources\");\n await applyTailorDB(client, tailorDB, \"delete-services\");\n await applyFunctionRegistry(client, workspaceId, functionRegistry, \"delete\");\n await applySecretManager(client, secretManager, \"delete\");\n}\n\n/**\n * Remove all resources managed by the current application.\n * @param options - Remove options\n * @returns Promise that resolves when removal completes\n */\nexport async function remove(options?: RemoveOptions): Promise<void> {\n const { client, workspaceId, application, config } = await loadOptions(options);\n await execRemove(client, workspaceId, application, config);\n}\n\nexport const removeCommand = defineCommand({\n name: \"remove\",\n description: \"Remove all resources managed by the application from the workspace.\",\n args: z\n .object({\n ...commonArgs,\n ...deploymentArgs,\n ...confirmationArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const { 
client, workspaceId, application, config } = await loadOptions({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n });\n\n logger.info(`Planning removal of resources managed by \"${application.name}\"...`);\n logger.newline();\n\n await execRemove(client, workspaceId, application, config, async () => {\n if (!args.yes) {\n const confirmed = await logger.prompt(\"Are you sure you want to remove all resources?\", {\n type: \"confirm\",\n initial: false,\n });\n if (!confirmed) {\n throw new Error(ml`\n Remove cancelled. No resources were deleted.\n To override, run again and confirm, or use --yes flag.\n `);\n }\n } else {\n logger.success(\"Removing all resources (--yes flag specified)...\");\n }\n });\n\n logger.success(`Successfully removed all resources managed by \"${application.name}\".`);\n }),\n});\n","import { timestampDate } from \"@bufbuild/protobuf/wkt\";\nimport { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, deploymentArgs, jsonArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport type { Application } from \"@tailor-proto/tailor/v1/application_resource_pb\";\n\nexport interface ShowOptions {\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n}\n\nexport interface WorkspaceInfo {\n workspaceId: string;\n workspaceName: string;\n workspaceRegion?: string;\n}\n\nexport interface ApplicationInfo {\n name: string;\n domain: string;\n url: string;\n auth: string;\n cors: string[];\n allowedIpAddresses: string[];\n disableIntrospection: boolean;\n createdAt: Date | null;\n updatedAt: Date | null;\n}\n\nexport interface ShowInfo extends ApplicationInfo, WorkspaceInfo {}\n\nfunction applicationInfo(app: Application): 
ApplicationInfo {\n return {\n name: app.name,\n domain: app.domain,\n url: app.url,\n auth: app.authNamespace,\n cors: app.cors,\n allowedIpAddresses: app.allowedIpAddresses,\n disableIntrospection: app.disableIntrospection,\n createdAt: app.createTime ? timestampDate(app.createTime) : null,\n updatedAt: app.updateTime ? timestampDate(app.updateTime) : null,\n };\n}\n\n/**\n * Show applied application information for the current workspace.\n * @param options - Show options\n * @returns Application information\n */\nexport async function show(options?: ShowOptions): Promise<ShowInfo> {\n // Load and validate options\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n const { config } = await loadConfig(options?.configPath);\n const [workspaceResp, resp] = await Promise.all([\n client.getWorkspace({\n workspaceId,\n }),\n client.getApplication({\n workspaceId,\n applicationName: config.name,\n }),\n ]);\n const { name, ...appInfo } = applicationInfo(resp.application!);\n\n return {\n name,\n workspaceId,\n workspaceName: workspaceResp.workspace?.name ?? \"\",\n workspaceRegion: workspaceResp.workspace?.region ?? 
\"\",\n ...appInfo,\n };\n}\n\nexport const showCommand = defineCommand({\n name: \"show\",\n description: \"Show information about the deployed application.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n // Execute show logic\n const appInfo = await show({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n });\n\n logger.out(appInfo);\n }),\n});\n","import { logger } from \"./logger\";\n\n/**\n * Warn that a feature is in beta.\n * @param {string} featureName - Name of the beta feature (e.g., \"tailordb erd\", \"tailordb migration\")\n */\nexport function logBetaWarning(featureName: string): void {\n logger.warn(\n `The '${featureName}' command is a beta feature and may introduce breaking changes in future releases.`,\n );\n logger.newline();\n}\n","import { spawn } from \"node:child_process\";\n\nconst DEFAULT_EDITOR = \"editor\";\n\nfunction normalizeEditorCommand(editor: string | undefined): string | undefined {\n const normalized = editor?.trim();\n return normalized && normalized.length > 0 ? normalized : undefined;\n}\n\n/**\n * Resolve an editor command only from explicit environment variables.\n * @returns Configured editor command, if any\n */\nexport function getConfiguredEditorCommand(): string | undefined {\n return normalizeEditorCommand(process.env.VISUAL) ?? normalizeEditorCommand(process.env.EDITOR);\n}\n\n/**\n * Resolve the editor command used for interactive file editing.\n * @returns Configured editor command or the system default fallback\n */\nexport function getEditorCommand(): string {\n return getConfiguredEditorCommand() ?? 
DEFAULT_EDITOR;\n}\n\nfunction parseEditorCommand(editor: string): {\n command: string;\n args: string[];\n} {\n const [command, ...args] = editor.trim().split(/\\s+/);\n\n if (!command) {\n throw new Error(\"Editor command is empty.\");\n }\n\n return {\n command,\n args,\n };\n}\n\n/**\n * Open a file in the resolved editor and wait for the process to exit.\n * @param filePath - File path to open\n * @param editor - Editor command string\n * @returns Whether an editor process was launched\n */\nexport async function openInEditor(\n filePath: string,\n editor = getEditorCommand(),\n): Promise<boolean> {\n const { command, args } = parseEditorCommand(editor);\n\n await new Promise<void>((resolve, reject) => {\n const child = spawn(command, [...args, filePath], {\n stdio: \"inherit\",\n detached: false,\n });\n\n child.once(\"error\", (error) => reject(error));\n child.once(\"close\", (code) => {\n if (code == null || code === 0) {\n resolve();\n return;\n }\n reject(new Error(`Editor exited with code ${code}.`));\n });\n });\n\n return true;\n}\n\n/**\n * Open a file only when an editor is explicitly configured in the environment.\n * @param filePath - File path to open\n * @returns Whether an editor process was launched\n */\nexport async function openInConfiguredEditor(filePath: string): Promise<boolean> {\n const editor = getConfiguredEditorCommand();\n if (!editor) {\n return false;\n }\n\n return await openInEditor(filePath, editor);\n}\n","/**\n * DB types generator for TailorDB migrations\n *\n * Generates db.ts file containing Kysely Transaction types\n * based on the schema snapshot at a specific migration point.\n */\n\nimport * as fs from \"node:fs/promises\";\nimport {\n getMigrationFilePath,\n type SchemaSnapshot,\n type SnapshotFieldConfig,\n type SnapshotType,\n} from \"./snapshot\";\nimport type { MigrationDiff } from \"./diff-calculator\";\n\n/**\n * Information about enum value changes\n */\ninterface EnumValueChange {\n /** Allowed values before 
the change */\n beforeValues: string[];\n /** Allowed values after the change */\n afterValues: string[];\n}\n\n/**\n * Information about breaking change fields that need special handling\n */\ninterface BreakingChangeFieldInfo {\n /** Map of typeName -> Set of fieldNames that are changing from optional to required */\n optionalToRequired: Map<string, Set<string>>;\n /** Map of typeName -> Map of fieldName -> SnapshotFieldConfig for newly added required fields */\n addedRequiredFields: Map<string, Map<string, SnapshotFieldConfig>>;\n /** Map of typeName -> Map of fieldName -> EnumValueChange for enum value changes */\n enumValueChanges: Map<string, Map<string, EnumValueChange>>;\n}\n\n/**\n * Extract breaking change field information from diff\n * @param {MigrationDiff} diff - Migration diff\n * @returns {BreakingChangeFieldInfo} Breaking change field information\n */\nfunction extractBreakingChangeFields(diff: MigrationDiff): BreakingChangeFieldInfo {\n const optionalToRequired = new Map<string, Set<string>>();\n const addedRequiredFields = new Map<string, Map<string, SnapshotFieldConfig>>();\n const enumValueChanges = new Map<string, Map<string, EnumValueChange>>();\n\n for (const change of diff.changes) {\n if (change.kind === \"field_modified\" && change.fieldName) {\n const before = change.before as SnapshotFieldConfig | undefined;\n const after = change.after as SnapshotFieldConfig | undefined;\n\n // Check if this is an optional -> required change\n if (before && after && !before.required && after.required) {\n if (!optionalToRequired.has(change.typeName)) {\n optionalToRequired.set(change.typeName, new Set());\n }\n optionalToRequired.get(change.typeName)!.add(change.fieldName);\n }\n\n // Check if this is an enum value change\n if (\n before &&\n after &&\n before.type === \"enum\" &&\n after.type === \"enum\" &&\n before.allowedValues &&\n after.allowedValues\n ) {\n // Check if there are any differences in allowed values\n const beforeValues = 
before.allowedValues.map((v) => v.value);\n const afterValues = after.allowedValues.map((v) => v.value);\n const beforeSet = new Set(beforeValues);\n const afterSet = new Set(afterValues);\n const hasChanges =\n beforeValues.some((v) => !afterSet.has(v)) || afterValues.some((v) => !beforeSet.has(v));\n\n if (hasChanges) {\n if (!enumValueChanges.has(change.typeName)) {\n enumValueChanges.set(change.typeName, new Map());\n }\n enumValueChanges.get(change.typeName)!.set(change.fieldName, {\n beforeValues,\n afterValues,\n });\n }\n }\n } else if (change.kind === \"field_added\" && change.fieldName) {\n const after = change.after as SnapshotFieldConfig | undefined;\n\n // Required field added is a breaking change - add it as optional in db.ts\n // so migration script can set values for existing records\n if (after && after.required) {\n if (!addedRequiredFields.has(change.typeName)) {\n addedRequiredFields.set(change.typeName, new Map());\n }\n addedRequiredFields.get(change.typeName)!.set(change.fieldName, after);\n }\n }\n }\n\n return { optionalToRequired, addedRequiredFields, enumValueChanges };\n}\n\n/**\n * Generate the complete db.ts file content from a schema snapshot\n * @param {SchemaSnapshot} snapshot - Schema snapshot to generate types from\n * @param {MigrationDiff} [diff] - Optional migration diff for breaking change info\n * @returns {string} Generated db.ts file contents\n */\nfunction generateDbTypesFromSnapshot(snapshot: SchemaSnapshot, diff?: MigrationDiff): string {\n const types = Object.values(snapshot.types);\n if (types.length === 0) {\n return generateEmptyDbTypes(snapshot.namespace);\n }\n\n // Extract breaking change field information\n const breakingChangeFields = diff\n ? 
extractBreakingChangeFields(diff)\n : {\n optionalToRequired: new Map(),\n addedRequiredFields: new Map(),\n enumValueChanges: new Map(),\n };\n\n // Track which utility types are used\n const usedUtilityTypes = new Set<\"Timestamp\" | \"Serial\">();\n\n // Generate type definitions\n const typeDefinitions: string[] = [];\n for (const type of types) {\n const result = generateTableType(type, breakingChangeFields);\n if (result.usedTimestamp) usedUtilityTypes.add(\"Timestamp\");\n typeDefinitions.push(result.typeDef);\n }\n\n // Build imports\n // ColumnType is always needed for Generated and Timestamp utility types\n const imports: string[] = [\"type ColumnType\", \"type Transaction as KyselyTransaction\"];\n\n // Build utility type declarations\n const utilityTypeDeclarations: string[] = [];\n if (usedUtilityTypes.has(\"Timestamp\")) {\n utilityTypeDeclarations.push(\n \"type Timestamp = ColumnType<Date, Date | string, Date | string>;\",\n );\n }\n utilityTypeDeclarations.push(\n \"type Generated<T> = T extends ColumnType<infer S, infer I, infer U>\\n ? 
ColumnType<S, I | undefined, U>\\n : ColumnType<T, T | undefined, T>;\",\n );\n if (usedUtilityTypes.has(\"Serial\")) {\n utilityTypeDeclarations.push(\"type Serial<T = string | number> = ColumnType<T, never, never>;\");\n }\n\n // Build output\n const lines: string[] = [\n \"/**\",\n \" * Auto-generated Kysely types for migration script.\",\n \" * These types reflect the database schema state at this migration point.\",\n \" *\",\n \" * DO NOT EDIT - This file is auto-generated by the migration system.\",\n \" */\",\n \"\",\n `import { ${imports.join(\", \")} } from \"@tailor-platform/sdk/kysely\";`,\n \"\",\n ...utilityTypeDeclarations,\n \"\",\n \"interface Database {\",\n ...typeDefinitions,\n \"}\",\n \"\",\n \"export type Transaction = KyselyTransaction<Database>;\",\n ];\n\n return lines.join(\"\\n\") + \"\\n\";\n}\n\n/**\n * Generate an empty db.ts file for migrations with no types\n * @param {string} namespace - Namespace name\n * @returns {string} Empty db.ts file contents\n */\nfunction generateEmptyDbTypes(namespace: string): string {\n return (\n [\n \"/**\",\n \" * Auto-generated Kysely types for migration script.\",\n ` * Namespace: ${namespace}`,\n \" *\",\n \" * DO NOT EDIT - This file is auto-generated by the migration system.\",\n \" */\",\n \"\",\n 'import { type Transaction as KyselyTransaction } from \"@tailor-platform/sdk/kysely\";',\n \"\",\n \"// eslint-disable-next-line @typescript-eslint/no-empty-object-type\",\n \"interface Database {}\",\n \"\",\n \"export type Transaction = KyselyTransaction<Database>;\",\n ].join(\"\\n\") + \"\\n\"\n );\n}\n\n/**\n * Generate table type definition from a snapshot type\n * @param {SnapshotType} type - Snapshot type\n * @param {BreakingChangeFieldInfo} breakingChangeFields - Breaking change field info\n * @returns {{ typeDef: string; usedTimestamp: boolean; usedColumnType: boolean }} Generated type and utility type usage\n */\nfunction generateTableType(\n type: SnapshotType,\n breakingChangeFields: 
BreakingChangeFieldInfo,\n): {\n typeDef: string;\n usedTimestamp: boolean;\n usedColumnType: boolean;\n} {\n const fieldLines: string[] = [];\n let usedTimestamp = false;\n let usedColumnType = false;\n\n // Add id field first\n fieldLines.push(\" id: Generated<string>;\");\n\n // Get fields that are changing from optional to required for this type\n const optionalToRequiredFields =\n breakingChangeFields.optionalToRequired.get(type.name) || new Set();\n\n // Get newly added required fields for this type\n const addedRequiredFields = breakingChangeFields.addedRequiredFields.get(type.name) || new Map();\n\n // Get enum value changes for this type\n const enumValueChangesForType = breakingChangeFields.enumValueChanges.get(type.name) || new Map();\n\n for (const [fieldName, fieldConfig] of Object.entries(type.fields)) {\n if (fieldName === \"id\") continue;\n\n const isOptionalToRequired = optionalToRequiredFields.has(fieldName);\n const enumValueChange = enumValueChangesForType.get(fieldName);\n const result = generateFieldType(fieldConfig, isOptionalToRequired, enumValueChange);\n fieldLines.push(` ${fieldName}: ${result.type};`);\n usedTimestamp = usedTimestamp || result.usedTimestamp;\n usedColumnType = usedColumnType || result.usedColumnType;\n }\n\n // Add newly added required fields with ColumnType (same as optional→required)\n // These fields are added as nullable in pre-migration, then become required in post-migration\n for (const [fieldName, fieldConfig] of addedRequiredFields) {\n // Treat as optional→required change (isOptionalToRequired: true)\n const result = generateFieldType(fieldConfig, true, undefined);\n fieldLines.push(` ${fieldName}: ${result.type};`);\n usedTimestamp = usedTimestamp || result.usedTimestamp;\n usedColumnType = usedColumnType || result.usedColumnType;\n }\n\n const typeDef = ` ${type.name}: {\\n${fieldLines.join(\"\\n\")}\\n }`;\n\n return { typeDef, usedTimestamp, usedColumnType };\n}\n\nfunction mapToTsType(fieldType: string): 
{\n type: string;\n usedTimestamp: boolean;\n} {\n switch (fieldType) {\n case \"uuid\":\n case \"string\":\n case \"decimal\":\n return { type: \"string\", usedTimestamp: false };\n case \"integer\":\n case \"float\":\n case \"number\":\n return { type: \"number\", usedTimestamp: false };\n case \"date\":\n case \"datetime\":\n return { type: \"Timestamp\", usedTimestamp: true };\n case \"bool\":\n case \"boolean\":\n return { type: \"boolean\", usedTimestamp: false };\n default:\n return { type: \"string\", usedTimestamp: false };\n }\n}\n\nfunction formatEnumUnion(values: string[]): string {\n return values.map((v) => `\"${v}\"`).join(\" | \");\n}\n\nfunction generateEnumChangeColumnType(\n enumValueChange: EnumValueChange,\n config: SnapshotFieldConfig,\n): string {\n const allValues = [...new Set([...enumValueChange.beforeValues, ...enumValueChange.afterValues])];\n const selectType = formatEnumUnion(allValues);\n const afterType = formatEnumUnion(enumValueChange.afterValues);\n\n if (config.array && !config.required) {\n return `ColumnType<(${selectType})[] | null, (${afterType})[] | null, (${afterType})[] | null>`;\n }\n if (config.array) {\n return `ColumnType<(${selectType})[], (${afterType})[], (${afterType})[]>`;\n }\n if (!config.required) {\n return `ColumnType<(${selectType}) | null, (${afterType}) | null, (${afterType}) | null>`;\n }\n return `ColumnType<${selectType}, ${afterType}, ${afterType}>`;\n}\n\n/**\n * Generate field type from snapshot field config\n * @param {SnapshotFieldConfig} config - Field configuration\n * @param {boolean} isOptionalToRequired - Whether this field is changing from optional to required\n * @param {EnumValueChange} [enumValueChange] - Enum value change info if applicable\n * @returns {{ type: string; usedTimestamp: boolean; usedColumnType: boolean }} Generated type string and utility type usage\n */\nfunction generateFieldType(\n config: SnapshotFieldConfig,\n isOptionalToRequired: boolean,\n enumValueChange?: 
EnumValueChange,\n): {\n type: string;\n usedTimestamp: boolean;\n usedColumnType: boolean;\n} {\n // Handle enum value changes specially\n if (enumValueChange) {\n return {\n type: generateEnumChangeColumnType(enumValueChange, config),\n usedTimestamp: false,\n usedColumnType: true,\n };\n }\n\n // Get base type\n let baseType: string;\n let usedTimestamp = false;\n\n if (config.type === \"enum\") {\n const enumValues = config.allowedValues?.map((v) => v.value) ?? [];\n baseType = enumValues.length > 0 ? formatEnumUnion(enumValues) : \"string\";\n } else {\n const mapped = mapToTsType(config.type);\n baseType = mapped.type;\n usedTimestamp = mapped.usedTimestamp;\n }\n\n // Apply array modifier\n let type = baseType;\n if (config.array) {\n const needsParens =\n config.type === \"enum\" && config.allowedValues && config.allowedValues.length > 0;\n type = needsParens ? `(${baseType})[]` : `${baseType}[]`;\n }\n\n // Handle nullable/required modifiers\n if (isOptionalToRequired) {\n // For fields changing from optional to required:\n // SELECT returns T | null (existing data might be null)\n // INSERT/UPDATE requires T (must provide a value)\n return {\n type: `ColumnType<${type} | null, ${type}, ${type}>`,\n usedTimestamp,\n usedColumnType: true,\n };\n }\n\n if (!config.required) {\n type = `${type} | null`;\n }\n\n return { type, usedTimestamp, usedColumnType: false };\n}\n\n/**\n * Write db.ts file for a migration\n * @param {SchemaSnapshot} snapshot - Schema snapshot to generate types from\n * @param {string} migrationsDir - Migrations directory path\n * @param {number} migrationNumber - Migration number\n * @param {MigrationDiff} [diff] - Optional migration diff for breaking change info\n * @returns {Promise<string>} Path to the written file\n */\nexport async function writeDbTypesFile(\n snapshot: SchemaSnapshot,\n migrationsDir: string,\n migrationNumber: number,\n diff?: MigrationDiff,\n): Promise<string> {\n const content = 
generateDbTypesFromSnapshot(snapshot, diff);\n const filePath = getMigrationFilePath(migrationsDir, migrationNumber, \"db\");\n await fs.writeFile(filePath, content);\n return filePath;\n}\n","/**\n * Template generator for TailorDB migrations\n *\n * Generates migration files in directory structure:\n * - XXXX/schema.json - Full schema snapshot (initial migration 0000)\n * - XXXX/diff.json - Schema diff (subsequent migrations 0001+)\n * - XXXX/migrate.ts - Data migration script (when breaking changes exist)\n * - XXXX/db.ts - Generated types for migration script\n */\n\nimport * as fs from \"node:fs/promises\";\nimport { writeDbTypesFile } from \"./db-types-generator\";\nimport {\n getMigrationDirPath,\n getMigrationFilePath,\n type SchemaSnapshot,\n type SnapshotFieldConfig,\n} from \"./snapshot\";\nimport type { MigrationDiff, DiffChange } from \"./diff-calculator\";\n\n/**\n * Check if a file exists\n * @param {string} filePath - Path to check\n * @returns {Promise<boolean>} True if file exists\n */\nasync function fileExists(filePath: string): Promise<boolean> {\n try {\n await fs.access(filePath);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Ensure a file does not already exist, throwing an error if it does\n * @param {string} filePath - Path to check\n * @throws {Error} If file already exists\n */\nasync function ensureFileNotExists(filePath: string): Promise<void> {\n if (await fileExists(filePath)) {\n throw new Error(`Migration file already exists: ${filePath}`);\n }\n}\n\ninterface GenerateSchemaResult {\n filePath: string;\n migrationNumber: number;\n}\n\ninterface GenerateDiffResult {\n diffFilePath: string;\n migrateFilePath?: string;\n dbTypesFilePath?: string;\n migrationNumber: number;\n}\n\n/**\n * Generate the initial schema snapshot file\n * @param {SchemaSnapshot} snapshot - Schema snapshot to save\n * @param {string} migrationsDir - Migrations directory path\n * @param {number} migrationNumber - Migration number\n * @returns 
{Promise<GenerateSchemaResult>} Generated file info\n */\nexport async function generateSchemaFile(\n snapshot: SchemaSnapshot,\n migrationsDir: string,\n migrationNumber: number,\n): Promise<GenerateSchemaResult> {\n // Create migration directory\n const migrationDir = getMigrationDirPath(migrationsDir, migrationNumber);\n await fs.mkdir(migrationDir, { recursive: true });\n\n const filePath = getMigrationFilePath(migrationsDir, migrationNumber, \"schema\");\n\n // Check if file already exists to prevent accidental overwrite\n await ensureFileNotExists(filePath);\n\n await fs.writeFile(filePath, JSON.stringify(snapshot, null, 2));\n\n return {\n filePath,\n migrationNumber,\n };\n}\n\n/**\n * Generate diff and optional migration script files\n * @param {MigrationDiff} diff - Migration diff to save\n * @param {string} migrationsDir - Migrations directory path\n * @param {number} migrationNumber - Migration number\n * @param {SchemaSnapshot} previousSnapshot - Previous schema snapshot (for db.ts generation)\n * @param {string} [description] - Optional description for the migration\n * @returns {Promise<GenerateDiffResult>} Generated file info\n */\nexport async function generateDiffFiles(\n diff: MigrationDiff,\n migrationsDir: string,\n migrationNumber: number,\n previousSnapshot: SchemaSnapshot,\n description?: string,\n): Promise<GenerateDiffResult> {\n // Create migration directory\n const migrationDir = getMigrationDirPath(migrationsDir, migrationNumber);\n await fs.mkdir(migrationDir, { recursive: true });\n\n // Build file paths\n const diffFilePath = getMigrationFilePath(migrationsDir, migrationNumber, \"diff\");\n const migrateFilePath = getMigrationFilePath(migrationsDir, migrationNumber, \"migrate\");\n const dbTypesFilePath = getMigrationFilePath(migrationsDir, migrationNumber, \"db\");\n\n // Check if files already exist to prevent accidental overwrite\n await ensureFileNotExists(diffFilePath);\n if (diff.requiresMigrationScript) {\n await 
ensureFileNotExists(migrateFilePath);\n await ensureFileNotExists(dbTypesFilePath);\n }\n\n // Add description if provided\n if (description) {\n diff = { ...diff, description };\n }\n\n // Write diff file\n await fs.writeFile(diffFilePath, JSON.stringify(diff, null, 2));\n\n const result: GenerateDiffResult = {\n diffFilePath,\n migrationNumber,\n };\n\n // Generate migration script and db types only if migration script is required\n if (diff.requiresMigrationScript) {\n const scriptContent = generateMigrationScript(diff);\n await fs.writeFile(migrateFilePath, scriptContent);\n result.migrateFilePath = migrateFilePath;\n\n // Generate db.ts with types based on the PREVIOUS schema state\n // (the state before this migration runs)\n // Pass diff to generate ColumnType for optional->required fields\n await writeDbTypesFile(previousSnapshot, migrationsDir, migrationNumber, diff);\n result.dbTypesFilePath = dbTypesFilePath;\n }\n\n return result;\n}\n\n/**\n * Generate migration script content based on diff\n * @param {MigrationDiff} diff - Migration diff\n * @returns {string} Migration script content\n */\nfunction generateMigrationScript(diff: MigrationDiff): string {\n const updates: string[] = [];\n\n for (const change of diff.changes) {\n const script = generateChangeScript(change);\n if (script) {\n updates.push(script);\n }\n }\n\n if (updates.length === 0) {\n updates.push(` // No data migration needed for this schema change\n // Add custom data transformations if required`);\n }\n\n return `/**\n * Migration script for ${diff.namespace}\n *\n * This script handles data migration for breaking schema changes.\n * Edit this file to implement your data migration logic.\n *\n * The transaction is managed by the apply command.\n * If any operation fails, all changes will be rolled back.\n */\n\nimport type { Transaction } from \"./db\";\n\nexport async function main(trx: Transaction): Promise<void> {\n${updates.join(\"\\n\\n\")}\n}\n`;\n}\n\n/**\n * Generate script 
for a single change\n * @param {DiffChange} change - Diff change to generate script for\n * @returns {string | null} Script content or null if no script needed\n */\nfunction generateChangeScript(change: DiffChange): string | null {\n if (change.kind === \"field_added\") {\n const field = change.after as SnapshotFieldConfig;\n if (field.required) {\n return ` // Populate ${change.fieldName} for existing ${change.typeName} records\n await trx\n .updateTable(\"${change.typeName}\")\n .set({\n ${change.fieldName}: null, // TODO: Set appropriate default value\n })\n .execute();`;\n }\n return null;\n }\n\n if (change.kind !== \"field_modified\") {\n // No data migration needed for type_added, type_removed, or field_removed\n return null;\n }\n\n const before = change.before as SnapshotFieldConfig;\n const after = change.after as SnapshotFieldConfig;\n\n // Note: Type change is rejected as unsupported in generate.ts\n // No script generation needed here\n\n // Optional to required\n if (!before.required && after.required) {\n return ` // Set ${change.fieldName} for ${change.typeName} records where it is null\n await trx\n .updateTable(\"${change.typeName}\")\n .set({\n ${change.fieldName}: null, // TODO: Set appropriate default value\n })\n .where(\"${change.fieldName}\", \"is\", null)\n .execute();`;\n }\n\n // Note: Array to single value change is rejected in generate.ts\n // No script generation needed here\n\n // Unique constraint added\n if (!(before.unique ?? false) && (after.unique ?? 
false)) {\n return ` // Ensure ${change.fieldName} values are unique before adding constraint\n const duplicates = await trx\n .selectFrom(\"${change.typeName}\")\n .select([\"${change.fieldName}\"])\n .groupBy(\"${change.fieldName}\")\n .having((eb) => eb.fn.count(\"id\"), \">\", 1)\n .execute();\n for (const dup of duplicates) {\n // Keep first record, add suffix to others\n const records = await trx\n .selectFrom(\"${change.typeName}\")\n .select([\"id\", \"${change.fieldName}\"])\n .where(\"${change.fieldName}\", \"=\", dup.${change.fieldName})\n .execute();\n for (let i = 1; i < records.length; i++) {\n await trx\n .updateTable(\"${change.typeName}\")\n .set({ ${change.fieldName}: \\`\\${records[i].${change.fieldName}}_\\${i}\\` }) // TODO: Set appropriate unique value\n .where(\"id\", \"=\", records[i].id)\n .execute();\n }\n }`;\n }\n\n // Enum values removed\n if (before.type === \"enum\" && after.type === \"enum\") {\n const beforeValues = before.allowedValues ?? [];\n const afterValues = after.allowedValues ?? [];\n const removedValues = beforeValues.filter((v) => !afterValues.includes(v));\n if (removedValues.length > 0) {\n const defaultValue = afterValues[0] ?? 
\"NEW_VALUE\";\n return ` // Migrate records with removed enum values: ${removedValues.join(\", \")}\n await trx\n .updateTable(\"${change.typeName}\")\n .set({ ${change.fieldName}: \"${defaultValue}\" }) // TODO: Set appropriate value\n .where(\"${change.fieldName}\", \"in\", [${removedValues.map((v) => `\"${v}\"`).join(\", \")}])\n .execute();`;\n }\n }\n\n // Foreign key relationship changed\n if (\n before.foreignKeyType &&\n after.foreignKeyType &&\n before.foreignKeyType !== after.foreignKeyType\n ) {\n return ` // Migrate ${change.fieldName} references from ${before.foreignKeyType} to ${after.foreignKeyType}\n // Find records that don't have a valid reference in the new target table\n const orphanedRecords = await trx\n .selectFrom(\"${change.typeName}\")\n .leftJoin(\"${after.foreignKeyType}\", \"${change.typeName}.${change.fieldName}\", \"${after.foreignKeyType}.id\")\n .select([\"${change.typeName}.id\", \"${change.typeName}.${change.fieldName}\"])\n .where(\"${after.foreignKeyType}.id\", \"is\", null)\n .where(\"${change.typeName}.${change.fieldName}\", \"is not\", null)\n .execute();\n for (const record of orphanedRecords) {\n await trx\n .updateTable(\"${change.typeName}\")\n .set({ ${change.fieldName}: null }) // TODO: Set appropriate new reference\n .where(\"id\", \"=\", record.id)\n .execute();\n }`;\n }\n\n return null;\n}\n\n/**\n * Check if a migration script exists for a given migration number\n * @param {string} migrationsDir - Migrations directory path\n * @param {number} migrationNumber - Migration number\n * @returns {Promise<boolean>} True if script exists\n */\nexport async function migrationScriptExists(\n migrationsDir: string,\n migrationNumber: number,\n): Promise<boolean> {\n const filePath = getMigrationFilePath(migrationsDir, migrationNumber, \"migrate\");\n return fileExists(filePath);\n}\n\n/**\n * Get the migration script path for a given migration number\n * @param {string} migrationsDir - Migrations directory path\n * @param 
{number} migrationNumber - Migration number\n * @returns {string} Full path to migration script\n */\nexport function getMigrationScriptPath(migrationsDir: string, migrationNumber: number): string {\n return getMigrationFilePath(migrationsDir, migrationNumber, \"migrate\");\n}\n","/**\n * Generate command for TailorDB migrations\n *\n * Generates migration files based on local schema snapshots:\n * - First run: Creates initial schema snapshot (0000/schema.json)\n * - Subsequent runs: Creates diff from previous snapshot (0001/diff.json, etc.)\n */\n\nimport * as fs from \"node:fs\";\nimport * as fsPromises from \"node:fs/promises\";\nimport * as path from \"pathe\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, configArg, confirmationArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { logBetaWarning } from \"@/cli/shared/beta\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { getConfiguredEditorCommand, openInConfiguredEditor } from \"@/cli/shared/editor\";\nimport { logger, styles } from \"@/cli/shared/logger\";\nimport { PluginManager } from \"@/plugin/manager\";\nimport { getNamespacesWithMigrations, type NamespaceWithMigrations } from \"./config\";\nimport {\n formatMigrationDiff,\n formatBreakingChanges,\n formatDiffSummary,\n hasChanges,\n} from \"./diff-calculator\";\nimport {\n createSnapshotFromLocalTypes,\n reconstructSnapshotFromMigrations,\n compareSnapshots,\n getNextMigrationNumber,\n assertValidMigrationFiles,\n INITIAL_SCHEMA_NUMBER,\n type SchemaSnapshot,\n} from \"./snapshot\";\nimport { generateSchemaFile, generateDiffFiles } from \"./template-generator\";\n\nexport interface GenerateOptions {\n configPath?: string;\n name?: string;\n yes?: boolean;\n init?: boolean;\n}\n\n/**\n * Handle --init option: delete existing migrations directories\n * @param {NamespaceWithMigrations[]} namespaces - Namespaces with migrations\n * @param {boolean} skipConfirmation - Whether 
to skip confirmation prompt\n * @returns {Promise<void>}\n */\nasync function handleInitOption(\n namespaces: NamespaceWithMigrations[],\n skipConfirmation?: boolean,\n): Promise<void> {\n // Find directories that exist\n const existingDirs = namespaces.filter(({ migrationsDir }) => fs.existsSync(migrationsDir));\n\n if (existingDirs.length === 0) {\n logger.info(\"No existing migration directories found.\");\n return;\n }\n\n // Show warning\n logger.newline();\n logger.warn(\"This will DELETE all existing migration files:\");\n for (const { namespace, migrationsDir } of existingDirs) {\n logger.log(` - ${namespace}: ${migrationsDir}`);\n }\n logger.newline();\n\n // Confirmation prompt\n if (!skipConfirmation) {\n const confirmation = await logger.prompt(\n \"Are you sure you want to delete these directories and start fresh?\",\n {\n type: \"confirm\",\n initial: false,\n },\n );\n\n if (!confirmation) {\n logger.info(\"Operation cancelled.\");\n process.exit(0);\n }\n logger.newline();\n }\n\n // Delete directories\n for (const { namespace, migrationsDir } of existingDirs) {\n try {\n await fsPromises.rm(migrationsDir, { recursive: true, force: true });\n logger.success(`Deleted migration directory for ${styles.bold(namespace)}`);\n } catch (error) {\n logger.error(`Failed to delete ${migrationsDir}: ${error}`);\n throw error;\n }\n }\n\n logger.newline();\n logger.info(\"Migration directories cleared. 
Generating initial snapshot...\");\n logger.newline();\n}\n\n/**\n * Generate migration files for TailorDB schema changes\n * @param {GenerateOptions} options - Generation options\n * @returns {Promise<void>} Promise that resolves when generation is complete\n */\nexport async function generate(options: GenerateOptions): Promise<void> {\n logBetaWarning(\"tailordb migration\");\n\n // Load configuration\n const { config, plugins } = await loadConfig(options.configPath);\n const configDir = path.dirname(config.path);\n\n // Get namespaces with migrations config\n const namespacesWithMigrations: NamespaceWithMigrations[] = getNamespacesWithMigrations(\n config,\n configDir,\n );\n\n if (namespacesWithMigrations.length === 0) {\n logger.warn(\"No TailorDB namespaces with migrations config found.\");\n logger.info(\n 'Add \"migration: { directory: \\\\\"./migrations\\\\\" }\" to your db config to enable migrations.',\n );\n return;\n }\n\n // Handle --init option: delete existing migrations directory\n if (options.init) {\n await handleInitOption(namespacesWithMigrations, options.yes);\n }\n\n // Initialize plugin manager if plugins are provided\n let pluginManager: PluginManager | undefined;\n if (plugins.length > 0) {\n pluginManager = new PluginManager(plugins);\n }\n\n // Load application and all types\n const { defineApplication } = await import(\"@/cli/services/application\");\n const application = defineApplication({ config, pluginManager });\n\n // Process each namespace\n for (const { namespace, migrationsDir } of namespacesWithMigrations) {\n logger.info(`Processing namespace: ${styles.bold(namespace)}`);\n\n // Validate existing migration files before generating new ones\n assertValidMigrationFiles(migrationsDir, namespace);\n\n // Find the TailorDB service for this namespace\n const tailordbService = application.tailorDBServices.find((s) => s.namespace === namespace);\n if (!tailordbService) {\n logger.warn(`No TailorDB service found for namespace 
\"${namespace}\"`);\n continue;\n }\n\n // Load types for this service\n await tailordbService.loadTypes();\n await tailordbService.processNamespacePlugins();\n\n const localTypesObj = tailordbService.types;\n\n // Create snapshot from current local types\n const currentSnapshot = createSnapshotFromLocalTypes(localTypesObj, namespace);\n\n // Check if migrations directory exists and has snapshots\n let previousSnapshot: SchemaSnapshot | null = null;\n try {\n previousSnapshot = reconstructSnapshotFromMigrations(migrationsDir);\n } catch {\n // No previous migrations - this is fine\n }\n\n if (!previousSnapshot) {\n // First migration - generate initial schema snapshot\n await generateInitialSnapshot(currentSnapshot, migrationsDir);\n } else {\n // Compare with previous snapshot and generate diff\n await generateDiffFromSnapshot(previousSnapshot, currentSnapshot, migrationsDir, options);\n }\n }\n}\n\n/**\n * Generate the initial schema snapshot\n * @param {SchemaSnapshot} snapshot - Schema snapshot to save\n * @param {string} migrationsDir - Migrations directory path\n * @returns {Promise<void>} Promise that resolves when snapshot is generated\n */\nasync function generateInitialSnapshot(\n snapshot: SchemaSnapshot,\n migrationsDir: string,\n): Promise<void> {\n const result = await generateSchemaFile(snapshot, migrationsDir, INITIAL_SCHEMA_NUMBER);\n\n logger.success(`Generated initial schema snapshot`);\n logger.info(` File: ${result.filePath}`);\n logger.info(` Types: ${Object.keys(snapshot.types).length}`);\n\n logger.log(\"\\nThis is the baseline schema. 
Future changes will be tracked as diffs.\");\n}\n\n/**\n * Generate diff from previous snapshot\n * @param {SchemaSnapshot} previousSnapshot - Previous schema snapshot\n * @param {SchemaSnapshot} currentSnapshot - Current schema snapshot\n * @param {string} migrationsDir - Migrations directory path\n * @param {GenerateOptions} options - Generate options\n * @returns {Promise<void>} Promise that resolves when diff is generated\n */\nasync function generateDiffFromSnapshot(\n previousSnapshot: SchemaSnapshot,\n currentSnapshot: SchemaSnapshot,\n migrationsDir: string,\n options: GenerateOptions,\n): Promise<void> {\n // Calculate diff\n const diff = compareSnapshots(previousSnapshot, currentSnapshot);\n\n // Check if there are any changes\n if (!hasChanges(diff)) {\n logger.info(\"No schema differences detected.\");\n return;\n }\n\n // Display diff\n logger.newline();\n logger.log(formatMigrationDiff(diff));\n logger.newline();\n logger.info(`Summary: ${formatDiffSummary(diff)}`);\n\n // Check for unsupported changes\n const unsupportedChanges = diff.breakingChanges.filter((change) => change.unsupported);\n if (unsupportedChanges.length > 0) {\n for (const change of unsupportedChanges) {\n logger.newline();\n logger.error(`Unsupported change: ${change.typeName}.${change.fieldName}`);\n logger.error(` ${change.reason}`);\n }\n\n // Show 3-step migration hint if any unsupported change requires it\n if (unsupportedChanges.some((change) => change.showThreeStepHint)) {\n logger.newline();\n logger.info(\"These changes require a manual 3-step migration process:\");\n logger.info(\" Migration 1: Add a new field with the desired structure\");\n logger.info(\" and migrate data from old field to new field\");\n logger.info(\" Migration 2: Remove the old field\");\n logger.info(\" Migration 3: Add the field with the original name and new structure,\");\n logger.info(\" migrate data from temporary field, then remove temporary field\");\n }\n\n const details = 
unsupportedChanges\n .map((c) => ` - ${c.typeName}.${c.fieldName}: ${c.reason}`)\n .join(\"\\n\");\n throw new Error(`Unsupported schema changes detected:\\n${details}`);\n }\n\n // Warn about breaking changes\n if (diff.hasBreakingChanges) {\n logger.newline();\n logger.warn(formatBreakingChanges(diff.breakingChanges));\n\n if (!options.yes) {\n const confirmation = await logger.prompt(\"Continue generating migration?\", {\n type: \"confirm\",\n initial: true,\n cancel: \"symbol\",\n });\n\n if (confirmation !== true) {\n logger.info(\"Migration generation cancelled.\");\n return;\n }\n logger.newline();\n }\n }\n\n // Get next migration number\n const migrationNumber = getNextMigrationNumber(migrationsDir);\n\n // Generate diff and optional migration script (pass previousSnapshot for db.ts generation)\n const result = await generateDiffFiles(\n diff,\n migrationsDir,\n migrationNumber,\n previousSnapshot,\n options.name,\n );\n\n logger.success(\n `Generated migration ${styles.bold(result.migrationNumber.toString().padStart(4, \"0\"))}`,\n );\n logger.info(` Diff file: ${result.diffFilePath}`);\n\n if (result.migrateFilePath) {\n logger.info(` Migration script: ${result.migrateFilePath}`);\n if (result.dbTypesFilePath) {\n logger.info(` DB types: ${result.dbTypesFilePath}`);\n }\n logger.newline();\n logger.log(\"A migration script was generated for breaking changes.\");\n logger.log(\"Please review and edit the script before running 'tailor-sdk apply'.\");\n\n const editor = getConfiguredEditorCommand();\n if (!editor) {\n return;\n }\n\n try {\n await fsPromises.access(result.migrateFilePath);\n } catch {\n return;\n }\n\n logger.newline();\n logger.info(`Opening ${path.basename(result.migrateFilePath)} in ${editor}...`);\n\n try {\n await openInConfiguredEditor(result.migrateFilePath);\n } catch {\n return;\n }\n }\n}\n\n/**\n * CLI command definition for generate\n */\nexport const generateCommand = defineCommand({\n name: \"generate\",\n description:\n 
\"Generate migration files by detecting schema differences between current local types and the previous migration snapshot.\",\n args: z\n .object({\n ...commonArgs,\n ...confirmationArgs,\n ...configArg,\n name: arg(z.string().optional(), {\n alias: \"n\",\n description: \"Optional description for the migration\",\n }),\n init: arg(z.boolean().default(false), {\n description: \"Delete existing migrations and start fresh\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n await generate({\n configPath: args.config,\n name: args.name,\n yes: args.yes,\n init: args.init,\n });\n }),\n});\n","import type { LoadedConfig } from \"./config-loader\";\n\n/**\n * Extracts all configured namespace names from loaded application config.\n * Currently namespaces are derived from the `db` section.\n * @param config - Loaded application configuration.\n * @returns Namespace names in insertion order.\n */\nexport function extractAllNamespaces(config: LoadedConfig): string[] {\n const namespaces = new Set<string>();\n\n // Collect namespace names from db configuration\n if (config.db) {\n for (const [namespaceName] of Object.entries(config.db)) {\n namespaces.add(namespaceName);\n }\n }\n\n return Array.from(namespaces);\n}\n","type ListTailorDBTypesClient = {\n listTailorDBTypes(args: { workspaceId: string; namespaceName: string }): Promise<{\n tailordbTypes: Array<{ name: string }>;\n }>;\n};\n\ntype ResolveTypeNamespacesArgs = {\n workspaceId: string;\n namespaces: string[];\n typeNames: string[];\n client: ListTailorDBTypesClient;\n};\n\n/**\n * Resolve TailorDB type names to namespace names.\n * @param args - Resolution inputs\n * @returns Type to namespace map for found types\n */\nexport async function resolveTypeNamespaces(\n args: ResolveTypeNamespacesArgs,\n): Promise<Map<string, string>> {\n const requestedTypesByLowercase = new Map<string, string[]>();\n for (const typeName of args.typeNames) {\n const key = typeName.toLowerCase();\n const existing = 
requestedTypesByLowercase.get(key);\n if (existing) {\n existing.push(typeName);\n continue;\n }\n requestedTypesByLowercase.set(key, [typeName]);\n }\n\n const unresolvedTypes = new Set(args.typeNames);\n const typeNamespaceMap = new Map<string, string>();\n\n for (const namespace of args.namespaces) {\n if (unresolvedTypes.size === 0) {\n break;\n }\n\n try {\n const { tailordbTypes } = await args.client.listTailorDBTypes({\n workspaceId: args.workspaceId,\n namespaceName: namespace,\n });\n\n for (const type of tailordbTypes) {\n const matchedRequestedTypes = requestedTypesByLowercase.get(type.name.toLowerCase());\n if (!matchedRequestedTypes) {\n continue;\n }\n\n for (const requestedTypeName of matchedRequestedTypes) {\n if (typeNamespaceMap.has(requestedTypeName)) {\n continue;\n }\n typeNamespaceMap.set(requestedTypeName, namespace);\n unresolvedTypes.delete(requestedTypeName);\n }\n }\n } catch {\n continue;\n }\n }\n\n return typeNamespaceMap;\n}\n\ntype ResolveTypeNamespaceArgs = {\n workspaceId: string;\n namespaces: string[];\n typeName: string;\n client: ListTailorDBTypesClient;\n};\n\n/**\n * Resolve a single TailorDB type name to namespace.\n * @param args - Resolution inputs\n * @returns Namespace name if found\n */\nexport async function resolveTypeNamespace(args: ResolveTypeNamespaceArgs): Promise<string | null> {\n const typeNamespaceMap = await resolveTypeNamespaces({\n workspaceId: args.workspaceId,\n namespaces: args.namespaces,\n typeNames: [args.typeName],\n client: args.client,\n });\n\n return typeNamespaceMap.get(args.typeName) ?? 
null;\n}\n","import { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, confirmationArgs, deploymentArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { extractAllNamespaces } from \"@/cli/shared/config\";\nimport { loadConfig } from \"@/cli/shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { resolveTypeNamespaces } from \"@/cli/shared/tailordb-namespace\";\n\nexport interface TruncateOptions {\n workspaceId?: string;\n profile?: string;\n configPath?: string;\n all?: boolean;\n namespace?: string;\n types?: string[];\n}\n\ninterface InternalTruncateOptions extends TruncateOptions {\n yes?: boolean;\n}\n\ninterface TruncateSingleTypeOptions {\n workspaceId: string;\n namespaceName: string;\n typeName: string;\n}\n\nasync function truncateSingleType(\n options: TruncateSingleTypeOptions,\n client: Awaited<ReturnType<typeof initOperatorClient>>,\n): Promise<void> {\n await client.truncateTailorDBType({\n workspaceId: options.workspaceId,\n namespaceName: options.namespaceName,\n tailordbTypeName: options.typeName,\n });\n\n logger.success(`Truncated type \"${options.typeName}\" in namespace \"${options.namespaceName}\"`);\n}\n\nasync function truncateNamespace(\n workspaceId: string,\n namespaceName: string,\n client: Awaited<ReturnType<typeof initOperatorClient>>,\n): Promise<void> {\n await client.truncateTailorDBTypes({\n workspaceId,\n namespaceName,\n });\n\n logger.success(`Truncated all types in namespace \"${namespaceName}\"`);\n}\n\n/**\n * Truncate TailorDB data based on the given options.\n * @param options - Truncate options (all, namespace, or types)\n * @returns Promise that resolves when truncation completes\n */\nexport async function truncate(options?: TruncateOptions): Promise<void> {\n return await $truncate({ ...options, yes: true 
});\n}\n\nasync function $truncate(options?: InternalTruncateOptions): Promise<void> {\n // Load and validate options\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n // Validate arguments\n const hasTypes = options?.types && options.types.length > 0;\n const hasNamespace = !!options?.namespace;\n const hasAll = !!options?.all;\n\n // All options are mutually exclusive\n const optionCount = [hasAll, hasNamespace, hasTypes].filter(Boolean).length;\n if (optionCount === 0) {\n throw new Error(\"Please specify one of: --all, --namespace <name>, or type names\");\n }\n if (optionCount > 1) {\n throw new Error(\n \"Options --all, --namespace, and type names are mutually exclusive. Please specify only one.\",\n );\n }\n\n // Validate config and get namespaces before confirmation\n const { config } = await loadConfig(options?.configPath);\n const namespaces = extractAllNamespaces(config);\n\n // Handle --all flag\n if (hasAll) {\n if (namespaces.length === 0) {\n logger.warn(\"No namespaces found in config file.\");\n return;\n }\n\n if (!options?.yes) {\n const namespaceList = namespaces.join(\", \");\n const confirmation = await logger.prompt(\n `This will truncate ALL tables in the following namespaces: ${namespaceList}. Continue? 
(yes/no)`,\n {\n type: \"confirm\",\n initial: false,\n },\n );\n if (!confirmation) {\n logger.info(\"Truncate cancelled.\");\n return;\n }\n }\n\n for (const namespace of namespaces) {\n await truncateNamespace(workspaceId, namespace, client);\n }\n logger.success(\"Truncated all tables in all namespaces\");\n return;\n }\n\n // Handle --namespace flag\n if (hasNamespace && options?.namespace) {\n const namespace = options.namespace;\n\n // Validate namespace exists in config\n if (!namespaces.includes(namespace)) {\n throw new Error(\n `Namespace \"${namespace}\" not found in config. Available namespaces: ${namespaces.join(\", \")}`,\n );\n }\n\n if (!options.yes) {\n const confirmation = await logger.prompt(\n `This will truncate ALL tables in namespace \"${namespace}\". Continue? (yes/no)`,\n {\n type: \"confirm\",\n initial: false,\n },\n );\n if (!confirmation) {\n logger.info(\"Truncate cancelled.\");\n return;\n }\n }\n\n await truncateNamespace(workspaceId, namespace, client);\n return;\n }\n\n // Handle specific types\n if (hasTypes && options?.types) {\n const typeNames = options.types;\n\n // Validate all types exist and get their namespaces before confirmation\n const typeNamespaceMap = await resolveTypeNamespaces({\n workspaceId,\n namespaces,\n typeNames,\n client,\n });\n const notFoundTypes = typeNames.filter((typeName) => !typeNamespaceMap.has(typeName));\n\n if (notFoundTypes.length > 0) {\n throw new Error(\n `The following types were not found in any namespace: ${notFoundTypes.join(\", \")}`,\n );\n }\n\n if (!options.yes) {\n const typeList = typeNames.join(\", \");\n const confirmation = await logger.prompt(\n `This will truncate the following types: ${typeList}. Continue? 
(yes/no)`,\n {\n type: \"confirm\",\n initial: false,\n },\n );\n if (!confirmation) {\n logger.info(\"Truncate cancelled.\");\n return;\n }\n }\n\n for (const typeName of typeNames) {\n const namespace = typeNamespaceMap.get(typeName);\n if (!namespace) {\n continue;\n }\n\n await truncateSingleType(\n {\n workspaceId,\n namespaceName: namespace,\n typeName,\n },\n client,\n );\n }\n }\n}\n\nexport const truncateCommand = defineCommand({\n name: \"truncate\",\n description: \"Truncate (delete all records from) TailorDB tables.\",\n args: z\n .object({\n ...commonArgs,\n ...deploymentArgs,\n ...confirmationArgs,\n types: arg(z.string().array().optional(), {\n positional: true,\n description: \"Type names to truncate\",\n }),\n all: arg(z.boolean().default(false), {\n alias: \"a\",\n description: \"Truncate all tables in all namespaces\",\n }),\n namespace: arg(z.string().optional(), {\n alias: \"n\",\n description: \"Truncate all tables in specified namespace\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const types = args.types && args.types.length > 0 ? 
args.types : undefined;\n await $truncate({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n all: args.all,\n namespace: args.namespace,\n types,\n yes: args.yes,\n });\n }),\n});\n","import { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { fetchAll, initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { type WorkflowListInfo, toWorkflowListInfo } from \"./transform\";\n\nexport interface ListWorkflowsOptions {\n workspaceId?: string;\n profile?: string;\n}\n\n/**\n * List workflows in the workspace and return CLI-friendly info.\n * @param options - Workflow listing options\n * @returns List of workflows\n */\nexport async function listWorkflows(options?: ListWorkflowsOptions): Promise<WorkflowListInfo[]> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options?.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options?.workspaceId,\n profile: options?.profile,\n });\n\n const workflows = await fetchAll(async (pageToken, maxPageSize) => {\n const { workflows, nextPageToken } = await client.listWorkflows({\n workspaceId,\n pageToken,\n pageSize: maxPageSize,\n });\n return [workflows, nextPageToken];\n });\n\n return workflows.map(toWorkflowListInfo);\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List all workflows in the workspace.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const workflows = await listWorkflows({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n if (workflows.length === 0 && !args.json) {\n 
logger.info(\"No workflows found.\");\n return;\n }\n logger.out(workflows);\n }),\n});\n","import { Code, ConnectError } from \"@connectrpc/connect\";\nimport { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n jsonArgs,\n parseDuration,\n withCommonArgs,\n workspaceArgs,\n} from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { waitArgs } from \"./args\";\nimport { getWorkflowExecution, printExecutionWithLogs } from \"./executions\";\nimport { waitForExecution, type WaitOptions } from \"./start\";\nimport { type WorkflowExecutionInfo } from \"./transform\";\n\nexport interface ResumeWorkflowOptions {\n executionId: string;\n workspaceId?: string;\n profile?: string;\n interval?: number;\n}\n\nexport interface ResumeWorkflowResultWithWait {\n executionId: string;\n wait: (options?: WaitOptions) => Promise<WorkflowExecutionInfo>;\n}\n\n/**\n * Resume a suspended workflow execution and return a handle to wait for completion.\n * @param options - Resume options\n * @returns Resume result with wait helper\n */\nexport async function resumeWorkflow(\n options: ResumeWorkflowOptions,\n): Promise<ResumeWorkflowResultWithWait> {\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: options.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: options.workspaceId,\n profile: options.profile,\n });\n\n try {\n const { executionId } = await client.testResumeWorkflow({\n workspaceId,\n executionId: options.executionId,\n });\n\n return {\n executionId,\n wait: (waitOptions?: WaitOptions) =>\n waitForExecution({\n client,\n workspaceId,\n executionId,\n interval: options.interval ?? 
3000,\n showProgress: waitOptions?.showProgress,\n }),\n };\n } catch (error) {\n if (error instanceof ConnectError) {\n if (error.code === Code.NotFound) {\n throw new Error(`Execution '${options.executionId}' not found.`);\n }\n if (error.code === Code.FailedPrecondition) {\n throw new Error(`Execution '${options.executionId}' is not in a resumable state.`);\n }\n }\n throw error;\n }\n}\n\nexport const resumeCommand = defineCommand({\n name: \"resume\",\n description: \"Resume a failed or pending workflow execution.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n executionId: arg(z.string(), {\n positional: true,\n description: \"Failed execution ID\",\n }),\n ...waitArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const { executionId, wait } = await resumeWorkflow({\n executionId: args.executionId,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n interval: parseDuration(args.interval),\n });\n\n if (!args.json) {\n logger.info(`Execution ID: ${executionId}`, { mode: \"stream\" });\n }\n\n if (args.wait) {\n const result = await wait({ showProgress: !args.json });\n if (args.logs && !args.json) {\n const { execution } = await getWorkflowExecution({\n executionId,\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n logs: true,\n });\n printExecutionWithLogs(execution);\n } else {\n logger.out(result);\n }\n } else {\n logger.out({ executionId });\n }\n }),\n});\n","import {\n type GetApplicationSchemaHealthResponse,\n GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus,\n} from \"@tailor-proto/tailor/v1/application_pb\";\nimport { ApplicationSchemaUpdateAttemptStatus } from \"@tailor-proto/tailor/v1/application_resource_pb\";\nimport { formatTimestamp } from \"@/cli/shared/format\";\nimport type { Application } from \"@tailor-proto/tailor/v1/application_resource_pb\";\n\nexport interface AppInfo {\n name: string;\n domain: string;\n authNamespace: string;\n createdAt: Date 
| null;\n updatedAt: Date | null;\n}\n\nexport interface AppHealthInfo {\n name: string;\n status: string;\n currentServingSchemaUpdatedAt: Date | null;\n lastAttemptStatus: string;\n lastAttemptAt: Date | null;\n lastAttemptError: string;\n}\n\nconst statusToString = (\n status: GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus,\n): string => {\n switch (status) {\n case GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus.OK:\n return \"ok\";\n case GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus.COMPOSITION_ERROR:\n return \"composition_error\";\n default:\n return \"unknown\";\n }\n};\n\nconst attemptStatusToString = (status: ApplicationSchemaUpdateAttemptStatus): string => {\n switch (status) {\n case ApplicationSchemaUpdateAttemptStatus.SUCCEEDED:\n return \"success\";\n case ApplicationSchemaUpdateAttemptStatus.FAILED:\n return \"failure\";\n default:\n return \"unknown\";\n }\n};\n\nexport const appInfo = (app: Application): AppInfo => {\n return {\n name: app.name,\n domain: app.domain,\n authNamespace: app.authNamespace,\n createdAt: formatTimestamp(app.createTime),\n updatedAt: formatTimestamp(app.updateTime),\n };\n};\n\nexport const appHealthInfo = (\n name: string,\n health: GetApplicationSchemaHealthResponse,\n): AppHealthInfo => {\n const attempt = health.lastSchemaUpdateAttempt;\n return {\n name,\n status: statusToString(health.status),\n currentServingSchemaUpdatedAt: formatTimestamp(health.currentServingSchemaUpdateTime),\n lastAttemptStatus: attempt ? attemptStatusToString(attempt.status) : \"N/A\",\n lastAttemptAt: formatTimestamp(attempt?.attemptTime),\n lastAttemptError: attempt?.error ?? 
\"\",\n };\n};\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { humanizeRelativeTime } from \"@/cli/shared/format\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { appHealthInfo, type AppHealthInfo } from \"./transform\";\n\nconst healthOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n name: z.string().min(1, { message: \"name is required\" }),\n});\n\nexport type HealthOptions = z.input<typeof healthOptionsSchema>;\n\nasync function loadOptions(options: HealthOptions) {\n const result = healthOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n name: result.data.name,\n };\n}\n\n/**\n * Get application schema health status.\n * @param options - Health check options\n * @returns Application health information\n */\nexport async function getAppHealth(options: HealthOptions): Promise<AppHealthInfo> {\n const { client, workspaceId, name } = await loadOptions(options);\n\n const response = await client.getApplicationSchemaHealth({\n workspaceId,\n applicationName: name,\n });\n\n return appHealthInfo(name, response);\n}\n\nexport const healthCommand = defineCommand({\n name: \"health\",\n description: \"Check application schema health\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n name: arg(z.string(), {\n description: \"Application name\",\n 
alias: \"n\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const health = await getAppHealth({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n name: args.name,\n });\n\n const formattedHealth = args.json\n ? health\n : {\n ...health,\n currentServingSchemaUpdatedAt: humanizeRelativeTime(health.currentServingSchemaUpdatedAt),\n lastAttemptAt: humanizeRelativeTime(health.lastAttemptAt),\n };\n\n logger.out(formattedHealth);\n }),\n});\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n jsonArgs,\n positiveIntArg,\n withCommonArgs,\n workspaceArgs,\n} from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { humanizeRelativeTime } from \"@/cli/shared/format\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { appInfo, type AppInfo } from \"./transform\";\n\nconst listAppsOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n limit: z.coerce.number().int().positive().optional(),\n});\n\nexport type ListAppsOptions = z.input<typeof listAppsOptionsSchema>;\n\nasync function loadOptions(options: ListAppsOptions) {\n const result = listAppsOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n limit: result.data.limit,\n };\n}\n\n/**\n * List applications in a workspace with an optional limit.\n * @param options - Application listing options\n * @returns List of applications\n */\nexport async function listApps(options: ListAppsOptions): 
Promise<AppInfo[]> {\n const { client, workspaceId, limit } = await loadOptions(options);\n const hasLimit = limit !== undefined;\n\n const results: AppInfo[] = [];\n let pageToken = \"\";\n\n while (true) {\n if (hasLimit && results.length >= limit!) {\n break;\n }\n\n const remaining = hasLimit ? limit! - results.length : undefined;\n const pageSize = remaining !== undefined && remaining > 0 ? remaining : undefined;\n\n const { applications, nextPageToken } = await client.listApplications({\n workspaceId,\n pageToken,\n ...(pageSize !== undefined ? { pageSize } : {}),\n });\n\n const mapped = applications.map(appInfo);\n\n if (remaining !== undefined && mapped.length > remaining) {\n results.push(...mapped.slice(0, remaining));\n } else {\n results.push(...mapped);\n }\n\n if (!nextPageToken) {\n break;\n }\n pageToken = nextPageToken;\n }\n\n return results;\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List applications in a workspace\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n limit: arg(positiveIntArg.optional(), {\n alias: \"l\",\n description: \"Maximum number of applications to list\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const apps = await listApps({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n limit: args.limit,\n });\n\n const formattedApps = args.json\n ? 
apps\n : apps.map(({ updatedAt: _, createdAt, ...rest }) => ({\n ...rest,\n createdAt: humanizeRelativeTime(createdAt),\n }));\n\n logger.out(formattedApps);\n }),\n});\n","import { formatTimestamp } from \"@/cli/shared/format\";\nimport type { Workspace } from \"@tailor-proto/tailor/v1/workspace_resource_pb\";\n\nexport interface WorkspaceInfo {\n id: string;\n name: string;\n region: string;\n createdAt: Date | null;\n updatedAt: Date | null;\n}\n\nexport interface WorkspaceDetails extends WorkspaceInfo {\n deleteProtection: boolean;\n organizationId: string;\n folderId: string;\n}\n\nexport const workspaceInfo = (workspace: Workspace): WorkspaceInfo => {\n return {\n id: workspace.id,\n name: workspace.name,\n region: workspace.region,\n createdAt: formatTimestamp(workspace.createTime),\n updatedAt: formatTimestamp(workspace.updateTime),\n };\n};\n\nexport const workspaceDetails = (workspace: Workspace): WorkspaceDetails => {\n return {\n ...workspaceInfo(workspace),\n deleteProtection: workspace.deleteProtection,\n organizationId: workspace.organizationId,\n folderId: workspace.folderId,\n };\n};\n","import { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient, type OperatorClient } from \"@/cli/shared/client\";\nimport {\n loadAccessToken,\n loadFolderId,\n loadOrganizationId,\n readPlatformConfig,\n writePlatformConfig,\n} from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { workspaceInfo, type WorkspaceInfo } from \"./transform\";\nimport type { ProfileInfo } from \"../profile\";\n\n/**\n * Schema for workspace creation options\n * - name: 3-63 chars, lowercase alphanumeric and hyphens, cannot start/end with hyphen\n * - organizationId, folderId: optional UUIDs\n */\nconst createWorkspaceOptionsSchema = z.object({\n name: z\n .string()\n .min(3, \"Name must be at least 3 characters\")\n .max(63, \"Name 
must be at most 63 characters\")\n .regex(/^[a-z0-9-]+$/, \"Name can only contain lowercase letters, numbers, and hyphens\")\n .refine(\n (n) => !n.startsWith(\"-\") && !n.endsWith(\"-\"),\n \"Name cannot start or end with a hyphen\",\n ),\n region: z.string(),\n deleteProtection: z.boolean().optional(),\n organizationId: z.uuid().optional(),\n folderId: z.uuid().optional(),\n});\n\nexport type CreateWorkspaceOptions = z.input<typeof createWorkspaceOptionsSchema>;\n\nconst validateRegion = async (region: string, client: OperatorClient) => {\n const availableRegions = await client.listAvailableWorkspaceRegions({});\n if (!availableRegions.regions.includes(region)) {\n throw new Error(`Region must be one of: ${availableRegions.regions.join(\", \")}.`);\n }\n};\n\n/**\n * Create a new workspace with the given options.\n * @param options - Workspace creation options\n * @returns Created workspace info\n */\nexport async function createWorkspace(options: CreateWorkspaceOptions): Promise<WorkspaceInfo> {\n // Validate options with zod schema\n const result = createWorkspaceOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n const validated = result.data;\n\n // Load client and validate region\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n await validateRegion(validated.region, client);\n\n // Resolve organization and folder IDs from options or environment variables\n const organizationId = loadOrganizationId(validated.organizationId);\n const folderId = loadFolderId(validated.folderId);\n\n // Create workspace\n const resp = await client.createWorkspace({\n workspaceName: validated.name,\n workspaceRegion: validated.region,\n deleteProtection: validated.deleteProtection ?? 
false,\n organizationId,\n folderId,\n });\n\n return workspaceInfo(resp.workspace!);\n}\n\nexport const createCommand = defineCommand({\n name: \"create\",\n description: \"Create a new Tailor Platform workspace.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n name: arg(z.string(), {\n alias: \"n\",\n description: \"Workspace name\",\n }),\n region: arg(z.string(), {\n alias: \"r\",\n description: \"Workspace region (us-west, asia-northeast)\",\n }),\n \"delete-protection\": arg(z.boolean().default(false), {\n alias: \"d\",\n description: \"Enable delete protection\",\n }),\n \"organization-id\": arg(z.string().optional(), {\n alias: \"o\",\n description: \"Organization ID to workspace associate with\",\n }),\n \"folder-id\": arg(z.string().optional(), {\n alias: \"f\",\n description: \"Folder ID to workspace associate with\",\n }),\n \"profile-name\": arg(z.string().optional(), {\n alias: \"p\",\n description: \"Profile name to create\",\n }),\n \"profile-user\": arg(z.string().optional(), {\n description: \"User email for the profile (defaults to current user)\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n // Execute workspace create logic\n const workspace = await createWorkspace({\n name: args.name,\n region: args.region,\n deleteProtection: args[\"delete-protection\"],\n organizationId: args[\"organization-id\"],\n folderId: args[\"folder-id\"],\n });\n\n let profileInfo: ProfileInfo | undefined;\n const profileName = args[\"profile-name\"];\n if (profileName) {\n const config = readPlatformConfig();\n if (config.profiles[profileName]) {\n throw new Error(`Profile \"${profileName}\" already exists.`);\n }\n\n const profileUser = args[\"profile-user\"] || config.current_user;\n if (!profileUser) {\n throw new Error(\n \"Current user not found. 
Please login or specify --profile-user to create a profile.\",\n );\n }\n\n if (!config.users[profileUser]) {\n throw new Error(\n `User \"${profileUser}\" not found.\\nPlease verify your user name and login using 'tailor-sdk login' command.`,\n );\n }\n config.profiles[profileName] = {\n user: profileUser,\n workspace_id: workspace.id,\n };\n writePlatformConfig(config);\n profileInfo = {\n name: profileName,\n user: profileUser,\n workspaceId: workspace.id,\n };\n\n if (!args.json) {\n logger.success(`Profile \"${profileName}\" created successfully.`);\n }\n }\n\n if (!args.json) {\n logger.success(`Workspace \"${args.name}\" created successfully.`);\n }\n\n if (args.json && profileInfo) {\n logger.out({ ...workspace, profile: profileInfo });\n return;\n }\n\n logger.out(workspace);\n if (profileInfo) {\n logger.out(\"Profile:\");\n logger.out(profileInfo);\n }\n }),\n});\n","import { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, confirmationArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, readPlatformConfig, writePlatformConfig } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\n\nconst deleteWorkspaceOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }),\n});\n\nexport type DeleteWorkspaceOptions = z.input<typeof deleteWorkspaceOptionsSchema>;\n\nasync function loadOptions(options: DeleteWorkspaceOptions) {\n // Validate options with zod schema\n const result = deleteWorkspaceOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n\n return {\n client,\n workspaceId: result.data.workspaceId,\n };\n}\n\n/**\n * Delete a workspace by ID.\n * @param options - Workspace deletion options\n * 
@returns Promise that resolves when deletion completes\n */\nexport async function deleteWorkspace(options: DeleteWorkspaceOptions): Promise<void> {\n // Load and validate options\n const { client, workspaceId } = await loadOptions(options);\n\n // Delete workspace\n await client.deleteWorkspace({\n workspaceId,\n });\n}\n\nexport const deleteCommand = defineCommand({\n name: \"delete\",\n description: \"Delete a Tailor Platform workspace.\",\n args: z\n .object({\n ...commonArgs,\n \"workspace-id\": arg(z.string(), {\n alias: \"w\",\n description: \"Workspace ID\",\n }),\n ...confirmationArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n // Load and validate options\n const { client, workspaceId } = await loadOptions({\n workspaceId: args[\"workspace-id\"],\n });\n\n // Check if workspace exists\n let workspace;\n try {\n workspace = await client.getWorkspace({\n workspaceId,\n });\n } catch {\n throw new Error(`Workspace \"${workspaceId}\" not found.`);\n }\n\n // Confirm deletion if not forced\n if (!args.yes) {\n const confirmation = await logger.prompt(\n `Enter the workspace name to confirm deletion (${workspace.workspace?.name}):`,\n {\n type: \"text\",\n },\n );\n if (confirmation !== workspace.workspace?.name) {\n logger.info(\"Workspace deletion cancelled.\");\n return;\n }\n }\n\n // Delete workspace\n await client.deleteWorkspace({\n workspaceId,\n });\n\n // Remove profiles associated with the deleted workspace\n const pfConfig = readPlatformConfig();\n const profilesToDelete = Object.entries(pfConfig.profiles).filter(\n ([, profile]) => profile?.workspace_id === workspaceId,\n );\n if (profilesToDelete.length > 0) {\n for (const [profileName] of profilesToDelete) {\n delete pfConfig.profiles[profileName];\n }\n writePlatformConfig(pfConfig);\n }\n\n // Show success message\n if (profilesToDelete.length > 0) {\n logger.success(\n `Workspace \"${args[\"workspace-id\"]}\" and ${profilesToDelete.length} associated profile(s) deleted 
successfully.`,\n );\n } else {\n logger.success(`Workspace \"${args[\"workspace-id\"]}\" deleted successfully.`);\n }\n }),\n});\n","import { defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { humanizeRelativeTime } from \"@/cli/shared/format\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { workspaceDetails, type WorkspaceDetails } from \"./transform\";\n\nconst getWorkspaceOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n});\n\nexport type GetWorkspaceOptions = z.input<typeof getWorkspaceOptionsSchema>;\n\nasync function loadOptions(options: GetWorkspaceOptions) {\n const result = getWorkspaceOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n };\n}\n\n/**\n * Get detailed information about a workspace.\n * @param options - Workspace get options\n * @returns Workspace details\n */\nexport async function getWorkspace(options: GetWorkspaceOptions): Promise<WorkspaceDetails> {\n const { client, workspaceId } = await loadOptions(options);\n\n const response = await client.getWorkspace({\n workspaceId,\n });\n\n if (!response.workspace) {\n throw new Error(`Workspace \"${workspaceId}\" not found.`);\n }\n\n return workspaceDetails(response.workspace);\n}\n\nexport const getCommand = defineCommand({\n name: \"get\",\n description: \"Show detailed information about a workspace\",\n args: z\n 
.object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const workspace = await getWorkspace({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n });\n\n const formattedWorkspace = args.json\n ? workspace\n : {\n ...workspace,\n createdAt: humanizeRelativeTime(workspace.createdAt),\n updatedAt: humanizeRelativeTime(workspace.updatedAt),\n };\n\n logger.out(formattedWorkspace);\n }),\n});\n","import { defineCommand, arg } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, jsonArgs, positiveIntArg, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { workspaceInfo, type WorkspaceInfo } from \"./transform\";\n\nexport interface ListWorkspacesOptions {\n limit?: number;\n}\n\n/**\n * List workspaces with an optional limit.\n * @param options - Workspace listing options\n * @returns List of workspaces\n */\nexport async function listWorkspaces(options?: ListWorkspacesOptions): Promise<WorkspaceInfo[]> {\n const limit = options?.limit;\n const hasLimit = limit !== undefined;\n\n // Load and validate options\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n\n const results: WorkspaceInfo[] = [];\n let pageToken = \"\";\n\n // Fetch workspaces with optional limit using pageSize\n // If limit is undefined, this behaves like an unbounded listing.\n // If limit is set, we request up to the remaining number on each page\n // and stop once we have collected enough or the server has no more pages.\n\n while (true) {\n if (hasLimit && results.length >= limit!) {\n break;\n }\n\n const remaining = hasLimit ? limit! - results.length : undefined;\n const pageSize = remaining !== undefined && remaining > 0 ? 
remaining : undefined;\n\n const { workspaces, nextPageToken } = await client.listWorkspaces({\n pageToken,\n ...(pageSize !== undefined ? { pageSize } : {}),\n });\n\n const mapped = workspaces.map(workspaceInfo);\n\n if (remaining !== undefined && mapped.length > remaining) {\n results.push(...mapped.slice(0, remaining));\n } else {\n results.push(...mapped);\n }\n\n if (!nextPageToken) {\n break;\n }\n pageToken = nextPageToken;\n }\n\n return results;\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List all Tailor Platform workspaces.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n limit: arg(positiveIntArg.optional(), {\n alias: \"l\",\n description: \"Maximum number of workspaces to list\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const workspaces = await listWorkspaces({ limit: args.limit });\n logger.out(workspaces, { display: { updatedAt: null } });\n }),\n});\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, confirmationArgs, withCommonArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\n\nconst restoreWorkspaceOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }),\n});\n\nexport type RestoreWorkspaceOptions = z.input<typeof restoreWorkspaceOptionsSchema>;\n\nasync function loadOptions(options: RestoreWorkspaceOptions) {\n const result = restoreWorkspaceOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n\n return {\n client,\n workspaceId: result.data.workspaceId,\n };\n}\n\n/**\n * Restore a deleted workspace by ID.\n * @param options - Workspace restore options\n * 
@returns Promise that resolves when restoration completes\n */\nexport async function restoreWorkspace(options: RestoreWorkspaceOptions): Promise<void> {\n const { client, workspaceId } = await loadOptions(options);\n\n await client.restoreWorkspace({\n workspaceId,\n });\n}\n\nexport const restoreCommand = defineCommand({\n name: \"restore\",\n description: \"Restore a deleted workspace\",\n args: z\n .object({\n ...commonArgs,\n \"workspace-id\": arg(z.string(), {\n alias: \"w\",\n description: \"Workspace ID\",\n }),\n ...confirmationArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const { client, workspaceId } = await loadOptions({\n workspaceId: args[\"workspace-id\"],\n });\n\n if (!args.yes) {\n const confirmation = await logger.prompt(\n `Are you sure you want to restore workspace \"${workspaceId}\"? (yes/no):`,\n {\n type: \"text\",\n },\n );\n if (confirmation !== \"yes\") {\n logger.info(\"Workspace restoration cancelled.\");\n return;\n }\n }\n\n await client.restoreWorkspace({\n workspaceId,\n });\n\n logger.success(`Workspace \"${workspaceId}\" restored successfully.`);\n }),\n});\n","import { WorkspacePlatformUserRole } from \"@tailor-proto/tailor/v1/workspace_resource_pb\";\nimport type { WorkspacePlatformUser } from \"@tailor-proto/tailor/v1/workspace_resource_pb\";\n\nexport interface UserInfo {\n userId: string;\n email: string;\n role: string;\n}\n\nconst roleToString = (role: WorkspacePlatformUserRole): string => {\n switch (role) {\n case WorkspacePlatformUserRole.ADMIN:\n return \"admin\";\n case WorkspacePlatformUserRole.EDITOR:\n return \"editor\";\n case WorkspacePlatformUserRole.VIEWER:\n return \"viewer\";\n default:\n return \"unknown\";\n }\n};\n\nexport const stringToRole = (role: string): WorkspacePlatformUserRole => {\n switch (role.toLowerCase()) {\n case \"admin\":\n return WorkspacePlatformUserRole.ADMIN;\n case \"editor\":\n return WorkspacePlatformUserRole.EDITOR;\n case \"viewer\":\n return 
WorkspacePlatformUserRole.VIEWER;\n default:\n throw new Error(`Invalid role: ${role}. Valid roles: admin, editor, viewer`);\n }\n};\n\nexport const userInfo = (user: WorkspacePlatformUser): UserInfo => {\n return {\n userId: user.platformUser?.userId ?? \"\",\n email: user.platformUser?.email ?? \"\",\n role: roleToString(user.role),\n };\n};\n\nexport const validRoles = [\"admin\", \"editor\", \"viewer\"] as const;\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { stringToRole, validRoles } from \"./transform\";\n\nconst inviteUserOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n email: z.email({ message: \"email must be a valid email address\" }),\n role: z.enum(validRoles, { message: `role must be one of: ${validRoles.join(\", \")}` }),\n});\n\nexport type InviteUserOptions = z.input<typeof inviteUserOptionsSchema>;\n\nasync function loadOptions(options: InviteUserOptions) {\n const result = inviteUserOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n email: result.data.email,\n role: stringToRole(result.data.role),\n };\n}\n\n/**\n * Invite a user to a workspace.\n * @param options - User invite options\n * @returns Promise that resolves when invitation is sent\n */\nexport async function inviteUser(options: InviteUserOptions): Promise<void> 
{\n const { client, workspaceId, email, role } = await loadOptions(options);\n\n await client.inviteWorkspacePlatformUser({\n workspaceId,\n email,\n role,\n });\n}\n\nexport const inviteCommand = defineCommand({\n name: \"invite\",\n description: \"Invite a user to a workspace\",\n args: z\n .object({\n ...commonArgs,\n ...workspaceArgs,\n email: arg(z.email(), {\n description: \"Email address of the user to invite\",\n }),\n role: arg(z.enum(validRoles), {\n description: `Role to assign (${validRoles.join(\", \")})`,\n alias: \"r\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n await inviteUser({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n email: args.email,\n role: args.role as (typeof validRoles)[number],\n });\n\n logger.success(`User \"${args.email}\" invited successfully with role \"${args.role}\".`);\n }),\n});\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport {\n commonArgs,\n jsonArgs,\n positiveIntArg,\n withCommonArgs,\n workspaceArgs,\n} from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { userInfo, type UserInfo } from \"./transform\";\n\nconst listUsersOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n limit: z.coerce.number().int().positive().optional(),\n});\n\nexport type ListUsersOptions = z.input<typeof listUsersOptionsSchema>;\n\nasync function loadOptions(options: ListUsersOptions) {\n const result = listUsersOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: 
result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n limit: result.data.limit,\n };\n}\n\n/**\n * List users in a workspace with an optional limit.\n * @param options - User listing options\n * @returns List of workspace users\n */\nexport async function listUsers(options: ListUsersOptions): Promise<UserInfo[]> {\n const { client, workspaceId, limit } = await loadOptions(options);\n const hasLimit = limit !== undefined;\n\n const results: UserInfo[] = [];\n let pageToken = \"\";\n\n while (true) {\n if (hasLimit && results.length >= limit!) {\n break;\n }\n\n const remaining = hasLimit ? limit! - results.length : undefined;\n const pageSize = remaining !== undefined && remaining > 0 ? remaining : undefined;\n\n const { workspacePlatformUsers, nextPageToken } = await client.listWorkspacePlatformUsers({\n workspaceId,\n pageToken,\n ...(pageSize !== undefined ? { pageSize } : {}),\n });\n\n const mapped = workspacePlatformUsers.map(userInfo);\n\n if (remaining !== undefined && mapped.length > remaining) {\n results.push(...mapped.slice(0, remaining));\n } else {\n results.push(...mapped);\n }\n\n if (!nextPageToken) {\n break;\n }\n pageToken = nextPageToken;\n }\n\n return results;\n}\n\nexport const listCommand = defineCommand({\n name: \"list\",\n description: \"List users in a workspace\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...workspaceArgs,\n limit: arg(positiveIntArg.optional(), {\n alias: \"l\",\n description: \"Maximum number of users to list\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const users = await listUsers({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n limit: args.limit,\n });\n\n logger.out(users);\n }),\n});\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, confirmationArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from 
\"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\n\nconst removeUserOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n email: z.string().email({ message: \"email must be a valid email address\" }),\n});\n\nexport type RemoveUserOptions = z.input<typeof removeUserOptionsSchema>;\n\nasync function loadOptions(options: RemoveUserOptions) {\n const result = removeUserOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n email: result.data.email,\n };\n}\n\n/**\n * Remove a user from a workspace.\n * @param options - User remove options\n * @returns Promise that resolves when removal completes\n */\nexport async function removeUser(options: RemoveUserOptions): Promise<void> {\n const { client, workspaceId, email } = await loadOptions(options);\n\n await client.removeWorkspacePlatformUser({\n workspaceId,\n email,\n });\n}\n\nexport const removeCommand = defineCommand({\n name: \"remove\",\n description: \"Remove a user from a workspace\",\n args: z\n .object({\n ...commonArgs,\n ...workspaceArgs,\n email: arg(z.email(), {\n description: \"Email address of the user to remove\",\n }),\n ...confirmationArgs,\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n if (!args.yes) {\n const confirmation = await logger.prompt(\n `Are you sure you want to remove user \"${args.email}\" from the workspace? 
(yes/no):`,\n {\n type: \"text\",\n },\n );\n if (confirmation !== \"yes\") {\n logger.info(\"User removal cancelled.\");\n return;\n }\n }\n\n await removeUser({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n email: args.email,\n });\n\n logger.success(`User \"${args.email}\" removed from workspace.`);\n }),\n});\n","import { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { commonArgs, withCommonArgs, workspaceArgs } from \"@/cli/shared/args\";\nimport { initOperatorClient } from \"@/cli/shared/client\";\nimport { loadAccessToken, loadWorkspaceId } from \"@/cli/shared/context\";\nimport { logger } from \"@/cli/shared/logger\";\nimport { stringToRole, validRoles } from \"./transform\";\n\nconst updateUserOptionsSchema = z.object({\n workspaceId: z.uuid({ message: \"workspace-id must be a valid UUID\" }).optional(),\n profile: z.string().optional(),\n email: z.string().email({ message: \"email must be a valid email address\" }),\n role: z.enum(validRoles, { message: `role must be one of: ${validRoles.join(\", \")}` }),\n});\n\nexport type UpdateUserOptions = z.input<typeof updateUserOptionsSchema>;\n\nasync function loadOptions(options: UpdateUserOptions) {\n const result = updateUserOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken();\n const client = await initOperatorClient(accessToken);\n const workspaceId = loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n\n return {\n client,\n workspaceId,\n email: result.data.email,\n role: stringToRole(result.data.role),\n };\n}\n\n/**\n * Update a user's role in a workspace.\n * @param options - User update options\n * @returns Promise that resolves when update completes\n */\nexport async function updateUser(options: UpdateUserOptions): Promise<void> {\n const { client, workspaceId, email, role } = await 
loadOptions(options);\n\n await client.updateWorkspacePlatformUser({\n workspaceId,\n email,\n role,\n });\n}\n\nexport const updateCommand = defineCommand({\n name: \"update\",\n description: \"Update a user's role in a workspace\",\n args: z\n .object({\n ...commonArgs,\n ...workspaceArgs,\n email: arg(z.email(), {\n description: \"Email address of the user to update\",\n }),\n role: arg(z.enum(validRoles), {\n description: `New role to assign (${validRoles.join(\", \")})`,\n alias: \"r\",\n }),\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n await updateUser({\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n email: args.email,\n role: args.role as (typeof validRoles)[number],\n });\n\n logger.success(`User \"${args.email}\" updated to role \"${args.role}\".`);\n }),\n});\n","import * as fs from \"node:fs\";\nimport ml from \"multiline-ts\";\nimport * as path from \"pathe\";\nimport { resolveTSConfig } from \"pkg-types\";\nimport * as rolldown from \"rolldown\";\nimport { getDistDir } from \"@/cli/shared/dist-dir\";\nimport type { QueryEngine } from \"@/cli/query\";\n\nfunction createSqlEntry(): string {\n return ml /* ts */ `\n import { Kysely, sql } from \"@tailor-platform/sdk/kysely\";\n import { TailordbDialect } from \"@tailor-platform/function-kysely-tailordb\";\n\n type QueryInput = {\n namespace: string;\n queries: string[];\n };\n\n function getDB(namespace: string) {\n const client = new tailordb.Client({ namespace });\n return new Kysely<Record<string, Record<string, unknown>>>({\n dialect: new TailordbDialect(client),\n });\n }\n\n export async function main(input: QueryInput) {\n const db = getDB(input.namespace);\n const results = [];\n for (const query of input.queries) {\n const result = await sql.raw(query).execute(db);\n const rows = result.rows ?? 
[];\n results.push({ rows, rowCount: rows.length });\n }\n if (results.length === 1) {\n return results[0];\n }\n return results;\n }\n `;\n}\n\nfunction createGqlEntry(): string {\n return ml /* ts */ `\n type QueryInput = {\n endpoint: string;\n accessToken: string;\n query: string;\n };\n\n export async function main(input: QueryInput) {\n const response = await fetch(input.endpoint, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: \\`Bearer \\${input.accessToken}\\`,\n },\n body: JSON.stringify({\n query: input.query,\n }),\n });\n if (!response.ok) {\n let message = \\`HTTP \\${response.status}\\`;\n try {\n const errorJson = await response.json();\n if (errorJson && typeof errorJson === \"object\" && \"message\" in errorJson) {\n message = String(errorJson.message);\n }\n } catch {\n // Keep default HTTP status message when response body is not JSON.\n }\n throw new Error(\\`GraphQL request failed: \\${message}\\`);\n }\n\n const json = await response.json();\n return json;\n }\n `;\n}\n\n/**\n * Bundle a query executor script for TestExecScript.\n * @param engine - Query engine type\n * @returns Bundled code\n */\nexport async function bundleQueryScript(engine: QueryEngine): Promise<string> {\n const outputDir = path.resolve(getDistDir(), \"query\");\n fs.mkdirSync(outputDir, { recursive: true });\n\n const entryPath = path.join(outputDir, `query_${engine}.entry.ts`);\n const outputPath = path.join(outputDir, `query_${engine}.js`);\n const entryContent = engine === \"sql\" ? 
createSqlEntry() : createGqlEntry();\n fs.writeFileSync(entryPath, entryContent);\n\n let tsconfig: string | undefined;\n try {\n tsconfig = await resolveTSConfig();\n } catch {\n tsconfig = undefined;\n }\n\n await rolldown.build(\n rolldown.defineConfig({\n input: entryPath,\n output: {\n file: outputPath,\n format: \"esm\",\n sourcemap: false,\n minify: false,\n inlineDynamicImports: true,\n globals: {\n tailordb: \"tailordb\",\n },\n },\n external: engine === \"sql\" ? [\"tailordb\"] : [],\n resolve: {\n conditionNames: [\"node\", \"import\"],\n },\n tsconfig,\n treeshake: {\n moduleSideEffects: false,\n annotations: true,\n unknownGlobalSideEffects: false,\n },\n logLevel: \"silent\",\n }) as rolldown.BuildOptions,\n );\n\n return fs.readFileSync(outputPath, \"utf-8\");\n}\n","import { CLIError } from \"../shared/errors\";\nimport type { QueryEngine } from \".\";\n\nfunction toErrorMessage(error: unknown): string {\n if (error instanceof Error) {\n return error.message;\n }\n return String(error);\n}\n\ntype MapQueryExecutionErrorArgs = {\n error: unknown;\n engine: QueryEngine;\n namespace: string | undefined;\n machineUser?: string;\n};\n\n/**\n * Maps errors from query execution to user-friendly CLI errors with suggestions when possible.\n * @param args - The error and context information for mapping\n * @returns A CLIError with a user-friendly message\n */\nexport function mapQueryExecutionError(args: MapQueryExecutionErrorArgs): Error {\n const message = toErrorMessage(args.error);\n\n if (message.includes(\"machine user does not exist\")) {\n return CLIError({\n code: \"not_found\",\n message: `Machine user '${args.machineUser ?? 
\"unknown\"}' was not found.`,\n suggestion: \"Run `tailor-sdk machineuser list` and use an existing name.\",\n });\n }\n\n if (\n args.engine === \"sql\" &&\n message.includes(\n \"sqlaccess error: failed to fetch schema: query returned an unexpected number of rows\",\n )\n ) {\n return CLIError({\n code: \"invalid_namespace\",\n message: `Failed to load TailorDB schema for namespace '${args.namespace}'.`,\n suggestion:\n \"Ensure the query references TailorDB types from a single namespace and re-apply if needed.\",\n });\n }\n\n if (args.engine === \"sql\" && message.includes(\"sqlaccess error: failed to parse:\")) {\n const parserReason = message\n .split(\"sqlaccess error: failed to parse:\")\n .at(1)\n ?.split(\"\\n\")\n .at(0)\n ?.trim();\n\n return CLIError({\n code: \"invalid_sql\",\n message: \"SQL parse error.\",\n suggestion: parserReason ?? \"The SQL query contains unsupported syntax.\",\n });\n }\n\n return args.error instanceof Error ? args.error : new Error(message);\n}\n","import { parse } from \"@0no-co/graphql.web\";\n\n/**\n * Return true when the buffered GraphQL input parses as a complete document.\n * @param input - Buffered GraphQL input\n * @returns True when the GraphQL document is complete and ready to execute\n */\nexport function isGraphQLInputComplete(input: string): boolean {\n if (input.trim().length === 0) {\n return false;\n }\n\n try {\n parse(input);\n return true;\n } catch {\n return false;\n }\n}\n","/**\n * Return true when the buffered SQL input ends with a real statement terminator.\n * @param input - Buffered SQL input\n * @returns True when the SQL statement is complete and ready to execute\n */\nexport function isSqlInputComplete(input: string): boolean {\n let inSingleQuote = false;\n let inDoubleQuote = false;\n let inLineComment = false;\n let blockCommentDepth = 0;\n let dollarQuoteTag: string | null = null;\n let lastSignificantTokenWasSemicolon = false;\n\n for (let i = 0; i < input.length; i += 1) {\n const char = 
input[i];\n const next = input[i + 1];\n\n if (inLineComment) {\n if (char === \"\\n\") {\n inLineComment = false;\n }\n continue;\n }\n\n if (blockCommentDepth > 0) {\n if (char === \"/\" && next === \"*\") {\n blockCommentDepth += 1;\n i += 1;\n continue;\n }\n if (char === \"*\" && next === \"/\") {\n blockCommentDepth -= 1;\n i += 1;\n }\n continue;\n }\n\n if (dollarQuoteTag != null) {\n if (input.startsWith(dollarQuoteTag, i)) {\n i += dollarQuoteTag.length - 1;\n dollarQuoteTag = null;\n }\n continue;\n }\n\n if (inSingleQuote) {\n if (char === \"'\" && next === \"'\") {\n i += 1;\n continue;\n }\n if (char === \"'\") {\n inSingleQuote = false;\n }\n continue;\n }\n\n if (inDoubleQuote) {\n if (char === '\"' && next === '\"') {\n i += 1;\n continue;\n }\n if (char === '\"') {\n inDoubleQuote = false;\n }\n continue;\n }\n\n if (char === \"-\" && next === \"-\") {\n inLineComment = true;\n i += 1;\n continue;\n }\n\n if (char === \"/\" && next === \"*\") {\n blockCommentDepth = 1;\n i += 1;\n continue;\n }\n\n if (char === \"'\") {\n lastSignificantTokenWasSemicolon = false;\n inSingleQuote = true;\n continue;\n }\n\n if (char === '\"') {\n lastSignificantTokenWasSemicolon = false;\n inDoubleQuote = true;\n continue;\n }\n\n if (char === \"$\") {\n const rest = input.slice(i);\n const match = rest.match(/^\\$[A-Za-z_][A-Za-z0-9_]*\\$/) ?? 
rest.match(/^\\$\\$/);\n if (match != null) {\n lastSignificantTokenWasSemicolon = false;\n dollarQuoteTag = match[0];\n i += match[0].length - 1;\n continue;\n }\n }\n\n if (char === \";\") {\n lastSignificantTokenWasSemicolon = true;\n continue;\n }\n\n if (!/\\s/.test(char)) {\n lastSignificantTokenWasSemicolon = false;\n }\n }\n\n return (\n lastSignificantTokenWasSemicolon &&\n !inSingleQuote &&\n !inDoubleQuote &&\n blockCommentDepth === 0 &&\n dollarQuoteTag == null\n );\n}\n","import { astVisitor, parse, type From, type Statement } from \"pgsql-ast-parser\";\n\n/**\n * Extract TailorDB type names from SQL query.\n * @param query - SQL query\n * @returns Type names referenced by query\n */\nexport function extractTypeNamesFromSql(query: string): string[] {\n let statements: Statement[];\n try {\n statements = parse(query);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(\n `SQL parse error: ${message}\\nIf your table name is a reserved keyword (e.g. User), wrap it in double quotes: SELECT * FROM \"User\"`,\n );\n }\n const typeNames = new Set<string>();\n\n const visitor = astVisitor((mapper) => ({\n tableRef: (tableRef) => {\n typeNames.add(tableRef.name);\n\n mapper.super().tableRef(tableRef);\n return tableRef;\n },\n }));\n\n for (const statement of statements) {\n visitor.statement(statement);\n }\n\n return [...typeNames];\n}\n\nfunction collectAliasMap(fromClauses: From[]): Map<string, string> {\n const aliasMap = new Map<string, string>();\n\n for (const from of fromClauses) {\n if (from.type === \"table\") {\n const tableName = from.name.name;\n const alias = from.name.alias ?? 
tableName;\n aliasMap.set(alias, tableName);\n }\n }\n\n return aliasMap;\n}\n\nexport type ColumnSlot =\n | { type: \"explicit\"; name: string }\n | { type: \"wildcard\"; typeNames: string[] };\n\n/**\n * Extract the column template from a SQL query's SELECT clause.\n * Returns an ordered list of column slots representing explicit columns\n * and wildcard expansions with their resolved type names.\n *\n * Only inspects the top-level SELECT statement, not subqueries.\n * TailorDB's sqlaccess does not currently support subqueries in FROM clauses,\n * but we intentionally avoid recursing into nested SELECTs to prevent\n * false positives if the parser accepts such queries.\n * @param query - SQL query\n * @returns Column slots if wildcards are present, null otherwise\n */\nexport function extractColumnTemplate(query: string): ColumnSlot[] | null {\n try {\n const statements = parse(query);\n\n for (const statement of statements) {\n if (statement.type !== \"select\" || !statement.columns) {\n continue;\n }\n\n const aliasMap = collectAliasMap(statement.from ?? []);\n const slots: ColumnSlot[] = [];\n let hasWildcard = false;\n\n for (const column of statement.columns) {\n if (column.expr.type === \"ref\" && column.expr.name === \"*\") {\n hasWildcard = true;\n if (column.expr.table) {\n const typeName = aliasMap.get(column.expr.table.name);\n slots.push({ type: \"wildcard\", typeNames: typeName ? [typeName] : [] });\n } else {\n slots.push({ type: \"wildcard\", typeNames: [...new Set(aliasMap.values())] });\n }\n } else {\n const name = column.alias?.name ?? (column.expr.type === \"ref\" ? column.expr.name : null);\n if (name) {\n slots.push({ type: \"explicit\", name });\n }\n }\n }\n\n return hasWildcard ? 
slots : null;\n }\n\n return null;\n } catch {\n return null;\n }\n}\n","import { pathToFileURL } from \"node:url\";\nimport { loadFilesWithIgnores } from \"@/cli/services/file-loader\";\nimport { TailorDBTypeSchema } from \"@/parser/service/tailordb\";\nimport type { LoadedConfig } from \"@/cli/shared/config-loader\";\n\ntype TypeFieldOrderMap = Map<string, string[]>;\n\n/**\n * Load field definition order for all TailorDB types in a namespace.\n * @param config - Loaded application configuration\n * @param namespace - TailorDB namespace name\n * @returns Map of type name to field names in definition order\n */\nexport async function loadTypeFieldOrder(\n config: LoadedConfig,\n namespace: string,\n): Promise<TypeFieldOrderMap> {\n const fieldOrder: TypeFieldOrderMap = new Map();\n const dbConfig = config.db?.[namespace];\n\n if (!dbConfig || !(\"files\" in dbConfig) || dbConfig.files.length === 0) {\n return fieldOrder;\n }\n\n const typeFiles = loadFilesWithIgnores(dbConfig);\n\n await Promise.all(\n typeFiles.map(async (typeFile) => {\n try {\n const module = await import(pathToFileURL(typeFile).href);\n\n for (const exportedValue of Object.values(module)) {\n const result = TailorDBTypeSchema.safeParse(exportedValue);\n if (!result.success) {\n continue;\n }\n\n fieldOrder.set(result.data.name, Object.keys(result.data.fields));\n }\n } catch {\n // Skip files that fail to load\n }\n }),\n );\n\n return fieldOrder;\n}\n","import * as fs from \"node:fs/promises\";\nimport { tmpdir } from \"node:os\";\nimport { createInterface } from \"node:readline/promises\";\nimport { create } from \"@bufbuild/protobuf\";\nimport {\n AuthInvokerSchema,\n type AuthInvoker,\n type MachineUser,\n} from \"@tailor-proto/tailor/v1/auth_resource_pb\";\nimport * as path from \"pathe\";\nimport { parse as parseSql, toSql } from \"pgsql-ast-parser\";\nimport { arg, defineCommand } from \"politty\";\nimport { z } from \"zod\";\nimport { bundleQueryScript } from 
\"../bundler/query/query-bundler\";\nimport { commonArgs, deploymentArgs, jsonArgs, withCommonArgs } from \"../shared/args\";\nimport { fetchMachineUserToken, initOperatorClient } from \"../shared/client\";\nimport { extractAllNamespaces } from \"../shared/config\";\nimport { loadConfig, type LoadedConfig } from \"../shared/config-loader\";\nimport { loadAccessToken, loadWorkspaceId } from \"../shared/context\";\nimport { getEditorCommand, openInEditor } from \"../shared/editor\";\nimport { isCLIError } from \"../shared/errors\";\nimport { logger } from \"../shared/logger\";\nimport { executeScript } from \"../shared/script-executor\";\nimport { resolveTypeNamespaces } from \"../shared/tailordb-namespace\";\nimport { mapQueryExecutionError } from \"./errors\";\nimport { isGraphQLInputComplete } from \"./graphql-repl\";\nimport { isSqlInputComplete } from \"./sql-repl\";\nimport {\n extractColumnTemplate,\n extractTypeNamesFromSql,\n type ColumnSlot,\n} from \"./sql-type-extractor\";\nimport { loadTypeFieldOrder } from \"./type-field-order\";\nimport type { Application } from \"@tailor-proto/tailor/v1/application_resource_pb\";\n\nconst queryEngineSchema = z.enum([\"sql\", \"gql\"]);\nconst queryBaseOptionsSchema = z.object({\n workspaceId: z.string().optional(),\n profile: z.string().optional(),\n configPath: z.string().optional(),\n engine: queryEngineSchema,\n machineUser: z.string(),\n});\nconst queryOptionsSchema = queryBaseOptionsSchema.extend({\n query: z.string(),\n});\n\nexport type QueryEngine = z.infer<typeof queryEngineSchema>;\ntype QueryOptions = z.input<typeof queryOptionsSchema>;\ntype QueryBaseOptions = z.input<typeof queryBaseOptionsSchema>;\ntype QuerySharedOptions = Omit<QueryOptions, \"engine\">;\ntype Client = Awaited<ReturnType<typeof initOperatorClient>>;\n\ntype SQLQueryDispatchResult = {\n engine: \"sql\";\n namespace: string;\n query: string;\n result: unknown;\n};\n\ntype GQLQueryDispatchResult = {\n engine: \"gql\";\n query: string;\n 
result: unknown;\n};\n\ntype QueryDispatchResult = SQLQueryDispatchResult | GQLQueryDispatchResult;\n\ntype SQLResultRow = Record<string, unknown>;\ntype SQLExecutionResult = {\n rows: SQLResultRow[];\n rowCount: number;\n};\n\ntype QueryCommandInput =\n | {\n mode: \"query\";\n query: string;\n }\n | {\n mode: \"repl\";\n }\n | {\n mode: \"abort\";\n };\n\ntype ReplCommand = \"quit\" | \"help\" | \"clear\" | \"unknown\";\ntype ReplInterruptAction = \"exit\" | \"clear\";\n\nasync function getNamespaceFromSqlQuery(\n workspaceId: string,\n query: string,\n client: Client,\n namespaces: string[],\n): Promise<string> {\n if (namespaces.length === 0) {\n throw new Error(\"No namespaces found in configuration.\");\n }\n\n if (namespaces.length === 1) {\n return namespaces[0];\n }\n\n const typeNames = extractTypeNamesFromSql(query);\n if (typeNames.length === 0) {\n throw new Error(\n `Could not infer namespace from query. Detected namespaces: ${namespaces.join(\", \")}.`,\n );\n }\n\n const typeNamespaceMap = await resolveTypeNamespaces({\n workspaceId,\n namespaces,\n typeNames,\n client,\n });\n\n const notFoundTypes = typeNames.filter((typeName) => !typeNamespaceMap.has(typeName));\n if (notFoundTypes.length > 0) {\n throw new Error(`Could not find namespace for types in query: ${notFoundTypes.join(\", \")}.`);\n }\n\n const namespacesFromTypes = new Set(typeNamespaceMap.values());\n if (namespacesFromTypes.size === 1) {\n return [...namespacesFromTypes][0];\n }\n\n throw new Error(\n `Query references types from multiple namespaces: ${[...namespacesFromTypes].join(\", \")}.`,\n );\n}\n\nasync function loadOptions(options: QueryBaseOptions) {\n const result = queryBaseOptionsSchema.safeParse(options);\n\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const accessToken = await loadAccessToken({\n useProfile: true,\n profile: result.data.profile,\n });\n const client = await initOperatorClient(accessToken);\n const workspaceId = 
loadWorkspaceId({\n workspaceId: result.data.workspaceId,\n profile: result.data.profile,\n });\n const { config } = await loadConfig(options.configPath);\n const namespaces = extractAllNamespaces(config);\n const { application } = await client.getApplication({\n workspaceId,\n applicationName: config.name,\n });\n\n if (!application?.authNamespace) {\n throw new Error(`Application ${config.name} does not have an auth configuration.`);\n }\n\n const { machineUser: machineUserResource } = await client.getAuthMachineUser({\n workspaceId: workspaceId,\n authNamespace: application.authNamespace,\n name: result.data.machineUser,\n });\n\n if (!machineUserResource) {\n throw new Error(`Machine user ${result.data.machineUser} not found.`);\n }\n\n return {\n engine: result.data.engine,\n client,\n workspaceId,\n config,\n application,\n machineUserResource,\n namespaces,\n };\n}\n\nasync function sqlQuery(\n client: Client,\n invoker: AuthInvoker,\n args: {\n workspaceId: string;\n namespace: string;\n bundledCode: string;\n query: string;\n },\n): Promise<SQLQueryDispatchResult> {\n const queries = splitSqlStatements(args.query);\n const executed = await executeScript({\n client,\n workspaceId: args.workspaceId,\n name: `query-sql-${args.namespace}.js`,\n code: args.bundledCode,\n arg: JSON.stringify({\n namespace: args.namespace,\n queries,\n }),\n invoker,\n });\n\n if (!executed.success) {\n throw new Error(executed.error);\n }\n\n return {\n engine: \"sql\" as const,\n namespace: args.namespace,\n query: args.query,\n result: parseExecutionResult(executed.result),\n };\n}\n\nasync function gqlQuery(\n client: Client,\n invoker: AuthInvoker,\n application: Application,\n machineUser: MachineUser,\n args: {\n workspaceId: string;\n bundledCode: string;\n query: string;\n },\n): Promise<GQLQueryDispatchResult> {\n const { access_token: accessToken } = await fetchMachineUserToken(\n application.url,\n machineUser.clientId,\n machineUser.clientSecret,\n );\n\n const 
executed = await executeScript({\n client,\n workspaceId: args.workspaceId,\n name: `query-gql.js`,\n code: args.bundledCode,\n arg: JSON.stringify({\n endpoint: `${application.url}/query`,\n accessToken,\n query: args.query,\n }),\n invoker,\n });\n\n if (!executed.success) {\n throw new Error(executed.error);\n }\n\n return {\n engine: \"gql\" as const,\n query: args.query,\n result: parseExecutionResult(executed.result),\n };\n}\n\nfunction parseExecutionResult(result: string): unknown {\n if (!result) {\n return null;\n }\n\n try {\n return JSON.parse(result);\n } catch {\n return result;\n }\n}\n\n/**\n * Resolve query input mode from CLI args.\n * @param args - Query input flags\n * @param args.query - Direct query string\n * @param args.file - File path containing query text\n * @param args.edit - Open a query editor instead of REPL\n * @param args.engine - Query engine used to choose temp file extension\n * @returns Normalized input mode\n */\nexport async function resolveQueryCommandInput(args: {\n query?: string;\n file?: string;\n edit?: boolean;\n engine: QueryEngine;\n}): Promise<QueryCommandInput> {\n if (args.query != null) {\n return {\n mode: \"query\",\n query: args.query,\n };\n }\n\n if (args.file != null) {\n return {\n mode: \"query\",\n query: await fs.readFile(args.file, \"utf-8\"),\n };\n }\n\n if (args.edit) {\n return await resolveEditedQueryInput(args.engine);\n }\n\n return {\n mode: \"repl\",\n };\n}\n\nasync function resolveEditedQueryInput(engine: QueryEngine): Promise<QueryCommandInput> {\n if (!process.stdin.isTTY || !process.stdout.isTTY) {\n throw new Error(\n \"Non-interactive terminals are not supported. Pass -q/--query or -f/--file to run a query.\",\n );\n }\n\n const editor = getEditorCommand();\n\n const tempDir = await fs.mkdtemp(path.join(tmpdir(), \"tailor-query-\"));\n const fileExtension = engine === \"sql\" ? 
\"sql\" : \"graphql\";\n const filePath = path.join(tempDir, `query.${fileExtension}`);\n const initialQuery = \"\";\n\n try {\n await fs.writeFile(filePath, initialQuery, \"utf-8\");\n try {\n await openInEditor(filePath, editor);\n } catch (error) {\n throw new Error(\n `Failed to open query editor \"${editor}\": ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n const editedQuery = await fs.readFile(filePath, \"utf-8\");\n if (editedQuery.trim().length === 0 || editedQuery === initialQuery) {\n return {\n mode: \"abort\",\n };\n }\n\n return {\n mode: \"query\",\n query: editedQuery,\n };\n } finally {\n await fs.rm(tempDir, { recursive: true, force: true });\n }\n}\n\n/**\n * Dispatch query execution.\n * @param options - Query command options\n * @returns Dispatch result\n */\nexport async function query(options: QueryOptions): Promise<QueryDispatchResult> {\n const result = queryOptionsSchema.safeParse(options);\n if (!result.success) {\n throw new Error(result.error.issues[0].message);\n }\n\n const executor = await prepareQueryExecutor(result.data);\n return await executor(result.data.query);\n}\n\nasync function prepareQueryExecutor(\n options: QueryBaseOptions,\n): Promise<(query: string) => Promise<QueryDispatchResult>> {\n const { client, workspaceId, config, application, machineUserResource, engine, namespaces } =\n await loadOptions(options);\n const bundledCode = await bundleQueryScript(engine);\n const invoker = create(AuthInvokerSchema, {\n namespace: application.authNamespace,\n machineUserName: machineUserResource.name,\n });\n\n return async (queryString: string) => {\n let namespace: string | undefined;\n\n try {\n switch (engine) {\n case \"sql\": {\n namespace = await getNamespaceFromSqlQuery(workspaceId, queryString, client, namespaces);\n const result = await sqlQuery(client, invoker, {\n workspaceId,\n namespace,\n bundledCode,\n query: queryString,\n });\n return reorderSqlColumns(result, config, namespace, 
queryString);\n }\n case \"gql\":\n return await gqlQuery(client, invoker, application, machineUserResource, {\n workspaceId,\n bundledCode,\n query: queryString,\n });\n default:\n throw new Error(`Unsupported query engine: ${engine satisfies never}`);\n }\n } catch (error) {\n throw mapQueryExecutionError({\n error,\n engine,\n namespace,\n machineUser: options.machineUser,\n });\n }\n };\n}\n\nfunction isReadlineTerminationError(error: unknown): boolean {\n if (!(error instanceof Error) || !(\"code\" in error)) {\n return false;\n }\n return error.code === \"ABORT_ERR\" || error.code === \"ERR_USE_AFTER_CLOSE\";\n}\n\n/**\n * Resolve a backslash REPL command into its normalized action.\n * @param input - Raw user input\n * @returns Normalized REPL command, or null for non-command input\n */\nexport function resolveReplCommand(input: string): ReplCommand | null {\n const trimmed = input.trim();\n if (!trimmed.startsWith(\"\\\\\")) {\n return null;\n }\n\n if (trimmed === \"\\\\q\" || trimmed === \"\\\\quit\") {\n return \"quit\";\n }\n\n if (trimmed === \"\\\\help\" || trimmed === \"\\\\h\" || trimmed === \"\\\\?\") {\n return \"help\";\n }\n\n if (trimmed === \"\\\\clear\" || trimmed === \"\\\\c\") {\n return \"clear\";\n }\n\n return \"unknown\";\n}\n\n/**\n * Decide how REPL should react to Ctrl+C based on current buffered input.\n * @param bufferedLines - Previously accepted lines in the current statement buffer\n * @param currentLine - In-progress line currently being edited\n * @returns Whether to clear the buffer or exit the REPL\n */\nexport function resolveReplInterruptAction(\n bufferedLines: string[],\n currentLine: string,\n): ReplInterruptAction {\n if (bufferedLines.length === 0 && currentLine.length === 0) {\n return \"exit\";\n }\n\n return \"clear\";\n}\n\n/**\n * Clear the interactive terminal screen and move the cursor to the top-left.\n */\nfunction clearReplScreen(): void {\n process.stdout.write(\"\\u001Bc\");\n}\n\nasync function runRepl(\n 
options: QueryBaseOptions & {\n json?: boolean;\n },\n): Promise<void> {\n if (!process.stdin.isTTY || !process.stdout.isTTY) {\n throw new Error(\n \"Non-interactive terminals are not supported. Pass -q/--query or -f/--file to run a query.\",\n );\n }\n\n const execute = await prepareQueryExecutor(options);\n const rl = createInterface({\n input: process.stdin,\n output: process.stdout,\n });\n\n logger.info(`Entering ${options.engine.toUpperCase()} REPL mode.`);\n logger.info(\"Type \\\\help for usage, \\\\q to quit.\");\n\n const lines: string[] = [];\n\n try {\n while (true) {\n const prompt = lines.length === 0 ? `${options.engine}> ` : \" \";\n let line: string;\n let interruptAction: ReplInterruptAction | null = null;\n const controller = new AbortController();\n const handleSigint = () => {\n interruptAction = resolveReplInterruptAction(lines, rl.line);\n if (interruptAction === \"clear\") {\n lines.length = 0;\n rl.write(null, {\n ctrl: true,\n name: \"u\",\n });\n process.stdout.write(\"\\n\");\n } else {\n rl.close();\n }\n controller.abort();\n };\n\n rl.once(\"SIGINT\", handleSigint);\n\n try {\n line = await rl.question(prompt, {\n signal: controller.signal,\n });\n } catch (error) {\n rl.off(\"SIGINT\", handleSigint);\n if (controller.signal.aborted) {\n if (interruptAction === \"exit\") {\n return;\n }\n continue;\n }\n if (isReadlineTerminationError(error)) {\n return;\n }\n throw error;\n } finally {\n rl.off(\"SIGINT\", handleSigint);\n }\n const trimmed = line.trim();\n\n if (lines.length === 0 && trimmed === \"\") {\n continue;\n }\n\n if (lines.length === 0) {\n const command = resolveReplCommand(trimmed);\n if (command === \"quit\") {\n return;\n }\n if (command === \"help\") {\n printReplHelp(options.engine);\n continue;\n }\n if (command === \"clear\") {\n clearReplScreen();\n continue;\n }\n if (command === \"unknown\") {\n logger.warn(`Unknown command: ${trimmed}`);\n continue;\n }\n }\n\n lines.push(line);\n\n if (options.engine === 
\"sql\") {\n if (!isSqlInputComplete(lines.join(\"\\n\"))) {\n continue;\n }\n } else if (!isGraphQLInputComplete(lines.join(\"\\n\"))) {\n continue;\n }\n\n const statement = getReplStatement(lines, options.engine);\n lines.length = 0;\n\n if (statement.length === 0) {\n continue;\n }\n\n try {\n if (options.engine === \"sql\") {\n const result = await execute(statement);\n if (result.engine !== \"sql\") {\n throw new Error(`Expected sql engine result but got: ${result.engine}`);\n }\n printSqlResult(result, { json: options.json });\n continue;\n }\n\n const result = await execute(statement);\n if (result.engine !== \"gql\") {\n throw new Error(`Expected gql engine result but got: ${result.engine}`);\n }\n printGqlResult(result, { json: options.json });\n } catch (error) {\n if (isCLIError(error)) {\n logger.log(error.format());\n continue;\n }\n if (error instanceof Error) {\n logger.error(error.message);\n continue;\n }\n logger.error(String(error));\n }\n }\n } finally {\n rl.close();\n }\n}\n\nfunction getReplStatement(lines: string[], engine: QueryEngine): string {\n if (engine === \"sql\") {\n return lines.join(\"\\n\").trim();\n }\n\n let end = lines.length;\n while (end > 0 && lines[end - 1].trim() === \"\") {\n end -= 1;\n }\n return lines.slice(0, end).join(\"\\n\").trim();\n}\n\nfunction printReplHelp(engine: QueryEngine): void {\n logger.log(\"REPL commands:\");\n logger.log(\" \\\\help, \\\\h, \\\\? 
Show this help\");\n logger.log(\" Ctrl+C Clear current input\");\n logger.log(\" \\\\q, \\\\quit, Ctrl+D Exit REPL\");\n logger.log(\" \\\\clear, \\\\c Clear the screen\");\n if (engine === \"sql\") {\n logger.log(\"SQL execution: statement ending with ';' runs immediately.\");\n return;\n }\n logger.log(\"GraphQL execution: a complete GraphQL document runs immediately.\");\n}\n\n/**\n * Execute SQL query directly.\n * @param options - Shared query options\n * @returns SQL query result\n */\nasync function querySql(options: QuerySharedOptions): Promise<SQLQueryDispatchResult> {\n const result = await query({\n ...options,\n engine: \"sql\",\n });\n\n if (result.engine !== \"sql\") {\n throw new Error(`Expected sql engine result but got: ${result.engine}`);\n }\n\n return result;\n}\n\n/**\n * Execute GraphQL query directly.\n * @param options - Shared query options\n * @returns GraphQL query result\n */\nasync function queryGql(options: QuerySharedOptions): Promise<GQLQueryDispatchResult> {\n const result = await query({\n ...options,\n engine: \"gql\",\n });\n\n if (result.engine !== \"gql\") {\n throw new Error(`Expected gql engine result but got: ${result.engine}`);\n }\n\n return result;\n}\n\nasync function reorderSqlColumns(\n result: SQLQueryDispatchResult,\n config: LoadedConfig,\n namespace: string,\n sqlQuery: string,\n): Promise<SQLQueryDispatchResult> {\n if (!isSQLExecutionResult(result.result) || result.result.rows.length === 0) {\n return result;\n }\n\n const template = extractColumnTemplate(sqlQuery);\n if (!template) {\n return result;\n }\n\n try {\n const fieldOrder = await loadTypeFieldOrder(config, namespace);\n const expectedOrder = buildExpectedColumnOrder(template, fieldOrder);\n if (expectedOrder.length === 0) {\n return result;\n }\n\n const orderedRows = result.result.rows.map((row) => reorderRowByTemplate(row, expectedOrder));\n\n return {\n ...result,\n result: {\n ...result.result,\n rows: orderedRows,\n },\n };\n } catch {\n return 
result;\n }\n}\n\nconst SYSTEM_FIELD_ORDER = [\"id\"];\n\nfunction buildExpectedColumnOrder(\n template: ColumnSlot[],\n fieldOrder: Map<string, string[]>,\n): string[] {\n const order: string[] = [];\n\n for (const slot of template) {\n if (slot.type === \"explicit\") {\n order.push(slot.name);\n } else {\n for (const typeName of slot.typeNames) {\n order.push(...SYSTEM_FIELD_ORDER);\n order.push(...(fieldOrder.get(typeName) ?? []));\n }\n }\n }\n\n return order;\n}\n\nfunction reorderRowByTemplate(row: SQLResultRow, expectedOrder: string[]): SQLResultRow {\n const ordered: SQLResultRow = {};\n const rowKeys = new Set(Object.keys(row));\n\n // Build case-insensitive lookup: lowercased key → original key in row.\n // pgsql-ast-parser lowercases unquoted identifiers (PostgreSQL standard),\n // but TailorDB preserves the original case, so we need case-insensitive matching.\n const lowerToOriginal = new Map<string, string>();\n for (const key of rowKeys) {\n lowerToOriginal.set(key.toLowerCase(), key);\n }\n\n for (const key of expectedOrder) {\n const original = lowerToOriginal.get(key.toLowerCase());\n if (original != null && rowKeys.has(original)) {\n ordered[original] = row[original];\n rowKeys.delete(original);\n lowerToOriginal.delete(key.toLowerCase());\n }\n }\n\n for (const key of rowKeys) {\n ordered[key] = row[key];\n }\n\n return ordered;\n}\n\nexport const queryCommand = defineCommand({\n name: \"query\",\n description: \"Run SQL/GraphQL query.\",\n args: z\n .object({\n ...commonArgs,\n ...jsonArgs,\n ...deploymentArgs,\n engine: arg(queryEngineSchema, {\n description: \"Query engine (sql or gql)\",\n }),\n query: arg(z.string().optional(), {\n alias: \"q\",\n description: \"Query string to execute directly; omit to start REPL mode\",\n }),\n file: arg(z.string().optional(), {\n alias: \"f\",\n description: \"Read query string from file; omit to start REPL mode\",\n }),\n edit: arg(z.boolean().default(false), {\n description: \"Open a temporary file in 
your editor; omit to start REPL mode\",\n }),\n machineuser: arg(z.string(), {\n alias: \"m\",\n description: \"Machine user name for query execution\",\n }),\n })\n .superRefine((args, ctx) => {\n if (args.query != null && args.file != null) {\n ctx.addIssue({\n code: \"custom\",\n path: [\"file\"],\n message: \"Pass either -q/--query or -f/--file, not both.\",\n });\n }\n\n if (args.edit && args.query != null) {\n ctx.addIssue({\n code: \"custom\",\n path: [\"edit\"],\n message: \"Pass only one of --edit, -q/--query, or -f/--file.\",\n });\n }\n\n if (args.edit && args.file != null) {\n ctx.addIssue({\n code: \"custom\",\n path: [\"edit\"],\n message: \"Pass only one of --edit, -q/--query, or -f/--file.\",\n });\n }\n })\n .strict(),\n run: withCommonArgs(async (args) => {\n const mode = await resolveQueryCommandInput({\n query: args.query,\n file: args.file,\n edit: args.edit,\n engine: args.engine,\n });\n\n const sharedOptions: QueryBaseOptions = {\n workspaceId: args[\"workspace-id\"],\n profile: args.profile,\n configPath: args.config,\n engine: args.engine,\n machineUser: args.machineuser,\n };\n\n if (mode.mode === \"abort\") {\n logger.info(\"Editor closed without a query. 
Nothing was executed.\");\n return;\n }\n\n if (mode.mode === \"repl\") {\n await runRepl({\n ...sharedOptions,\n json: args.json,\n });\n return;\n }\n\n const directQuery = mode.query;\n\n if (args.engine === \"sql\") {\n const result = await querySql({\n ...sharedOptions,\n query: directQuery,\n });\n printSqlResult(result, { json: args.json });\n return;\n }\n\n const result = await queryGql({\n ...sharedOptions,\n query: directQuery,\n });\n printGqlResult(result, { json: args.json });\n }),\n});\n\nfunction isSQLExecutionResult(value: unknown): value is SQLExecutionResult {\n if (!value || typeof value !== \"object\") {\n return false;\n }\n\n const candidate = value as Partial<SQLExecutionResult>;\n return Array.isArray(candidate.rows) && typeof candidate.rowCount === \"number\";\n}\n\nfunction printSingleSqlResult(\n execResult: SQLExecutionResult,\n options: { json?: boolean } = {},\n): void {\n if (execResult.rows.length === 0) {\n if (options.json) {\n logger.out({ results: [], rowCount: 0 });\n return;\n }\n logger.info(\"No rows returned.\");\n return;\n }\n\n if (options.json) {\n logger.out({ results: execResult.rows, rowCount: execResult.rowCount });\n return;\n }\n\n logger.out(execResult.rows, { showNull: true });\n logger.out(`rows: ${execResult.rowCount}`);\n}\n\nfunction splitSqlStatements(query: string): string[] {\n try {\n const statements = parseSql(query);\n if (statements.length === 0) return [];\n return statements.map((s) => toSql.statement(s));\n } catch {\n const trimmed = query.trim();\n return trimmed.length > 0 ? 
[trimmed] : [];\n }\n}\n\nfunction isSQLExecutionResultArray(value: unknown): value is SQLExecutionResult[] {\n return Array.isArray(value) && value.length > 0 && value.every(isSQLExecutionResult);\n}\n\nfunction printSqlResult(result: SQLQueryDispatchResult, options: { json?: boolean } = {}): void {\n if (isSQLExecutionResultArray(result.result)) {\n if (options.json) {\n logger.out(result.result.map((r) => ({ results: r.rows, rowCount: r.rowCount })));\n return;\n }\n const queries = splitSqlStatements(result.query);\n for (let i = 0; i < result.result.length; i++) {\n if (i > 0) logger.log(\"\");\n logger.info(queries[i] ?? `Statement ${i + 1}`);\n printSingleSqlResult(result.result[i], options);\n }\n return;\n }\n\n if (isSQLExecutionResult(result.result)) {\n printSingleSqlResult(result.result, options);\n return;\n }\n\n logger.out({\n engine: result.engine,\n query: result.query,\n result: result.result,\n });\n}\n\nfunction printGqlResult(result: GQLQueryDispatchResult, options: { json?: boolean } = {}): void {\n if (options.json) {\n logger.out({\n result: result.result,\n });\n return;\n }\n\n logger.out(JSON.stringify(result.result, null, 
2));\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqCA,SAAS,YAAY,OAAyB;CAC5C,MAAM,QAAkB,CACtB,MAAM,IAAI,QAAQ,MAAM,OAAO,KAAK,MAAM,KAAK,KAAK,GAAG,IAAI,MAAM,UAAU,CAC5E;AAED,KAAI,MAAM,QACR,OAAM,KAAK,OAAO,MAAM,KAAK,WAAW,CAAC,GAAG,MAAM,UAAU;AAG9D,KAAI,MAAM,WACR,OAAM,KAAK,OAAO,MAAM,KAAK,cAAc,CAAC,GAAG,MAAM,aAAa;AAGpE,KAAI,MAAM,QACR,OAAM,KACJ,OAAO,MAAM,KAAK,QAAQ,CAAC,oBAAoB,MAAM,QAAQ,kCAC9D;AAGH,QAAO,MAAM,KAAK,GAAG;;;;;;;AAQvB,SAAS,eAAe,SAAoC;CAC1D,MAAM,QAAQ,IAAI,MAAM,QAAQ,QAAQ;AACxC,OAAM,OAAO;AACb,OAAM,OAAO,QAAQ;AACrB,OAAM,UAAU,QAAQ;AACxB,OAAM,aAAa,QAAQ;AAC3B,OAAM,UAAU,QAAQ;AACxB,OAAM,eAAe,YAAY,MAAM;AACvC,QAAO;;;;;;;AAQT,SAAgB,WAAW,OAAmC;AAC5D,QAAO,iBAAiB,SAAS,MAAM,SAAS;;;;;AChElD,MAAM,WAAyC;CAC7C,IAAI;CACJ,GAAG;CACH,GAAG,KAAK;CACT;AAED,MAAM,kBAAkB;;;;;AAMxB,MAAa,cAAc,EACxB,QAAQ,CACR,QAAQ,QAAQ,gBAAgB,KAAK,IAAI,EAAE,EAC1C,SAAS,iEACV,CAAC,CACD,QACE,QAAQ;CACP,MAAM,QAAQ,IAAI,MAAM,gBAAgB;AACxC,QAAO,SAAS,MAAM,IAAI,GAAG,GAAG;GAElC,EAAE,SAAS,mCAAmC,CAC/C;;;;;;AAOH,SAAgB,cAAc,UAA0B;CACtD,MAAM,QAAQ,SAAS,MAAM,gBAAgB;AAG7C,QAFc,SAAS,MAAM,IAAI,GAAG,GAErB,SADF,MAAM;;;;;;AAQrB,MAAa,iBAAiB,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC,UAAU;;;;;;;;;;;AAkBhE,SAAgB,aAAa,UAAsB,kBAAoC;CAErF,MAAM,kBAAkB,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,CAAC;CAEzD,MAAM,QAAQ,OAAmB,aAAsB;AACrD,OAAK,MAAM,QAAQ,CAAC,SAAS,EAAE,CAAC,CAAC,MAAM,EAAE;GACvC,MAAM,UAAU,KAAK,QAAQ,QAAQ,KAAK,EAAE,KAAK;AACjD,OAAI,CAACA,KAAG,WAAW,QAAQ,EAAE;AAC3B,QAAI,SACF,OAAM,IAAI,MAAM,+BAA+B,UAAU;AAE3D;;GAGF,MAAM,SAAS,SADCA,KAAG,aAAa,SAAS,QAAQ,CACjB;AAChC,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,EAAE;AAEjD,QAAI,gBAAgB,IAAI,IAAI,CAC1B;AAGF,YAAQ,IAAI,OAAO;;;;AAKzB,MAAK,UAAU,KAAK;AACpB,MAAK,kBAAkB,MAAM;;;;;;;;;AAc/B,MAAa,aAAa;CACxB,YAAY,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;EACrC,OAAO;EACP,aAAa;EACb,YAAY;GAAE,MAAM;GAAQ,SAAS,CAAC,UAAU,OAAO;GAAE;EAC1D,CAAC;CACF,sBAAsB,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;EAC/C,aAAa;EACb,YAAY;GAAE,MAAM;GAAQ,SAAS,CAAC,UAAU,OAAO;GAAE;EAC1D,CAAC;CACF,SAAS,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE,EACvC,aAAa,0BACd,CAAC;CACH;;;;AAKD,MAAa,gBAAgB;C
AC3B,gBAAgB,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;EACzC,OAAO;EACP,aAAa;EACb,YAAY,EAAE,MAAM,QAAQ;EAC7B,CAAC;CACF,SAAS,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;EAClC,OAAO;EACP,aAAa;EACb,YAAY,EAAE,MAAM,QAAQ;EAC7B,CAAC;CACH;;;;AAKD,MAAa,YAAY,EACvB,QAAQ,IAAI,EAAE,QAAQ,CAAC,QAAQ,mBAAmB,EAAE;CAClD,OAAO;CACP,aAAa;CACb,YAAY;EAAE,MAAM;EAAQ,YAAY,CAAC,KAAK;EAAE;CACjD,CAAC,EACH;;;;AAKD,MAAa,iBAAiB;CAC5B,GAAG;CACH,GAAG;CACJ;;;;AAKD,MAAa,mBAAmB,EAC9B,KAAK,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;CACnC,OAAO;CACP,aAAa;CACd,CAAC,EACH;;;;AAKD,MAAa,WAAW,EACtB,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;CACpC,OAAO;CACP,aAAa;CACd,CAAC,EACH;;;;;;;;;;AAaD,MAAa,kBACgB,YAC3B,OAAO,SAAY;AACjB,KAAI;AAEF,MAAI,UAAU,QAAQ,OAAO,KAAK,SAAS,UACzC,QAAO,WAAW,KAAK;AAIzB,eAAa,KAAK,aAA2B,KAAK,sBAAoC;EAGtF,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,QAAM,eAAe;AAErB,QAAM,QAAQ,KAAK;UACZ,OAAO;AACd,MAAI,WAAW,MAAM,EAAE;AACrB,UAAO,IAAI,MAAM,QAAQ,CAAC;AAC1B,OAAI,KAAK,WAAW,MAAM,MACxB,QAAO,MAAM,mBAAmB,MAAM,QAAQ;aAEvC,iBAAiB,OAAO;AACjC,UAAO,MAAM,MAAM,QAAQ;AAC3B,OAAI,KAAK,WAAW,MAAM,MACxB,QAAO,MAAM,mBAAmB,MAAM,QAAQ;QAGhD,QAAO,MAAM,kBAAkB,QAAQ;AAEzC,UAAQ,KAAK,EAAE;WACP;EAER,MAAM,EAAE,sBAAsB,MAAM,OAAO;AAC3C,QAAM,mBAAmB;;AAE3B,SAAQ,KAAK,EAAE;;;;;;;;;;;ACnNnB,eAAsB,QAAQ,SAAiD;CAC7E,MAAM,cAAc,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC;CAGF,IAAI;AACJ,KAAI,QAAQ,SAAS,SAAS,IAAI,CAChC,gBAAe,QAAQ;KAGvB,gBAAe,6BAA6B,QAAQ;CAItD,MAAM,MAAM,IAAI,IAAI,cAAc,gBAAgB;CAGlD,MAAM,WAAW,MAAM,MAAM,IAAI,UAAU,EAAE;EAC3C,QAAQ;EACR,SAAS;GACP,gBAAgB;GAChB,eAAe,UAAU;GACzB,cAAc,MAAM,WAAW;GAChC;EACD,MAAM,QAAQ,QAAQ;EACvB,CAAC;CAEF,MAAM,OAAO,MAAM,SAAS,MAAM;AAElC,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,oBAAoB,SAAS,OAAO,KAAK,KAAK,UAAU,KAAK,GAAG;AAGlF,QAAO;EACL,QAAQ,SAAS;EACjB;EACD;;AAGH,MAAa,aAAa,cAAc;CACtC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,MAAM,IAAI,EAAE,QAAQ,CAAC,QAAQ,KAAK,EAAE;GAClC,OAAO;GACP,aAAa;GACd,CAAC;EACF,UAAU,IAAI,EAAE,QAAQ,EAAE;GACxB,YAAY;GACZ,aACE;GACH,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,SAAS,MAAM,QAAQ;GAC3B,SAAS,KAAK;GACd
,UAAU,KAAK;GACf,MAAM,KAAK;GACZ,CAAC;AAEF,MAAI,KAAK,KACP,QAAO,IAAI,KAAK,UAAU,OAAO,MAAM,MAAM,EAAE,CAAC;MAEhD,QAAO,IAAI,KAAK,UAAU,OAAO,MAAM,MAAM,EAAE,CAAC;GAElD;CACH,CAAC;;;;AC/FF,MAAM,wBAAwB,EAAE,OAAO;CACrC,YAAY,EAAE,QAAQ;CACtB,aAAa,EAAE,QAAQ;CACxB,CAAC;AAEF,MAAM,mBAAmB,EAAE,OAAO;CAChC,MAAM,EAAE,QAAQ,SAAS;CACzB,WAAW,EAAE,QAAQ;CACrB,iBAAiB,EAAE,MAAM,EAAE,QAAQ,CAAC;CACpC,aAAa,EAAE,MAAM,sBAAsB;CAC3C,WAAW,EAAE,QAAQ;CACtB,CAAC;AAEF,MAAM,sBAAsB,EAAE,OAAO;CACnC,SAAS,EAAE,QAAQ,EAAE;CACrB,YAAY,EAAE,QAAQ;CACtB,cAAc,EAAE,QAAQ,CAAC,UAAU;CACnC,SAAS,EAAE,OAAO,EAAE,QAAQ,EAAE,iBAAiB;CAChD,CAAC;;;;ACSF,MAAM,oBAAoB;AAC1B,MAAM,cAAc;;;;;;AAOpB,SAAS,iBAAiB,QAAiC;CAEzD,IAAI,iBAAmD;CAEvD,SAAS,eAAuB;AAC9B,SAAO,KAAK,KAAK,OAAO,UAAU,kBAAkB;;CAGtD,SAAS,aAAqB;AAC5B,SAAO,KAAK,KAAK,OAAO,UAAU,YAAY;;CAGhD,SAAS,WAAW,UAA0B;AAC5C,SAAO,KAAK,KAAK,YAAY,EAAE,GAAG,SAAS,WAAW,KAAK,IAAI,CAAC,KAAK;;CAGvE,SAAS,eAA0C;AACjD,MAAI;GACF,MAAM,MAAMC,KAAG,aAAa,cAAc,EAAE,QAAQ;GACpD,MAAM,SAAS,oBAAoB,UAAU,KAAK,MAAM,IAAI,CAAC;AAE7D,OAAI,CAAC,OAAO,SAAS;AACnB,qBAAiB;AACjB;;AAGF,oBAAiB,OAAO;AACxB,UAAO;UACD;AAEN,oBAAiB;AACjB;;;CAIJ,SAAS,qBAAgD;AACvD,MAAI,mBAAmB,KACrB,eAAc;AAEhB,SAAO,kBAAkB;;CAG3B,SAAS,uBAAsC;AAC7C,MAAI,mBAAmB,KACrB,eAAc;AAEhB,MAAI,kBAAkB,KACpB,kBAAiB;GACf,SAAS;GACT,YAAY;GACZ,SAAS,EAAE;GACZ;AAEH,SAAO;;CAGT,SAAS,aAAa,UAA+B;AACnD,OAAG,UAAU,OAAO,UAAU,EAAE,WAAW,MAAM,CAAC;EAElD,MAAM,SAAS,cAAc;EAC7B,MAAM,UAAU,KAAK,KAAK,OAAO,UAAU,aAAa,QAAQ,IAAI,MAAM;AAG1E,MAAI;AACF,QAAG,cAAc,SAAS,KAAK,UAAU,UAAU,MAAM,EAAE,EAAE,QAAQ;AACrE,QAAG,WAAW,SAAS,OAAO;WACvB,GAAG;AACV,OAAI;AACF,SAAG,OAAO,SAAS,EAAE,OAAO,MAAM,CAAC;WAC7B;AAGR,SAAM;;AAGR,mBAAiB;;CAGnB,SAAS,SAAS,KAAqC;AAErD,SADiB,sBAAsB,CACvB,QAAQ;;CAG1B,SAAS,SAAS,KAAa,OAAyB;EACtD,MAAM,WAAW,sBAAsB;AACvC,WAAS,QAAQ,OAAO;;CAG1B,SAAS,YAAY,KAAmB;EACtC,MAAM,WAAW,sBAAsB;AAEvC,SAAO,SAAS,QAAQ;;CAG1B,SAAS,kBAAkB,UAAkB,YAA0B;EACrE,MAAM,MAAM,YAAY;AACxB,OAAG,UAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AACtC,OAAG,aAAa,YAAY,WAAW,SAAS,CAAC;EAEjD,MAAM,YAAY,GAAG,WAAW;EAChC,MAAM,gBAAgB,GAAG,WAAW,SAAS,CAAC;AAC9C,MAAIA,KA
AG,WAAW,UAAU,CAC1B,MAAG,aAAa,WAAW,cAAc;MAEzC,MAAG,OAAO,eAAe,EAAE,OAAO,MAAM,CAAC;;CAI7C,SAAS,oBAAoB,UAAkB,YAA6B;EAC1E,MAAM,SAAS,WAAW,SAAS;EACnC,MAAM,YAAY,KAAK,QAAQ,WAAW;AAC1C,OAAG,UAAU,WAAW,EAAE,WAAW,MAAM,CAAC;AAC5C,MAAI;AACF,QAAG,aAAa,QAAQ,WAAW;WAC5B,GAAG;AACV,OAAK,EAA4B,SAAS,SAAU,QAAO;AAC3D,SAAM;;EAGR,MAAM,YAAY,GAAG,OAAO;AAC5B,MAAIA,KAAG,WAAW,UAAU,CAC1B,MAAG,aAAa,WAAW,GAAG,WAAW,MAAM;AAGjD,SAAO;;CAGT,SAAS,QAAc;AACrB,OAAG,OAAO,OAAO,UAAU;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAC5D,mBAAiB;;AAGnB,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD;;;;;;;;;;AChJH,SAAS,mBAAmB,SAA4C;AAGtE,KAAI,EAFY,QAAQ,WAAW,MAGjC,QAAO;EACL,SAAS;EACT,aAAa;GACX,aAAa;AACX,WAAO;;GAET,OAAO;GAGR;EACD,WAAW;EAGZ;CAKH,MAAM,QAAQ,iBAAiB,EAAE,UAFhB,QAAQ,YAAY,KAAK,QAAQ,YAAY,EAAE,QAAQ,EAE7B,CAAC;CAG5C,MAAM,mBAAmB,MAAM,cAAc;AAC7C,KAAI,kBACF;MAAI,iBAAiB,eAAe,QAAQ,YAAY;AACtD,UAAO,MACL,+CAA+C,iBAAiB,WAAW,MAAM,QAAQ,aAC1F;AACD,SAAM,OAAO;aACJ,iBAAiB,iBAAiB,QAAQ,cAAc;AACjE,UAAO,MAAM,sCAAsC;AACnD,SAAM,OAAO;;;AAMjB,QAAO;EACL,SAAS;EACT,aAJkB,kBAAkB,MAAM;EAK1C,WAAW;GAET,MAAM,WAAW,MAAM,oBAAoB,IAAI;IAC7C,SAAS;IACT,YAAY,QAAQ;IACpB,cAAc,QAAQ;IACtB,SAAS,EAAE;IACZ;AACD,YAAS,aAAa,QAAQ;AAC9B,YAAS,eAAe,QAAQ;AAChC,SAAM,aAAa,SAAS;;EAE/B;;;;;;;;;;;AC1DH,SAAgB,4BAA4B,QAAwC;AAClF,QAAO,4BAA4B,OAAO;;;;;;;;;AAU5C,SAAgB,uBACd,cACA,eACA,KACQ;CAGR,MAAM,YAAY,eACd,OAAO,QAAQ,aAAa,CACzB,KAAK,CAAC,KAAK,WAAW,OAAO,IAAI,IAAI,MAAM,GAAG,CAC9C,KAAK,KAAK,GACb;CAEJ,MAAM,UACJ,CAAC,gBAAgB,OAAO,KAAK,aAAa,CAAC,WAAW,IAClD,OACA;EACN,UAAU;;CAOV,MAAM,WAAW;gBAHA,gBAAgB,IAAI,cAAc,UAAU,SAAS,CAAC,KAAK,KAAK,CAAC,KAAK,KAIhE;;CAIvB,MAAM,YAAY,MACd,OAAO,QAAQ,IAAI,CAChB,KAAK,CAAC,KAAK,WAAW;AAErB,SAAO,OAAO,IAAI,IADA,OAAO,UAAU,WAAW,IAAI,MAAM,KAAK,OAAO,MAAM,CAC1C;GAChC,CACD,KAAK,KAAK,GACb;AASJ,QAAO,EAAY;;;;;;2BAMM,QAAQ;4BACP,SAAS;kBAbjC,CAAC,OAAO,OAAO,KAAK,IAAI,CAAC,WAAW,IAChC,OACA;EACN,UAAU;KAWc;;;;;;;AAQ1B,SAAS,4BAA4B,QAAwC;CAC3E,MAAM,OAAO,OAAO;AACpB,KAAI,CAAC,QAAQ,OAAO,SAAS,SAC3B,QAAO,EAAE;CAGX,MAAM,sBAAsB,UAAuC;EACjE,MAAM,OAAO,OAAO;EACpB,MAAM,WAAW,OAAO;AAGxB,MAAI,CAAC
,SACH,QAAO;EAGT,IAAI,UAAU;AAEd,MAAI,SAAS,UACX,WAAU;WACD,SAAS,UAAU,SAAS,cAErC,WAAU,SAAS,cAAc,KAAK,MAAM,IAAI,EAAE,MAAM,GAAG,CAAC,KAAK,MAAM;AAIzE,MAAI,SAAS,MACX,YAAW;AAGb,SAAO;;AAIT,KAAI,iBAAiB,MAAM;EACzB,MAAM,cACJ,KASA;EAEF,MAAM,aAAa,aAAa;EAChC,MAAM,SAAS,aAAa,MAAM;EAClC,MAAM,gBAAgB,aAAa;AAUnC,SAAO;GACL,cARmD,aACjD,OAAO,KAAK,WAAW,CAAC,QAAQ,KAAK,QAAQ;AAC3C,QAAI,OAAO,mBAAmB,SAAS,KAAK;AAC5C,WAAO;MACN,EAAE,CAAuB,GAC5B;GAIF;GACD;;AAGH,KAAI,2BAA2B,MAAM;EACnC,MAAM,wBACJ,KAGA;AAEF,MAAI,CAAC,sBACH,QAAO,EAAE;AAQX,SAAO,EACL,cANmB,OAAO,QAAQ,sBAAsB,CAAC,QAAQ,KAAK,CAAC,KAAK,WAAW;AACvF,OAAI,OAAO,mBAAmB,MAAM;AACpC,UAAO;KACN,EAAE,CAAuB,EAI3B;;AAGH,QAAO,EAAE;;;;;;;AAQX,SAAS,0BAA0B,YAA4B;AAC7D,QAAO,KAAK,KAAK,KAAK,QAAQ,KAAK,QAAQ,WAAW,CAAC,EAAE,cAAc;;;;;;;AAkBzE,eAAsB,kBAAkB,SAAkD;CACxF,MAAM,EAAE,QAAQ,eAAe;AAC/B,KAAI;EACF,MAAM,EAAE,cAAc,kBAAkB,4BAA4B,OAAO;AAC3E,MAAI,CAAC,gBAAgB,CAAC,cACpB,QAAO,KAAK,wCAAwC,EAAE,MAAM,SAAS,CAAC;AAGxE,MAAI,aACF,QAAO,MAAM,2BAA2B,KAAK,UAAU,aAAa,GAAG;AAEzE,MAAI,cACF,QAAO,MAAM,4BAA4B,KAAK,UAAU,cAAc,GAAG;EAG3E,MAAM,MAAM,OAAO;AACnB,MAAI,IACF,QAAO,MAAM,kBAAkB,KAAK,UAAU,IAAI,GAAG;EAIvD,MAAM,iBAAiB,uBAAuB,cAAc,eAAe,IAAI;EAC/E,MAAM,aAAa,0BAA0B,WAAW;AAGxD,OAAG,UAAU,KAAK,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AAC3D,OAAG,cAAc,YAAY,eAAe;EAC5C,MAAM,eAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,WAAW;AAC7D,SAAO,SAAS;AAChB,SAAO,QAAQ,+BAA+B,gBAAgB,EAC5D,MAAM,SACP,CAAC;UACK,OAAO;AACd,SAAO,MAAM,yBAAyB;AACtC,SAAO,MAAM,OAAO,MAAM,CAAC;;;;;;;;;;;;;;ACzH/B,SAAgB,gCAAgC,QAIxB;CACtB,MAAM,uBAAO,IAAI,KAAqB;AACtC,KAAI,OAAO,gBACT,MAAK,IAAI,WAAW;AAEtB,KAAI,OAAO,gBACT,MAAK,IAAI,WAAW;AAEtB,KAAI,OAAO,gBACT,MAAK,IAAI,WAAW;AAEtB,QAAO;;;;;;;;;;AAWT,SAAgB,mBAAmB,QAIvB;AACV,QAAO,CAAC,EAAE,OAAO,mBAAmB,OAAO,mBAAmB,OAAO;;;;;;;;AC1EvE,IAAa,gBAAb,MAA2B;CACzB,AAAQ,0BAA+B,IAAI,KAAK;CAChD,AAAQ,qBAA2C,EAAE;CACrD,AAAQ,iBAA4C,EAAE;CACtD,AAAQ,6CAA0C,IAAI,KAAK;CAC3D,AAAQ,iDAA8C,IAAI,KAAK;;CAG/D,AAAQ,sBAAgC,EAAE;CAE1C,YAAY,UAAoB,EAAE,EAAE;AAClC,OAAK,MAAM,UAAU,SAAS;AAC5B,OAAI,KAAK,QAAQ,IAAI,OAAO,GAAG,CAC7B,OAAM,IAAI,MACR,wBAAwB,OAAO,G
AAG,gDACnC;AAEH,QAAK,QAAQ,IAAI,OAAO,IAAI,OAAO;;;;;;;;;CAUvC,MAAM,kBAAkB,SAAqE;EAC3F,MAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ,SAAS;AACjD,MAAI,CAAC,OACH,QAAO;GACL,SAAS;GACT,OAAO,WAAW,QAAQ,SAAS;GACpC;EAGH,MAAM,qBAAqB,OAAO;AAKlC,OAHE,OAAO,uBAAuB,aAC1B,mBAAmB,OAAO,aAAa,GACvC,uBAAuB,UACJ,QAAQ,eAAe,UAAa,QAAQ,eAAe,MAClF,QAAO;GACL,SAAS;GACT,OAAO,WAAW,OAAO,GAAG,yDAAyD,QAAQ,KAAK,KAAK;GACxG;AAIH,MAAI,CAAC,OAAO,aACV,QAAO;GACL,SAAS;GACT,OAAO,WAAW,OAAO,GAAG;GAC7B;EAIH,IAAI;AACJ,MAAI;AACF,YAAS,MAAM,OAAO,aAAa;IACjC,MAAM,QAAQ;IACd,YAAY,QAAQ;IACpB,cAAc,OAAO;IACrB,WAAW,QAAQ;IACpB,CAAC;WACK,OAAO;GACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,UAAO;IACL,SAAS;IACT,OAAO,WAAW,OAAO,GAAG,0CAA0C,QAAQ,KAAK,KAAK,KAAK;IAC9F;;AAIH,MAAI,OAAO,SAAS,OAAO,KAAK,OAAO,MAAM,CAAC,SAAS,GAAG;GAExD,MAAM,aAAa,OAAO;AAC1B,QAAK,MAAM,CAAC,MAAM,SAAS,OAAO,QAAQ,OAAO,MAAM,CACrD,MAAK,eAAe,KAAK;IACvB,UAAU,QAAQ;IAClB,kBAAkB;IAClB,gBAAgB,QAAQ,KAAK;IAC7B;IACA;IACA,WAAW,QAAQ;IACnB,cAAc,OAAO;IACtB,CAAC;;AAKN,MAAI,OAAO,aAAa,OAAO,UAAU,SAAS,EAChD,MAAK,MAAM,YAAY,OAAO,UAC5B,MAAK,mBAAmB,KAAK;GAC3B;GACA,UAAU,QAAQ;GAClB,WAAW,QAAQ;GACnB,gBAAgB,QAAQ,KAAK;GAC9B,CAAC;AAIN,SAAO;GAAE,SAAS;GAAM;GAAQ;;;;;;;;CASlC,MAAM,wBACJ,WACuF;EACvF,MAAM,UACJ,EAAE;AAEJ,OAAK,MAAM,CAAC,UAAU,WAAW,KAAK,SAAS;AAE7C,OAAI,CAAC,OAAO,kBACV;GAIF,MAAM,SAAS,OAAO;GAGtB,MAAM,UAAyC;IAC7C,cAAc;IACd;IACD;GAED,IAAI;AACJ,OAAI;AACF,aAAS,MAAM,OAAO,kBAAkB,QAAQ;YACzC,OAAO;IACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,YAAQ,KAAK;KACX;KACA;KACA,QAAQ;MACN,SAAS;MACT,OAAO,WAAW,OAAO,GAAG,oDAAoD,UAAU,KAAK;MAChG;KACF,CAAC;AACF;;AAIF,OAAI,OAAO,aAAa,OAAO,UAAU,SAAS,EAChD,MAAK,MAAM,YAAY,OAAO,WAAW;IACvC,MAAM,cAAc,GAAG,SAAS,GAAG,SAAS;AAC5C,QAAI,KAAK,+BAA+B,IAAI,YAAY,CACtD;AAEF,SAAK,+BAA+B,IAAI,YAAY;AACpD,SAAK,mBAAmB,KAAK;KAC3B;KACA;KACA;KACD,CAAC;;AAKN,OAAI,OAAO,SAAS,OAAO,KAAK,OAAO,MAAM,CAAC,SAAS,GAAG;IAExD,MAAM,aAAa,OAAO;AAC1B,SAAK,MAAM,CAAC,MAAM,SAAS,OAAO,QAAQ,OAAO,MAAM,EAAE;KACvD,MAAM,UAAU,GAAG,SAAS,GAAG,KAAK,GAAG,KAAK;AAC5C,SAAI,KAAK,2BAA2B,IAAI,QAAQ,CAC9C;AAEF,UAAK,2BAA2B,IAAI,QAAQ;AAC5C
,UAAK,eAAe,KAAK;MACvB;MACA,kBAAkB;MAClB,gBAAgB;MAChB;MACA;MACA;MACA,cAAc,OAAO;MACtB,CAAC;;;AAIN,WAAQ,KAAK;IACX;IACA;IACA,QAAQ;KAAE,SAAS;KAAM;KAAQ;IAClC,CAAC;;AAGJ,SAAO;;;;;;CAOT,wBAAkC;AAChC,SAAO,MAAM,KAAK,KAAK,QAAQ,SAAS,CAAC,CACtC,QAAQ,GAAG,YAAY,OAAO,sBAAsB,OAAU,CAC9D,KAAK,CAAC,QAAQ,GAAG;;;;;;CAOtB,IAAI,cAAsB;AACxB,SAAO,KAAK,QAAQ;;;;;;;CAQtB,UAAU,UAAsC;AAC9C,SAAO,KAAK,QAAQ,IAAI,SAAS;;;;;;;CAQnC,oBAAoB,UAAsC;AACxD,SAAO,KAAK,QAAQ,IAAI,SAAS,EAAE;;;;;;CAOrC,8BAAiE;AAC/D,SAAO,KAAK;;;;;;CAOd,4CAAuF;AACrF,SAAO,KAAK,mBAAmB,KAAK,UAAU;GAC5C,GAAG;GACH,kBAAkB,KAAK,oBAAoB,KAAK,SAAS,IAAI;GAC9D,EAAE;;;;;;CAOL,0BAAkE;AAChE,SAAO,KAAK;;;;;;;CAQd,wCAAwC,WAAsD;AAC5F,SAAO,KAAK,mBAAmB,QAAQ,SAAS,KAAK,cAAc,UAAU;;;;;;CAO/E,gCAA0C;AACxC,SAAO,MAAM,KAAK,KAAK,QAAQ,QAAQ,CAAC,CAAC,QAAQ,WAAW,mBAAmB,OAAO,CAAC;;;;;;;CAQzF,gCAAgC,UAAuC;EACrE,MAAM,SAAS,KAAK,QAAQ,IAAI,SAAS;AACzC,MAAI,CAAC,OAAQ,wBAAO,IAAI,KAAK;AAC7B,SAAO,gCAAgC,OAAO;;;;;;;CAQhD,oBAAoB,QAA6C;EAC/D,MAAM,EAAE,WAAW,mBAAmB,YAAY,eAAe,sBAAsB;EAGvF,MAAM,uBAAuB,cAAc,KAAK,gBAAgB,UAAU;AAI1E,OAAK,sBAAsB,kBADH,KAAK,2CAA2C,EAGtE,WACA,sBACA,mBACA,WACD;AAED,SAAO,KAAK;;;;;;;;CASd,WAAW,QAA2C;EACpD,MAAM,EAAE,cAAc,cAAc,aAAa;EACjD,MAAM,qBAAqB,OAAO,KAAK,aAAa,OAAO;EAE3D,MAAM,kBADgB,OAAO,KAAK,aAAa,CACT,QAAQ,SAAS,mBAAmB,SAAS,KAAK,CAAC;AAEzF,MAAI,gBAAgB,SAAS,EAC3B,OAAM,IAAI,MACR,WAAW,SAAS,wDAAwD,aAAa,KAAK,KAAK,gBAAgB,KAAK,KAAK,CAAC,kDAE/H;EAQH,MAAM,EAAE,IAAI,KAAK,GAAG,oBALC;GACnB,GAAG,aAAa;GAChB,GAAG;GACJ;EAGD,MAAM,aAAa,aAAa,SAAS,UAAU;EACnD,MAAM,WAAW,aACZ,CAAC,aAAa,MAAM,WAAW,GAChC,aAAa;AAEjB,SAAO,2BAA2B,cADb,GAAG,KAAK,UAAU,gBAAgB,CACM;;;;;;;;;;AAkEjE,SAAS,2BACP,UACA,UACiB;CACjB,IAAI,SAAS;AAGb,KAAI,SAAS,aACX,UAAS,OAAO,YAAY,SAAS,aAAa;CAIpD,MAAM,WAAW,SAAS;AAC1B,KAAI,SAAS,SAAS,OAAO,KAAK,SAAS,MAAM,CAAC,SAAS,EACzD,UAAS,OAAO,MAAM,SAAS,MAAM;AAIvC,KAAI,SAAS,UAAU;EACrB,MAAM,EAAE,YAAY,aAAa,GAAG,aAAa,SAAS;AAC1D,MAAI,OAAO,KAAK,SAAS,CAAC,SAAS,EACjC,UAAS,OAAO,SACd,SACD;;AAOL,KAAI,SAAS,aAAa,OACxB,UAAS,OAAO,WAAW,SAAS,YAAY,OAA+B;AAEjF,KAAI,SAAS,aAAa,IACxB,UAAS,OAAO,cAAc,SAAS,YAA
Y,IAA+B;AAIpF,KAAI,SAAS,WAAW,OAAO,KAAK,SAAS,QAAQ,CAAC,SAAS,GAAG;EAChE,MAAM,YAAY,OAAO,QAAQ,SAAS,QAAQ,CAAC,KAAK,CAAC,MAAM,UAAU;GACvE;GAEA,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACb,EAAE;AACH,WAAS,OAAO,QAAQ,GAAG,UAAU;;AAIvC,KAAI,SAAS,WAAW,SAAS,QAAQ,SAAS,EAChD,MAAK,MAAM,UAAU,SAAS,QAE5B,UAAS,OAAO,OAAO,GACpB,OAAO,WAAW,OAAO,QAC3B,CAAwC;AAI7C,QAAO;;;;;;;;;;AC7gBT,SAAgB,gBAKd,OAAsC;CACtC,MAAM,UAAe,EAAE;CACvB,MAAM,UAAe,EAAE;CACvB,MAAM,UAAe,EAAE;CACvB,MAAM,WAAgB,EAAE;CAExB,MAAM,gBACJ,QAAQ,WAAW,KAAK,QAAQ,WAAW,KAAK,QAAQ,WAAW,KAAK,SAAS,WAAW;AAE9F,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA,aAAa;AACX,OAAI,SAAS,CACX;AAEF,UAAO,IAAI,OAAO,KAAK,GAAG,MAAM,GAAG,CAAC;AACpC,WAAQ,SAAS,SAAS,OAAO,IAAI,KAAK,QAAQ,OAAO,GAAG,KAAK,OAAO,CAAC;AACzE,WAAQ,SAAS,SAAS,OAAO,IAAI,KAAK,QAAQ,OAAO,GAAG,KAAK,OAAO,CAAC;AACzE,WAAQ,SAAS,SAAS,OAAO,IAAI,KAAK,QAAQ,OAAO,GAAG,KAAK,OAAO,CAAC;AACzE,YAAS,SAAS,SAAS,OAAO,IAAI,KAAK,QAAQ,QAAQ,GAAG,KAAK,OAAO,CAAC;;EAE9E;;;;;;;;;;ACrCH,SAAgB,UAAU,aAA6B;AACrD,QAAO,oBAAoB;;AAG7B,MAAa,kBAAkB;;;;;;;;AAS/B,eAAsB,iBACpB,OACA,SACA,gBAC4D;CAC5D,MAAM,cAAc,MAAM,iBAAiB;CAE3C,MAAM,aAAa,YAAY,UAC3B,IAAI,YAAY,QAAQ,QAAQ,OAAO,IAAI,KAC3C;AAEJ,QAAO;EACL;EACA,QAAQ;GACN,GAAI,kBAAkB,EAAE;IACvB,kBAAkB;GACnB,eAAe;GAChB;EACF;;;;;;;;;;;;AC3BH,eAAsB,iBACpB,QACA,WACA,QAAyD,iBACzD;AACA,KAAI,UAAU,gBAEZ,OAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,IAAI,OAAO,aAAW;AACzC,WAAO,QAAQ,OAAO,MAAM,yBAC1B,QACAC,SAAO,QAAQ,aACfA,SAAO,QAAQ,MACf,OACD;AACD,QAAM,OAAO,kBAAkBA,SAAO,QAAQ;AAC9C,QAAM,OAAO,YAAYA,SAAO,YAAY;GAC5C,EACF,GAAG,UAAU,QAAQ,IAAI,OAAO,WAAW;AACzC,SAAO,QAAQ,OAAO,MAAM,yBAC1B,QACA,OAAO,QAAQ,aACf,OAAO,QAAQ,MACf,OACD;AACD,QAAM,OAAO,kBAAkB,OAAO,QAAQ;AAC9C,QAAM,OAAO,YAAY,OAAO,YAAY;GAC5C,CACH,CAAC;UACO,UAAU,SAGnB,OAAM,QAAQ,IACZ,UAAU,QAAQ,IAAI,OAAO,QAAQ;AACnC,QAAM,OAAO,kBAAkB,IAAI,QAAQ;GAC3C,CACH;;AAqBL,SAASC,MAAI,aAAqB,MAAc;AAC9C,QAAO,oBAAoB,YAAY,eAAe;;;;;;;AAQxD,eAAsB,gBAAgB,SAAsB;CAC1D,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,YAAY,gBAChB,eACD;CAED,MAAM,uBAAuB,MAAM,SAAS,OAAO,WAAW,gBAAgB;AAC5E,MAAI;GACF,MAAM,EAAE,cAAc,kBAAkB,MAAM,OAAO,iBAAiB
;IACpE;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,cAAc,cAAc;WAC7B,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;AAEF,KAAI,YAAY;AACd,MAAI,qBAAqB,MAAM,QAAQ,IAAI,SAAS,YAAY,KAAK,CACnE,WAAU,QAAQ,KAAK;GACrB,MAAM,YAAY;GAClB,SAAS;IACP;IACA,iBAAiB,YAAY;IAC9B;GACF,CAAC;AAEJ,YAAU,OAAO;AACjB,SAAO;;AAKT,KAAI,YAAY,UAAU,WAAW,GAAG;AACtC,YAAU,OAAO;AACjB,SAAO;;CAGT,IAAI;CACJ,IAAI;AACJ,KAAI,YAAY,eAAe,YAAY,YAAY,QAAQ;AAC7D,kBAAgB,YAAY,YAAY,OAAO;EAE/C,MAAM,aAAa,YAAY,YAAY,OAAO;AAClD,MAAI,WACF,qBAAoB,WAAW;YAExB,YAAY,OAAO,MAAM;AAElC,kBAAgB,YAAY,OAAO,KAAK;EACxC,MAAM,aAAa,MAAM,SAAS,OAAO,WAAW,gBAAgB;AAClE,OAAI;IACF,MAAM,EAAE,0BAAY,kBAAkB,MAAM,OAAO,mBAAmB;KACpE;KACA,eAAe;KACf;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAACC,cAAY,cAAc;YAC3B,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;AACF,MAAI,WAAW,SAAS,EACtB,qBAAoB,WAAW,GAAG;;CAGtC,MAAM,cAAc,MAAM,iBAAiBD,MAAI,aAAa,YAAY,KAAK,EAAE,YAAY,KAAK;AAEhG,KAAI,qBAAqB,MAAM,QAAQ,IAAI,SAAS,YAAY,KAAK,CACnE,WAAU,QAAQ,KAAK;EACrB,MAAM,YAAY;EAClB,SAAS;GACP;GACA,iBAAiB,YAAY;GAC7B;GACA;GACA,MAAM,YAAY,OAAO;GACzB,WAAW,YAAY,UAAU,KAAK,aAAa,cAAc,SAAS,CAAC;GAC3E,oBAAoB,YAAY,OAAO;GACvC,sBAAsB,YAAY,OAAO;GAC1C;EACD;EACD,CAAC;KAEF,WAAU,QAAQ,KAAK;EACrB,MAAM,YAAY;EAClB,SAAS;GACP;GACA,iBAAiB,YAAY;GAC7B;GACA;GACA,MAAM,YAAY,OAAO;GACzB,WAAW,YAAY,UAAU,KAAK,aAAa,cAAc,SAAS,CAAC;GAC3E,oBAAoB,YAAY,OAAO;GACvC,sBAAsB,YAAY,OAAO;GAC1C;EACD;EACD,CAAC;AAGJ,WAAU,OAAO;AACjB,QAAO;;AAGT,SAAS,cACP,UACyC;CAEzC,IAAI;AACJ,SAAQ,SAAS,MAAjB;EACE,KAAK;AACH,iBAAc,qBAAqB;AACnC;EACF,KAAK;AACH,iBAAc,qBAAqB;AACnC;EACF,KAAK;AACH,iBAAc,qBAAqB;AACnC;EACF,KAAK;AACH,iBAAc,qBAAqB;AACnC;EACF,QACE,OAAM,IAAI,MAAM,0BAA0B,SAAS,OAAO;;AAE9D,QAAO;EACL;EACA,kBAAkB,SAAS;EAC5B;;;;;;;;;;;AC3MH,SAAgB,mBAAmB,eAAuB,YAAoB;AAC5E,QAAO,OAAO,cAAc,GAAG;;;;;;;;AASjC,SAAgB,oBAAoB,eAAuB,YAAoB;AAC7E,QAAO,iBAAiB,cAAc,GAAG;;;;;;;;;AAU3C,eAAsB,SACpB,QACA,QACA,QAAuC,iBACvC;CACA,MAAM,EAAE,cAAc;AACtB,KAAI,UAAU,iBAAiB;AAE7B,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,QAAQ,IAAI
,OAAO,aAAW;AACjD,SAAM,OAAO,iBAAiBE,SAAO,QAAQ;AAC7C,SAAM,OAAO,YAAYA,SAAO,YAAY;IAC5C,EACF,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,WAAW;AACjD,SAAM,OAAO,iBAAiB,OAAO,QAAQ;AAC7C,SAAM,OAAO,YAAY,OAAO,YAAY;IAC5C,CACH,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,OAAO,QAAQ,IAAI,OAAO,aAAW;GAChD,MAAM,OAAO,MAAM,OAAO,gBAAgBA,SAAO,QAAQ;GAGzD,MAAM,YAAY,mBAChBA,SAAO,QAAQ,eACfA,SAAO,QAAQ,QAAQ,QAAQ,GAChC;GACD,MAAM,aAAa,oBACjBA,SAAO,QAAQ,eACfA,SAAO,QAAQ,QAAQ,QAAQ,GAChC;AACD,SAAM,OAAO,yBAAyB;IACpC,aAAaA,SAAO,QAAQ;IAC5B,wBAAwB;IACzB,CAAC;AACF,SAAM,OAAO,0BAA0B;IACrC,aAAaA,SAAO,QAAQ;IAC5B,wBAAwB;IACxB,yBAAyB;IACzB,0BAA0B,KAAK,QAAQ;IACxC,CAAC;IACF,EACF,GAAG,UAAU,OAAO,QAAQ,IAAI,OAAO,WAAW;GAEhD,MAAM,YAAY,mBAAmB,OAAO,eAAe,OAAO,KAAK;GACvE,MAAM,aAAa,oBAAoB,OAAO,eAAe,OAAO,KAAK;AACzE,OAAI;AACF,UAAM,OAAO,sBAAsB;KACjC,aAAa,OAAO;KACpB,wBAAwB;KACzB,CAAC;AACF;YACO,OAAO;AACd,QAAI,EAAE,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,UACzD,OAAM;;AAGV,SAAM,OAAO,yBAAyB;IACpC,aAAa,OAAO;IACpB,wBAAwB;IACzB,CAAC;AACF,SAAM,OAAO,0BAA0B;IACrC,aAAa,OAAO;IACpB,wBAAwB;IACxB,yBAAyB;IACzB,0BAA0B,OAAO;IAClC,CAAC;IACF,CACH,CAAC;YACO,UAAU,mBAGnB,OAAM,QAAQ,IACZ,UAAU,OAAO,QAAQ,IAAI,OAAO,QAAQ;AAC1C,QAAM,OAAO,gBAAgB,IAAI,QAAQ;EAGzC,MAAM,YAAY,OAAO,IAAI,QAAQ,cAAc,GAAG,IAAI,QAAQ;AAClE,QAAM,OAAO,yBAAyB;GACpC,aAAa,IAAI,QAAQ;GACzB,wBAAwB;GACzB,CAAC;GACF,CACH;UACQ,UAAU,kBAEnB,OAAM,QAAQ,IAAI,UAAU,QAAQ,QAAQ,KAAK,QAAQ,OAAO,iBAAiB,IAAI,QAAQ,CAAC,CAAC;;;;;;;AASnG,eAAsB,QAAQ,SAAsB;CAClD,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,OAAO,aAAa,EAAE,GAAG,YAAY;CAC3C,MAAM,EACJ,WAAW,kBACX,WACA,WACA,mBACE,MAAMC,eAAa,QAAQ,aAAa,YAAY,MAAM,KAAK;CAEnE,MAAM,kBAAkB,MAAM,YAAY,QAAQ,aAAa,MADvC,iBAAiB,QAAQ,KAAK,QAAQ,IAAI,KAAK,CACc;AAErF,kBAAiB,OAAO;AACxB,iBAAgB,OAAO;AACvB,QAAO;EACL,WAAW;GACT,SAAS;GACT,QAAQ;GACT;EACD;EACA;EACA;EACD;;AAoBH,SAASC,MAAI,aAAqB,MAAc;AAC9C,QAAO,oBAAoB,YAAY,OAAO;;AAGhD,eAAeD,eACb,QACA,aACA,SACA,MACA;CACA,MAAM,YAAY,gBAA6D,eAAe;CAC9F,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAExC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACpE,MAAI;GACF,MAAM,EAAE,aAAa,
kBAAkB,MAAM,OAAO,gBAAgB;IAClE;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,aAAa,cAAc;WAC5B,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CACF,MAAM,mBAA6D,EAAE;AACrE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,MAAI,CAAC,SAAS,WAAW,KACvB;EAEF,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAKC,MAAI,aAAa,SAAS,UAAU,KAAK,EAC/C,CAAC;AACF,mBAAiB,SAAS,UAAU,QAAQ;GAC1C;GACA,OAAO,UAAU,OAAO;GACzB;GACD,CACH;AAED,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,gBAAgB,IAAI;EAC1B,MAAM,WAAW,iBAAiB;EAClC,MAAM,cAAc,MAAM,iBAAiBA,MAAI,aAAa,cAAc,EAAE,QAAQ;EACpF,IAAI;AACJ,UAAQ,IAAI,eAAZ;GACE,KAAK;AACH,oBAAgB;AAChB;GACF,KAAK;AACH,oBAAgB;AAChB;GACF;AACE,oBAAgB,IAAI,cAAc;AAClC;;EAGJ,MAAM,OAAO,YAAY,IAAI,KAAK;EAClC,MAAM,iBAAiB,IAAI;AAE3B,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,IAAI;IACnB,CAAC;YACO,SAAS,UAAU,QAC5B,WAAU,KAAK;IACb,cAAc;IACd,cAAc,IAAI;IAClB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM;IACN,SAAS;KACP;KACA;KACA;KACA;KACA;KACA,mBAAmB,IAAI;KACvB,sBAAsB,8BAA8B,IAAI,cAAc;KACvE;IACD;IACD,CAAC;AACF,UAAO,iBAAiB;QAExB,WAAU,QAAQ,KAAK;GACrB,MAAM;GACN,SAAS;IACP;IACA;IACA;IACA;IACA;IACA,mBAAmB,IAAI;IACvB,sBAAsB,8BAA8B,IAAI,cAAc;IACvE;GACD;GACD,CAAC;;AAGN,QAAO,QAAQ,iBAAiB,CAAC,SAAS,CAAC,mBAAmB;EAC5D,MAAM,QAAQ,iBAAiB,gBAAgB;AAC/C,MAAI,SAAS,UAAU,QACrB,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,QACZ,WAAU,QAAQ,KAAK;GACrB,MAAM;GACN,SAAS;IACP;IACA;IACD;GACF,CAAC;GAEJ;AAEF,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;AAoB5D,eAAe,YACb,QACA,aACA,MACA,iBACA;CACA,MAAM,YAAY,gBAA0D,cAAc;CAE1F,MAAM,gBAAgB,kBAA0B;AAC9C,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,SAAS,kBAAkB,MAAM,OAAO,eAAe;KAC7D;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,SAAS,cAAc;YACxB,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;CAGJ,MAAM,eAAe,MAAM,QAAQ,IAAI,KAAK,KAAK,QAAQ,aAAa,IAAI,KAAK,CAAC,CAAC;AACjF,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;EACpC,MAAM,MAAM,KAAK;EACjB,MAAM,gBAAgB,IAAI;EAC1B,MAAM,kBAAkB,aAAa;EACrC,MAAM,kCAAkB,IAAI,KAAqB;AACjD,kBAAgB,SAAS,aAAW;AAClC,m
BAAgB,IAAIC,SAAO,MAAMA,SAAO,aAAa;IACrD;AACF,OAAK,MAAM,QAAQ,IAAI,QACrB,KAAI,gBAAgB,IAAI,KAAK,EAAE;AAC7B,aAAU,QAAQ,KAAK;IACrB;IACA;IACA;IACA,cAAc,gBAAgB,IAAI,KAAK;IACxC,CAAC;AACF,mBAAgB,OAAO,KAAK;QAE5B,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA;IACA,QAAQ,EACN,MACD;IACF;GACF,CAAC;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA;KACA;KACD;IACF,CAAC;IACF;;CAGJ,MAAM,0BAA0B,MAAM,QAAQ,IAC5C,gBAAgB,KAAK,kBAAkB,aAAa,cAAc,CAAC,CACpE;AACD,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;EAC/C,MAAM,gBAAgB,gBAAgB;AACtC,0BAAwB,GAAG,SAAS,aAAW;AAC7C,aAAU,QAAQ,KAAK;IACrB,MAAMA,SAAO;IACb,SAAS;KACP;KACA;KACA,MAAMA,SAAO;KACd;IACF,CAAC;IACF;;AAEJ,QAAO;;AAGT,SAAS,YAAY,MAAyC;AAC5D,SAAQ,MAAR;EACE,KAAK,KACH,QAAO,QAAQ;EACjB,KAAK,KACH,QAAO,QAAQ;EACjB,QACE,QAAO,QAAQ;;;AAOrB,SAAS,8BACP,eACqC;AACrC,KAAI,CAAC,cACH;AAEF,QAAO;EACL,QAAQ,cAAc,WAAW;EACjC,QAAQ,cAAc,WAAW;EACjC,QAAQ,cAAc,WAAW;EACjC,MAAM,cAAc,SAAS;EAC7B,wBAAwB,cAAc,2BAA2B;EAClE;;;;;;;;;;;;ACjXH,eAAsB,UACpB,QACA,QACA,QAAuC,iBACvC;CACA,MAAM,EAAE,cAAc;AACtB,KAAI,UAAU,iBAAiB;AAE7B,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,aAAW;AACjD,SAAM,OAAO,kBAAkBC,SAAO,QAAQ;AAC9C,SAAM,OAAO,YAAYA,SAAO,YAAY;IAC5C,EACF,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,WAAW;AACjD,SAAM,OAAO,kBAAkB,OAAO,QAAQ;AAC9C,SAAM,OAAO,YAAY,OAAO,YAAY;IAC5C,CACH,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,UAAU,QAAQ,IAAI,OAAO,aAAW;AACnD,OAAIA,SAAO,UAAU,SAAS,aAC5B,UAAO,QAAQ,UAAW,SAAS,MAAM,sBACvC,QACAA,SAAO,QAAQ,aACfA,SAAO,UACR;AAEH,UAAO,OAAO,oBAAoBA,SAAO,QAAQ;IACjD,EACF,GAAG,UAAU,UAAU,QAAQ,IAAI,OAAO,WAAW;AACnD,OAAI,OAAO,UAAU,SAAS,aAC5B,QAAO,QAAQ,UAAW,SAAS,MAAM,sBACvC,QACA,OAAO,QAAQ,aACf,OAAO,UACR;AAEH,UAAO,OAAO,oBAAoB,OAAO,QAAQ;IACjD,CACH,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,kBAAkB,QAAQ,KAAK,aAC1C,OAAO,wBAAwBA,SAAO,QAAQ,CAC/C,EACD,GAAG,UAAU,kBAAkB,QAAQ,KAAK,WAC1C,OAAO,wBAAwB,OAAO,QAAQ,CAC/C,CACF,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,aAAa,QAAQ,KAAK,aAAW,OAAO,mBAAmBA,SAAO,QAAQ,CAAC,EAC5F,GAAG,UAAU,aAAa,QAAQ,KAAK,WAAW,OAAO,mBAAmB,OAAO,QAAQ,CAAC,CAC7F,CAAC;AAGF,QAAM,QAAQ,IAAI,C
AChB,GAAG,UAAU,YAAY,QAAQ,KAAK,aACpC,OAAO,sBAAsBA,SAAO,QAAQ,CAC7C,EACD,GAAG,UAAU,YAAY,QAAQ,KAAK,WACpC,OAAO,sBAAsB,OAAO,QAAQ,CAC7C,CACF,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,aAAa,QAAQ,IAAI,OAAO,aAAW;AACtD,YAAO,QAAQ,aAAc,eAAe,MAAM,yBAChD,QACAA,SAAO,QAAQ,aACfA,SAAO,QAAQ,aAAc,cAC7B,uBACD;AACD,UAAO,OAAO,uBAAuBA,SAAO,QAAQ;IACpD,EACF,GAAG,UAAU,aAAa,QAAQ,IAAI,OAAO,WAAW;AACtD,UAAO,QAAQ,aAAc,eAAe,MAAM,yBAChD,QACA,OAAO,QAAQ,aACf,OAAO,QAAQ,aAAc,cAC7B,uBACD;AACD,UAAO,OAAO,uBAAuB,OAAO,QAAQ;IACpD,CACH,CAAC;AAGF,OAAK,MAAM,WAAW,UAAU,aAAa,UAAU;AACrD,SAAM,OAAO,uBAAuB,QAAQ,cAAc;AAC1D,WAAQ,cAAc,aAAc,eAAe,MAAM,yBACvD,QACA,QAAQ,cAAc,aACtB,QAAQ,cAAc,aAAc,cACpC,uBACD;AACD,SAAM,OAAO,uBAAuB,QAAQ,cAAc;;AAI5D,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,KAAK,QAAQ,KAAK,aAAW,OAAO,qBAAqBA,SAAO,QAAQ,CAAC,EACtF,GAAG,UAAU,KAAK,QAAQ,KAAK,WAAW,OAAO,qBAAqB,OAAO,QAAQ,CAAC,CACvF,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,aAAa,QAAQ,KAAK,aACrC,OAAO,uBAAuBA,SAAO,QAAQ,CAC9C,EACD,GAAG,UAAU,aAAa,QAAQ,KAAK,WACrC,OAAO,uBAAuB,OAAO,QAAQ,CAC9C,CACF,CAAC;YACO,UAAU,oBAAoB;AAGvC,QAAM,QAAQ,IACZ,UAAU,aAAa,QAAQ,KAAK,QAAQ,OAAO,uBAAuB,IAAI,QAAQ,CAAC,CACxF;AAGD,QAAM,QAAQ,IACZ,UAAU,KAAK,QAAQ,KAAK,QAAQ,OAAO,qBAAqB,IAAI,QAAQ,CAAC,CAC9E;AAGD,QAAM,QAAQ,IACZ,UAAU,aAAa,QAAQ,KAAK,QAAQ,OAAO,uBAAuB,IAAI,QAAQ,CAAC,CACxF;AAGD,QAAM,QAAQ,IACZ,UAAU,YAAY,QAAQ,KAAK,QAAQ,OAAO,sBAAsB,IAAI,QAAQ,CAAC,CACtF;AAGD,QAAM,QAAQ,IACZ,UAAU,aAAa,QAAQ,KAAK,QAAQ,OAAO,mBAAmB,IAAI,QAAQ,CAAC,CACpF;AAGD,QAAM,QAAQ,IACZ,UAAU,kBAAkB,QAAQ,KAAK,QAAQ,OAAO,wBAAwB,IAAI,QAAQ,CAAC,CAC9F;AAGD,QAAM,QAAQ,IACZ,UAAU,UAAU,QAAQ,KAAK,QAAQ,OAAO,oBAAoB,IAAI,QAAQ,CAAC,CAClF;YACQ,UAAU,kBAEnB,OAAM,QAAQ,IACZ,UAAU,QAAQ,QAAQ,KAAK,QAAQ,OAAO,kBAAkB,IAAI,QAAQ,CAAC,CAC9E;;;;;;;AASL,eAAsB,SAAS,SAAsB;CACnD,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,QAAiC,EAAE;AACzC,KAAI,CAAC,cAAc,YAAY,aAAa;AAC1C,QAAM,YAAY,YAAY,mBAAmB;AACjD,QAAM,KAAK,YAAY,YAAY;;CAErC,MAAM,EACJ,WAAW,kBACX,WACA,WACA,mBACE,MAAMC,eAAa,QAAQ,aAAa,YAAY,MAAM,MAAM;CACpE,MAAM,kBAAkB,iBAAiB,QAAQ,KAAK,QAAQ,IAAI,KAAK;CACvE,MAAM,CACJ,oBACA,4BACA,uBACA,sBAC
A,uBACA,eACA,yBACE,MAAM,QAAQ,IAAI;EACpB,eAAe,QAAQ,aAAa,OAAO,gBAAgB;EAC3D,uBAAuB,QAAQ,aAAa,OAAO,gBAAgB;EACnE,kBAAkB,QAAQ,aAAa,OAAO,gBAAgB;EAC9D,iBAAiB,QAAQ,aAAa,OAAO,gBAAgB;EAC7D,kBAAkB,QAAQ,aAAa,OAAO,gBAAgB;EAC9D,gBAAgB,QAAQ,aAAa,OAAO,gBAAgB;EAC5D,kBAAkB,QAAQ,aAAa,OAAO,gBAAgB;EAC/D,CAAC;AAEF,kBAAiB,OAAO;AACxB,oBAAmB,OAAO;AAC1B,4BAA2B,OAAO;AAClC,uBAAsB,OAAO;AAC7B,sBAAqB,OAAO;AAC5B,uBAAsB,OAAO;AAC7B,eAAc,OAAO;AACrB,uBAAsB,OAAO;AAC7B,QAAO;EACL,WAAW;GACT,SAAS;GACT,WAAW;GACX,mBAAmB;GACnB,cAAc;GACd,aAAa;GACb,cAAc;GACd,MAAM;GACN,cAAc;GACf;EACD;EACA;EACA;EACD;;AAoBH,SAASC,MAAI,aAAqB,MAAc;AAC9C,QAAO,oBAAoB,YAAY,QAAQ;;AAGjD,eAAeD,eACb,QACA,aACA,SACA,OACA;CACA,MAAM,YAAY,gBAA6D,gBAAgB;CAC/F,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAExC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACpE,MAAI;GACF,MAAM,EAAE,cAAc,kBAAkB,MAAM,OAAO,iBAAiB;IACpE;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,cAAc,cAAc;WAC7B,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CACF,MAAM,mBAA6D,EAAE;AACrE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,MAAI,CAAC,SAAS,WAAW,KACvB;EAEF,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAKC,MAAI,aAAa,SAAS,UAAU,KAAK,EAC/C,CAAC;AACF,mBAAiB,SAAS,UAAU,QAAQ;GAC1C;GACA,OAAO,UAAU,OAAO;GACzB;GACD,CACH;AAED,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,WAAW,iBAAiB,OAAO;EACzC,MAAM,cAAc,MAAM,iBAAiBA,MAAI,aAAa,OAAO,KAAK,EAAE,QAAQ;AAClF,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,OAAO;IACtB,CAAC;YACO,SAAS,UAAU,QAC5B,WAAU,KAAK;IACb,cAAc;IACd,cAAc,OAAO;IACrB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM,OAAO;IACb,SAAS;KACP;KACA,eAAe,OAAO;KACtB,sBAAsB,OAAO;KAC9B;IACD;IACD,CAAC;AACF,UAAO,iBAAiB,OAAO;QAE/B,WAAU,QAAQ,KAAK;GACrB,MAAM,OAAO;GACb,SAAS;IACP;IACA,eAAe,OAAO;IACtB,sBAAsB,OAAO;IAC9B;GACD;GACD,CAAC;;AAGN,QAAO,QAAQ,iBAAiB,CAAC,SAAS,CAAC,mBAAmB;EAC5D,MAAM,QAAQ,iBAAiB,gBAAgB;AAC/C,MAAI,SAAS,UAAU,QACrB,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,QACZ,WAAU,QAAQ,KAAK;GACrB,MAAM;GACN,SAAS;IACP;IACA;IACD;GACF,CAAC;GAEJ;A
AEF,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;AAoB5D,eAAe,eACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAChB,kBACD;CAED,MAAM,mBAAmB,kBAA0B;AACjD,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,YAAY,kBAAkB,MAAM,OAAO,mBAAmB;KACpE;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,YAAY,cAAc;YAC3B,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;AAGJ,MAAK,MAAM,eAAe,OAAO;EAC/B,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,qBAAqB,MAAM,gBAAgB,OAAO,KAAK;EAC7D,MAAM,kCAAkB,IAAI,KAAa;AACzC,qBAAmB,SAAS,gBAAc;AACxC,mBAAgB,IAAIC,YAAU,KAAK;IACnC;EACF,MAAM,YAAY,OAAO;AACzB,MAAI,UACF,KAAI,gBAAgB,IAAI,UAAU,KAAK,EAAE;AACvC,aAAU,QAAQ,KAAK;IACrB,MAAM,UAAU;IAChB;IACA,SAAS;KACP;KACA,eAAe,OAAO;KACtB,WAAW,eAAe,UAAU;KACrC;IACF,CAAC;AACF,mBAAgB,OAAO,UAAU,KAAK;QAEtC,WAAU,QAAQ,KAAK;GACrB,MAAM,UAAU;GAChB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACtB,WAAW,eAAe,UAAU;IACrC;GACF,CAAC;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,OAAO;KACtB;KACD;IACF,CAAC;IACF;;AAGJ,MAAK,MAAM,iBAAiB,gBAE1B,EAD2B,MAAM,gBAAgB,cAAc,EAC5C,SAAS,cAAc;AACxC,YAAU,QAAQ,KAAK;GACrB,MAAM,UAAU;GAChB,SAAS;IACP;IACA;IACA,MAAM,UAAU;IACjB;GACF,CAAC;GACF;AAEJ,QAAO;;AAGT,SAAS,eAAe,WAA2E;AACjG,SAAQ,UAAU,MAAlB;EACE,KAAK,UACH,QAAO;GACL,MAAM,UAAU;GAChB,UAAU,uBAAuB;GACjC,QAAQ,EACN,QAAQ;IACN,MAAM;IACN,OAAO;KACL,aAAa,UAAU;KACvB,UAAU,UAAU;KACpB,WAAW,UAAU;KACrB,eAAe,UAAU;KAC1B;IACF,EACF;GACF;EACH,KAAK,OACH,QAAO;GACL,MAAM,UAAU;GAChB,UAAU,uBAAuB;GACjC,QAAQ,EACN,QAAQ;IACN,MAAM;IACN,OAAO;KACL,GAAI,UAAU,gBAAgB,SAC1B,EAAE,aAAa,UAAU,aAAa,GACtC,EAAE,aAAa,UAAU,aAAc;KAC3C,mBAAmB,UAAU;KAC9B;IACF,EACF;GACF;EACH,KAAK,OACH,QAAO;GACL,MAAM,UAAU;GAChB,UAAU,uBAAuB;GACjC,QAAQ,EACN,QAAQ;IACN,MAAM;IACN,OAAO;KACL,aAAa,UAAU;KACvB,iBAAiB;MACf,WAAW,UAAU,aAAa;MAClC,WAAW,UAAU,aAAa;MACnC;KACD,aAAa,UAAU;KACvB,WAAW,UAAU;KACrB,eAAe,UAAU;KAC1B;IACF,EACF;GACF;EACH,KAAK,aACH,QAAO;GACL,MAAM,UAAU;GAChB,UAAU,uBAAuB;GAEjC,QAAQ,EAAE;GACX;EACH,QACE,OAAM,IAAI,MAAM,wBAAwB,YAA4B;;;AAI1E,eAAe,sBACb,QACA,aACA,kBAC8D;CAC9D,IAAI;AACJ,KAAI;AACF,eAAa,MAAM,OAAO,
cAAc;GACtC;GACA,eAAe,iBAAiB;GACjC,CAAC;UACK,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MACR,iBAAiB,iBAAiB,UAAU,8DAC7C;AAEH,QAAM;;CAER,MAAM,YAAY,MAAM,OAAO,aAAa;EAC1C;EACA,eAAe,iBAAiB;EAChC,MAAM,iBAAiB;EACxB,CAAC;CACF,MAAM,YAAY,mBAAmB,iBAAiB,WAAW,iBAAiB,WAAW;CAC7F,MAAM,YAAY,oBAAoB,iBAAiB,WAAW,iBAAiB,WAAW;AAC9F,QAAO,EACL,QAAQ;EACN,MAAM;EACN,OAAO;GACL,aAAa,UAAU,QAAQ;GAC/B,iBAAiB;IACf;IACA;IACD;GACD,aAAa,WAAW,YAAY;GACpC,eAAe;GAChB;EACF,EACF;;AAkBH,eAAe,uBACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAIhB,0BAA0B;AAE5B,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,OAAO,GAAG,OAAO,KAAK;AAC5B,MAAI;AACF,SAAM,OAAO,qBAAqB;IAChC;IACA,eAAe,OAAO;IACvB,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,UAAU;IACjE,MAAM,uBAAuB,KAAK;AAClC,QAAI,qBACF,WAAU,QAAQ,KAAK;KACrB;KACA,SAAS;MACP;MACA,eAAe,OAAO;MACtB,2BAA2B,uBAAuB,qBAAqB;MACxE;KACF,CAAC;AAEJ;;AAEF,SAAM;;EAER,MAAM,uBAAuB,KAAK;AAClC,MAAI,qBACF,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACtB,2BAA2B,uBAAuB,qBAAqB;IACxE;GACF,CAAC;MAEF,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACvB;GACF,CAAC;;AAIN,MAAK,MAAM,iBAAiB,iBAAiB;AAC3C,MAAI;AACF,SAAM,OAAO,qBAAqB;IAChC;IACA;IACD,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD;AAEF,SAAM;;AAER,YAAU,QAAQ,KAAK;GACrB,MAAM,GAAG,cAAc;GACvB,SAAS;IACP;IACA;IACD;GACF,CAAC;;AAEJ,QAAO;;AAGT,SAAS,uBACP,aAC0D;CAE1D,MAAM,eAAe,YAAY,aAC7B,OAAO,YAAY,OAAO,KAAK,YAAY,WAAW,CAAC,KAAK,QAAQ,CAAC,KAAK,IAAI,CAAC,CAAC,GAChF;AAEJ,QAAO;EACL,UAAU;EACV,cAAc,kDAAkD;EAChE,QAAQ,EACN,QAAQ;GACN,MAAM;GACN,OAAO;IACL,WAAW,YAAY;IACvB,MAAM,YAAY,KAAK;IACvB,eAAe,YAAY;IAC3B,eAAe;IACf,kBAAkB,YAAY;IAC9B;IACD;GACF,EACF;EACF;;AAkBH,eAAe,kBACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAChB,qBACD;AAED,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,OAAO,GAAG,OAAO,KAAK;AAC5B,MAAI;AACF,SAAM,OAAO,gBAAgB;IAC3B;IACA,eAAe,OAAO;IACvB,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,UAAU;AACjE,QAAI,OAAO,eACT,WAAU,QAAQ,KAAK;KACrB;KACA,SAAS;MACP;MACA,eAAe,OAAO;MACtB,sBAAsB,kBAAkB,OAAO,eAAe
;MAC/D;KACF,CAAC;AAEJ;;AAEF,SAAM;;AAER,MAAI,OAAO,eACT,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACtB,sBAAsB,kBAAkB,OAAO,eAAe;IAC/D;GACF,CAAC;MAEF,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACvB;GACF,CAAC;;AAIN,MAAK,MAAM,iBAAiB,iBAAiB;AAC3C,MAAI;AACF,SAAM,OAAO,gBAAgB;IAC3B;IACA;IACD,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD;AAEF,SAAM;;AAER,YAAU,QAAQ,KAAK;GACrB,MAAM,GAAG,cAAc;GACvB,SAAS;IACP;IACA;IACD;GACF,CAAC;;AAEJ,QAAO;;AAGT,SAAS,kBACP,cACqD;AACrD,QAAO;EACL,cAAc,wCAAwC;EACtD,QAAQ,EACN,QAAQ;GACN,MAAM;GACN,OAAO;IACL,WAAW,aAAa;IACxB,MAAM,aAAa;IACnB,gBAAgB,aAAa;IAC9B;GACF,EACF;EACF;;AAkBH,eAAe,iBACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAChB,oBACD;CAED,MAAM,qBAAqB,kBAA0B;AACnD,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,cAAc,kBAAkB,MAAM,OAAO,qBAAqB;KACxE;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,cAAc,cAAc;YAC7B,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;AAGJ,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,uBAAuB,MAAM,kBAAkB,OAAO,KAAK;EACjE,MAAM,kCAAkB,IAAI,KAAa;AACzC,uBAAqB,SAAS,gBAAgB;AAC5C,mBAAgB,IAAI,YAAY,KAAK;IACrC;AACF,OAAK,MAAM,mBAAmB,OAAO,KAAK,OAAO,gBAAgB,EAAE,CAAC,EAAE;GACpE,MAAM,cAAc,OAAO,eAAe;AAC1C,OAAI,CAAC,YACH;AAEF,OAAI,gBAAgB,IAAI,gBAAgB,EAAE;AACxC,cAAU,QAAQ,KAAK;KACrB,MAAM;KACN,SAAS;MACP;MACA,eAAe,OAAO;MACtB,MAAM;MACN,YAAY,YAAY;MACxB,cAAc,YAAY,aACtB,6BAA6B,YAAY,WAAW,GACpD;MACL;KACF,CAAC;AACF,oBAAgB,OAAO,gBAAgB;SAEvC,WAAU,QAAQ,KAAK;IACrB,MAAM;IACN,SAAS;KACP;KACA,eAAe,OAAO;KACtB,MAAM;KACN,YAAY,YAAY;KACxB,cAAc,YAAY,aACtB,6BAA6B,YAAY,WAAW,GACpD;KACL;IACF,CAAC;;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,OAAO;KACtB;KACD;IACF,CAAC;IACF;;AAGJ,MAAK,MAAM,iBAAiB,gBAE1B,EAD6B,MAAM,kBAAkB,cAAc,EAC9C,SAAS,gBAAgB;AAC5C,YAAU,QAAQ,KAAK;GACrB,MAAM,YAAY;GAClB,SAAS;IACP;IACA,eAAe;IACf,MAAM,YAAY;IACnB;GACF,CAAC;GACF;AAEJ,QAAO;;AAGT,SAAS,6BACP,cACsD;CACtD,MAAM,MAA4D,EAAE;AACpE,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,aAAa,CACrD,KAAI,OAAO,SAAS,aA
Aa,SAAS,KAAK;AAEjD,QAAO;;AAwBT,eAAe,kBACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAKhB,qBAAqB;CAEvB,MAAM,sBAAsB,kBAA0B;AACpD,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,eAAe,kBAAkB,MAAM,OAAO,sBAAsB;KAC1E;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,eAAe,cAAc;YAC9B,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;AAGJ,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,wBAAwB,MAAM,mBAAmB,OAAO,KAAK;EACnE,MAAM,qCAAqB,IAAI,KAA0C;AACzE,wBAAsB,SAAS,iBAAiB;AAC9C,sBAAmB,IAAI,aAAa,MAAM,aAAa,WAAW;IAClE;AACF,OAAK,MAAM,oBAAoB,OAAO,KAAK,OAAO,iBAAiB,EAAE,CAAC,EAAE;GACtE,MAAM,eAAe,OAAO,gBAAgB;AAC5C,OAAI,CAAC,aACH;GAEF,MAAM,kBAAkB,kBAAkB,kBAAkB,aAAa;AACzE,OAAI,mBAAmB,IAAI,iBAAiB,EAAE;AAE5C,QAD2B,mBAAmB,IAAI,iBAAiB,KACxC,gBAAgB,WAEzC,WAAU,SAAS,KAAK;KACtB,MAAM;KACN,eAAe;MACb;MACA,eAAe,OAAO;MACtB,MAAM;MACP;KACD,eAAe;MACb;MACA,eAAe,OAAO;MACtB,cAAc;MACf;KACF,CAAC;QAEF,WAAU,QAAQ,KAAK;KACrB,MAAM;KACN,SAAS;MACP;MACA,eAAe,OAAO;MACtB,cAAc;MACf;KACF,CAAC;AAEJ,uBAAmB,OAAO,iBAAiB;SAE3C,WAAU,QAAQ,KAAK;IACrB,MAAM;IACN,SAAS;KACP;KACA,eAAe,OAAO;KACtB,cAAc;KACf;IACF,CAAC;;AAGN,qBAAmB,SAAS,GAAG,SAAS;AACtC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,OAAO;KACtB;KACD;IACF,CAAC;IACF;;AAGJ,MAAK,MAAM,iBAAiB,gBAE1B,EAD8B,MAAM,mBAAmB,cAAc,EAC/C,SAAS,iBAAiB;AAC9C,YAAU,QAAQ,KAAK;GACrB,MAAM,aAAa;GACnB,SAAS;IACP;IACA;IACA,MAAM,aAAa;IACpB;GACF,CAAC;GACF;AAGJ,QAAO;;AAGT,SAAS,kBACP,kBACA,cACiD;CAEjD,MAAM,SAAS,mBAAmB,MAAM,aAAa;AAErD,QAAO;EACL,MAAM;EACN,aAAa,OAAO;EACpB,YAAY,OAAO,YAAY,KAAK,cAAc;AAChD,WAAQ,WAAR;IACE,KAAK,qBACH,QAAO,2BAA2B;IACpC,KAAK,gBACH,QAAO,2BAA2B;IACpC,QACE,OAAM,IAAI,MAAM,qCAAqC,YAA4B;;IAErF;EACF,cAAc,OAAO;EACrB,YACE;GACE,cAAc,4BAA4B;GAC1C,QAAQ,4BAA4B;GACpC,SAAS,4BAA4B;GACtC,CACD,OAAO,cAAc;EACvB,qBAAqB,OAAO;EAC5B,sBAAsB,OAAO;EAC7B,aAAa,OAAO;EACrB;;AAkBH,eAAe,gBACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAChB,mBACD;AAED,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,OAAO,GAAG,OAAO,KAAK;AAC5B,MAAI;AACF,SAAM,OAAO,kBAAkB;IAC7B;IACA,eAAe,
OAAO;IACvB,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,UAAU;AACjE,QAAI,OAAO,KACT,WAAU,QAAQ,KAAK;KACrB;KACA,SAAS;MACP;MACA,eAAe,OAAO;MACtB,YAAY,gBAAgB,OAAO,KAAK;MACzC;KACF,CAAC;AAEJ;;AAEF,SAAM;;AAER,MAAI,OAAO,KACT,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACtB,YAAY,gBAAgB,OAAO,KAAK;IACzC;GACF,CAAC;MAEF,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe,OAAO;IACvB;GACF,CAAC;;AAIN,MAAK,MAAM,iBAAiB,iBAAiB;AAC3C,MAAI;AACF,SAAM,OAAO,kBAAkB;IAC7B;IACA;IACD,CAAC;WACK,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD;AAEF,SAAM;;AAER,YAAU,QAAQ,KAAK;GACrB,MAAM,GAAG,cAAc;GACvB,SAAS;IACP;IACA;IACD;GACF,CAAC;;AAEJ,QAAO;;AAGT,SAAS,gBAAgB,YAAuE;CAC9F,IAAI;AACJ,SAAQ,WAAW,cAAc,MAAjC;EACE,KAAK;AACH,uBAAoB,iCAAiC;AACrD;EACF,KAAK;AACH,uBAAoB,iCAAiC;AACrD;EACF,QACE,OAAM,IAAI,MACR,oCAAoC,WAAW,cAAc,OAC9D;;AAGL,QAAO;EACL,iBAAiB,WAAW;EAC5B;EACA,qBAAqB;GACnB,MAAM;GACN,OAAO;IACL,WAAW,WAAW,cAAc,cAAc;IAClD,WAAW,WAAW,cAAc,cAAc;IACnD;GACF;EACF;;AAkBH,eAAe,kBACb,QACA,aACA,OACA,iBACA;CACA,MAAM,YAAY,gBAChB,qBACD;CAED,MAAM,qBAAqB,OAAO,kBAA0B;AAC1D,MAAI;GACF,MAAM,EAAE,kBAAkB,MAAM,OAAO,qBAAqB;IAC1D;IACA;IACD,CAAC;AACF,UAAO;WACA,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,EAAE;AAEX,SAAM;;;AAIV,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,EAAE,cAAc,WAAW;EACjC,MAAM,wBAAwB,MAAM,mBAAmB,OAAO,KAAK;EACnE,MAAM,kCAAkB,IAAI,KAAa;AACzC,wBAAsB,SAAS,iBAAiB;AAC9C,mBAAgB,IAAI,aAAa,KAAK;IACtC;AACF,OAAK,MAAM,gBAAgB,OAAO,MAAM,aAAa,EAAE,CACrD,KAAI,gBAAgB,IAAI,aAAa,KAAK,EAAE;AAC1C,aAAU,QAAQ,KAAK;IACrB,MAAM,aAAa;IACnB,SAAS;KACP;KACA,eAAe,OAAO;KACtB,cAAc,kBAAkB,aAAa;KAC9C;IACF,CAAC;AACF,mBAAgB,OAAO,aAAa,KAAK;QAEzC,WAAU,QAAQ,KAAK;GACrB,MAAM,aAAa;GACnB,SAAS;IACP;IACA,eAAe,OAAO;IACtB,cAAc,kBAAkB,aAAa;IAC9C;GACF,CAAC;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,OAAO;KACtB;KACD;IACF,CAAC;IACF;;AAGJ,MAAK,MAAM,iBAAiB,gBAE1B,EAD8B,MAAM,mBAAmB,cAAc,EAC/C,SAAS,iBAAiB;AAC9C,YAAU,QAAQ,KAAK;GACrB,MAAM,aAAa;GACnB,SAAS;IACP;IACA;IACA,MAAM,aAAa;IACpB;GACF,CAAC;GACF;AAEJ,QAAO;;AAGT,SAAS,kBACP,cACiD;AAC
jD,QAAO;EACL,MAAM,aAAa;EACnB,mBAAmB,aAAa;EAChC,cAAc,aAAa;EAC3B,YAAY;GACV,MAAM,aAAa,WAAW;GAC9B,YAAY,aAAa,WAAW,WAAW,KAAK,SAAS,mBAAmB,KAAK,CAAC;GACvF;EACD,kBAAkB,aAAa,iBAAiB,KAAK,UAAU;GAC7D,eAAe,KAAK;GACpB,UAAU,KAAK;GAChB,EAAE;EACJ;;AAGH,SAAS,mBAAmB,MAAuE;CACjG,IAAI;AACJ,SAAQ,KAAK,MAAb;EACE,KAAK;AACH,SAAM,uBAAuB;AAC7B;EACF,KAAK;AACH,SAAM,uBAAuB;AAC7B;EACF,KAAK;AACH,SAAM,uBAAuB;AAC7B;EACF,KAAK;AACH,SAAM,uBAAuB;AAC7B;EACF,KAAK;AACH,SAAM,uBAAuB;AAC7B;EACF,QACE,OAAM,IAAI,MAAM,gCAAgC,KAAK,OAAuB;;CAEhF,IAAI;AACJ,KAAI,KAAK,WACP,SAAQ,KAAK,YAAb;EACE,KAAK;AACH,gBAAa,6BAA6B;AAC1C;EACF,KAAK;AACH,gBAAa,6BAA6B;AAC1C;EACF,KAAK;AACH,gBAAa,6BAA6B;AAC1C;EACF,QACE,OAAM,IAAI,MAAM,sCAAsC,KAAK,aAA6B;;CAG9F,IAAI;AACJ,KAAI,KAAK,WACP,SAAQ,KAAK,YAAb;EACE,KAAK;AACH,gBAAa,6BAA6B;AAC1C;EACF,KAAK;AACH,gBAAa,6BAA6B;AAC1C;EACF,KAAK;AACH,gBAAa,6BAA6B;AAC1C;EACF,QACE,OAAM,IAAI,MAAM,sCAAsC,KAAK,aAA6B;;AAG9F,QAAO;EACL,MAAM;EACN,MAAM,KAAK;EACX,aAAa,KAAK;EAClB;EACA,UAAU,KAAK;EACf,aAAa,KAAK;EAClB;EACA,iBAAiB,KAAK,mBAAmB;EACzC,eAAe,KAAK,eAAe,KAAK,WAAS,mBAAmBC,OAAK,CAAC;EAC3E;;;;;;;;;;;;AC/5CH,eAAsB,qBACpB,WACA,SACA,KACe;AACf,KAAI,UAAU,WAAW,EAAG;CAE5B,MAAM,gBAAgB,CAAC,GAAG,IAAI,IAAI,UAAU,KAAK,MAAM,EAAE,aAAa,CAAC,CAAC;AAExE,QAAO,KAAK,sCAAsC;AAElD,QAAO,IACL,KAAK,OAAO,QAAQ,yBAAyB,CAAC,IAAI,cAAc,KAAK,MAAM,OAAO,KAAK,IAAI,EAAE,GAAG,CAAC,CAAC,KAAK,KAAK,GAC7G;AACD,QAAO,IAAI,KAAK,OAAO,QAAQ,kBAAkB,CAAC,WAAW,OAAO,KAAK,IAAI,QAAQ,GAAG,GAAG;AAC3F,QAAO,SAAS;AAChB,QAAO,IAAI,KAAK,OAAO,KAAK,YAAY,CAAC,GAAG;AAC5C,MAAK,MAAM,KAAK,UACd,QAAO,IAAI,SAAS,OAAO,KAAK,EAAE,aAAa,CAAC,GAAG,OAAO,KAAK,IAAI,EAAE,aAAa,GAAG,GAAG;AAG1F,KAAI,KAAK;AACP,SAAO,QAAQ,gDAAgD,EAC7D,MAAM,SACP,CAAC;AACF;;CAGF,MAAM,gBACJ,cAAc,WAAW,IACrB,4CAA4C,QAAQ,MAAM,OAAO,IAAI,0CAA0C,KAC/G,4CAA4C,QAAQ;AAK1D,KAAI,CAJc,MAAM,OAAO,OAAO,eAAe;EACnD,MAAM;EACN,SAAS;EACV,CAAC,CAEA,OAAM,IAAI,MAAM,EAAE;;;MAGhB;;;;;;;;;AAWN,eAAsB,0BACpB,WACA,SACA,KACe;AACf,KAAI,UAAU,WAAW,EAAG;AAE5B,QAAO,KAAK,2DAA2D;AAEvE,QAAO,IAAI,KAAK,OAAO,KAAK,YAAY,CAAC,GAAG;AAC5C,MAAK,MAAM,KAAK,UACd,QAAO,IAAI,SAAS,
OAAO,KAAK,EAAE,aAAa,CAAC,GAAG,OAAO,KAAK,IAAI,EAAE,aAAa,GAAG,GAAG;AAE1F,QAAO,SAAS;AAChB,QAAO,IAAI,oFAAoF;AAC/F,QAAO,IAAI,6DAA6D;AACxE,QAAO,IACL,iGACD;AAED,KAAI,KAAK;AACP,SAAO,QAAQ,cAAc,QAAQ,8BAA8B,EACjE,MAAM,SACP,CAAC;AACF;;AAOF,KAAI,CAJc,MAAM,OAAO,OAC7B,mDAAmD,QAAQ,KAC3D;EAAE,MAAM;EAAW,SAAS;EAAO,CACpC,CAEC,OAAM,IAAI,MAAM,EAAE;;;MAGhB;;;;;;;;AAeN,eAAsB,iCACpB,WACA,KACe;AACf,KAAI,UAAU,WAAW,EAAG;AAE5B,QAAO,KAAK,2CAA2C;AAEvD,QAAO,IAAI,KAAK,OAAO,KAAK,YAAY,CAAC,GAAG;AAC5C,MAAK,MAAM,KAAK,UACd,QAAO,IAAI,SAAS,OAAO,KAAK,EAAE,aAAa,CAAC,GAAG,OAAO,MAAM,IAAI,EAAE,aAAa,GAAG,GAAG;AAE3F,QAAO,SAAS;AAChB,QAAO,IACL,OAAO,QAAQ,0EAA0E,CAC1F;AAED,KAAI,KAAK;AACP,SAAO,QAAQ,gDAAgD,EAC7D,MAAM,SACP,CAAC;AACF;;AAOF,KAAI,CAJc,MAAM,OAAO,OAAO,oDAAoD;EACxF,MAAM;EACN,SAAS;EACV,CAAC,CAEA,OAAM,IAAI,MAAM,EAAE;;;MAGhB;;;;;;;;;;;;;;;;;;;;;;;;AChIN,MAAM,uBACJ;;;;;;;;;;;;;AAeF,SAAgB,sBACd,aACA,KACQ;CACR,MAAM,UAAU,QAAQ,KAAK,UAAU,IAAI;AAE3C,SAAQ,aAAR;EAEE,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK,yBACH,QAAO,iDAAiD,qBAAqB,IAAI,QAAQ;EAG3F,KAAK,mBACH,QAAO,iDAAiD,qBAAqB,mGAAmG,QAAQ;EAG1L,KAAK,kBACH,QAAO,yEAAyE,QAAQ;EAE1F,QACE,OAAM,IAAI,MAAM,6CAA6C,cAA8B;;;;;;;;;;;;;;AAmBjG,SAAgB,+BACd,KACQ;AACR,QAAO,sDAAsD,cAAc,SAAS,KAAK,UAAU,IAAI,CAAC;;;;;ACzE1G,MAAM,aAAa,KAAK;;;;;;AA+BxB,SAAS,mBAAmB,SAAyB;AACnD,QAAO,OAAO,WAAW,SAAS,CAAC,OAAO,SAAS,QAAQ,CAAC,OAAO,MAAM;;AAG3E,SAAS,oBAAoB,aAAqB,MAAc;AAC9D,QAAO,oBAAoB,YAAY,qBAAqB;;;;;;;;AAS9D,SAAgB,qBAAqB,WAAmB,cAA8B;AACpF,QAAO,aAAa,UAAU,IAAI;;;;;;;AAQpC,SAAgB,qBAAqB,cAA8B;AACjE,QAAO,aAAa;;;;;;;AAQtB,SAAgB,wBAAwB,SAAyB;AAC/D,QAAO,aAAa;;;;;;;;AAStB,SAAgB,uBACd,aACA,cACiB;CACjB,MAAM,UAA2B,EAAE;CACnC,MAAM,UAAU,YAAY;AAG5B,MAAK,MAAM,OAAO,YAAY,aAC5B,MAAK,MAAM,YAAY,IAAI,iBACzB,MAAK,MAAM,YAAY,OAAO,OAAO,SAAS,UAAU,EAAE;EACxD,MAAM,aAAa,KAAK,KAAK,SAAS,aAAa,GAAG,SAAS,KAAK,KAAK;AACzE,MAAI;GACF,MAAM,UAAUC,KAAG,aAAa,YAAY,QAAQ;AACpD,WAAQ,KAAK;IACX,MAAM,qBAAqB,SAAS,WAAW,SAAS,KAAK;IAC7D,eAAe;IACf,aAAa,mBAAmB,QAAQ;IACxC,aAAa,aAAa,SAAS,UAAU,GAAG,SAAS;I
AC1D,CAAC;UACI;AACN,UAAO,KAAK,4BAA4B,aAAa;;;AAO7D,KAAI,YAAY,iBAAiB;EAC/B,MAAM,YAAY,YAAY,gBAAgB;AAC9C,OAAK,MAAM,YAAY,OAAO,OAAO,UAAU,CAC7C,KAAI,SAAS,UAAU,SAAS,cAAc,SAAS,UAAU,SAAS,eAAe;GACvF,MAAM,aAAa,KAAK,KAAK,SAAS,aAAa,GAAG,SAAS,KAAK,KAAK;AACzE,OAAI;IACF,MAAM,UAAUA,KAAG,aAAa,YAAY,QAAQ;AACpD,YAAQ,KAAK;KACX,MAAM,qBAAqB,SAAS,KAAK;KACzC,eAAe;KACf,aAAa,mBAAmB,QAAQ;KACxC,aAAa,aAAa,SAAS;KACpC,CAAC;WACI;AACN,WAAO,KAAK,4BAA4B,aAAa;;;;AAO7D,MAAK,MAAM,OAAO,cAAc;EAC9B,MAAM,aAAa,KAAK,KAAK,SAAS,iBAAiB,GAAG,IAAI,KAAK,KAAK;AACxE,MAAI;GACF,MAAM,UAAUA,KAAG,aAAa,YAAY,QAAQ;AACpD,WAAQ,KAAK;IACX,MAAM,wBAAwB,IAAI,KAAK;IACvC,eAAe;IACf,aAAa,mBAAmB,QAAQ;IACxC,aAAa,iBAAiB,IAAI;IACnC,CAAC;UACI;AACN,UAAO,KAAK,4BAA4B,aAAa;;;AAIzD,QAAO;;;;;;;;;;AAgBT,eAAsB,qBACpB,QACA,aACA,SACA,SACA;CACA,MAAM,YAAY,gBAChB,oBACD;CACD,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAGxC,MAAM,oBAAoB,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACzE,MAAI;GACF,MAAM,WAAW,MAAM,OAAO,uBAAuB;IACnD;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CACL,SAAS,UAAU,KAChB,OAAyB;IACxB,MAAM,EAAE;IACR,aAAa,EAAE;IAChB,EACF,EACD,SAAS,cACV;WACM,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CAGF,MAAM,cAA2C,EAAE;AACnD,OAAM,QAAQ,IACZ,kBAAkB,IAAI,OAAO,SAAS;EACpC,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAK,oBAAoB,aAAa,KAAK,KAAK,EACjD,CAAC;AACF,cAAY,KAAK,QAAQ;GACvB,UAAU;GACV,OAAO,UAAU,OAAO;GACzB;GACD,CACH;AAGD,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,WAAW,YAAY,MAAM;EACnC,MAAM,cAAc,MAAM,iBACxB,oBAAoB,aAAa,MAAM,KAAK,EAC5C,QACD;AAED,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,MAAM;IACrB,CAAC;YACO,SAAS,UAAU,QAC5B,WAAU,KAAK;IACb,cAAc;IACd,cAAc,MAAM;IACpB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM,MAAM;IACZ;IACA;IACD,CAAC;AACF,UAAO,YAAY,MAAM;QAEzB,WAAU,QAAQ,KAAK;GACrB,MAAM,MAAM;GACZ;GACA;GACD,CAAC;;AAKN,MAAK,MAAM,CAAC,MAAM,aAAa,OAAO,QAAQ,YAAY,EAAE;AAC1D,MAAI,CAAC,SAAU;EACf,MAAM,QAAQ,SAAS;AACvB,MAAI,SAAS,UAAU,QACrB,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,QACZ,WAAU,QAAQ,KAAK;GACrB;GACA;GACD,CAAC;;AAIN,WAAU,O
AAO;AACjB,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;;;;;;;;AAU5D,eAAe,qBACb,QACA,aACA,OACA,UACA;CACA,MAAM,SAAS,OAAO,KAAK,MAAM,eAAe,QAAQ;CAExD,MAAM,OAAO;EACX;EACA,MAAM,MAAM;EACZ,aAAa,MAAM;EACnB,WAAW,OAAO,OAAO,OAAO;EAChC,aAAa,MAAM;EACpB;AAED,KAAI,UAAU;;EAEZ,gBAAgB,eAEd;AACA,SAAM,EAAE,SAAS;IAAE,MAAM;IAAiB,OAAO;IAAM,EAAE;AACzD,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,WACtC,OAAM,EACJ,SAAS;IACP,MAAM;IACN,OAAO,OAAO,SAAS,GAAG,KAAK,IAAI,IAAI,YAAY,OAAO,OAAO,CAAC;IACnE,EACF;;AAGL,QAAM,OAAO,uBAAuB,cAAc,CAAC;QAC9C;;EAEL,gBAAgB,eAEd;AACA,SAAM,EAAE,SAAS;IAAE,MAAM;IAAiB,OAAO;IAAM,EAAE;AACzD,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,WACtC,OAAM,EACJ,SAAS;IACP,MAAM;IACN,OAAO,OAAO,SAAS,GAAG,KAAK,IAAI,IAAI,YAAY,OAAO,OAAO,CAAC;IACnE,EACF;;AAGL,QAAM,OAAO,uBAAuB,cAAc,CAAC;;;;;;;;;;AAWvD,eAAsB,sBACpB,QACA,aACA,QACA,QAAyD,iBACzD;CACA,MAAM,EAAE,cAAc;AACtB,KAAI,UAAU,iBAAiB;AAE7B,OAAK,MAAMC,YAAU,UAAU,SAAS;AACtC,SAAM,qBAAqB,QAAQ,aAAaA,SAAO,OAAO,KAAK;AACnE,SAAM,OAAO,YAAYA,SAAO,YAAY;;AAI9C,OAAK,MAAM,UAAU,UAAU,SAAS;AACtC,SAAM,qBAAqB,QAAQ,aAAa,OAAO,OAAO,MAAM;AACpE,SAAM,OAAO,YAAY,OAAO,YAAY;;YAErC,UAAU,SACnB,OAAM,QAAQ,IACZ,UAAU,QAAQ,KAAK,QACrB,OAAO,uBAAuB;EAC5B,aAAa,IAAI;EACjB,MAAM,IAAI;EACX,CAAC,CACH,CACF;;;;;;;;;;;;ACrVL,eAAsB,cACpB,QACA,QACA,QAAyD,iBACzD;CACA,MAAM,EAAE,cAAc;AACtB,KAAI,UAAU,gBAEZ,OAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,IAAI,OAAO,aAAW;AACzC,QAAM,OAAO,uBAAuBC,SAAO,QAAQ;AACnD,QAAM,OAAO,YAAYA,SAAO,YAAY;GAC5C,EACF,GAAG,UAAU,QAAQ,IAAI,OAAO,WAAW;AACzC,QAAM,OAAO,uBAAuB,OAAO,QAAQ;AACnD,QAAM,OAAO,YAAY,OAAO,YAAY;GAC5C,CACH,CAAC;UACO,UAAU,SAGnB,OAAM,QAAQ,IAAI,UAAU,QAAQ,KAAK,QAAQ,OAAO,uBAAuB,IAAI,QAAQ,CAAC,CAAC;;AAqBjG,SAASC,MAAI,aAAqB,MAAc;AAC9C,QAAO,oBAAoB,YAAY,YAAY;;;;;;;AAQrD,eAAsB,aAAa,SAAsB;CACvD,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,YAAY,gBAAgE,YAAY;CAC9F,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAExC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACpE,MAAI;GACF,MAAM,EAAE,wBAAW,kBAAkB,MAAM,OAAO,sBAAsB;IACtE;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAACC,aAAW,cAAc;WAC1B,OAAO;AACd,OAAI,iBAAiB,g
BAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CACF,MAAM,oBAA8D,EAAE;AACtE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;EACnC,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAKD,MAAI,aAAa,SAAS,KAAK,EACrC,CAAC;AACF,oBAAkB,SAAS,QAAQ;GACjC;GACA,OAAO,UAAU,OAAO;GACzB;GACD,CACH;CAED,MAAM,YAAY,aAAa,EAAE,GAAK,MAAM,YAAY,iBAAiB,eAAe,IAAK,EAAE;AAC/F,MAAK,MAAM,YAAY,OAAO,OAAO,UAAU,EAAE;EAC/C,MAAM,WAAW,kBAAkB,SAAS;EAC5C,MAAM,cAAc,MAAM,iBAAiBA,MAAI,aAAa,SAAS,KAAK,EAAE,YAAY,KAAK;AAC7F,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACxB,CAAC;YACO,SAAS,UAAU,YAAY,KACxC,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACvB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM,SAAS;IACf,SAAS;KACP;KACA,UAAU,cAAc,YAAY,MAAM,UAAU,YAAY,IAAI;KACrE;IACD;IACD,CAAC;AACF,UAAO,kBAAkB,SAAS;QAElC,WAAU,QAAQ,KAAK;GACrB,MAAM,SAAS;GACf,SAAS;IACP;IACA,UAAU,cAAc,YAAY,MAAM,UAAU,YAAY,IAAI;IACrE;GACD;GACD,CAAC;;AAGN,QAAO,QAAQ,kBAAkB,CAAC,SAAS,CAAC,UAAU;EACpD,MAAM,QAAQ,kBAAkB,OAAO;AACvC,MAAI,SAAS,UAAU,YAAY,KACjC,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,YAAY,KACxB,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA;IACD;GACF,CAAC;GAEJ;AAEF,WAAU,OAAO;AACjB,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;AAG5D,SAAS,cACP,SACA,UACA,KACiD;CACjD,MAAM,UAAU,SAAS;CACzB,IAAI;CACJ,IAAI;CAEJ,MAAM,WAAW,sBAAsB,QAAQ,MAAM,IAAI;CAEzD,MAAM,YAAmD;EACvD,eAAe;EACf,eAAe;EACf,eAAe;EACf,kBAAkB;EAClB,gBAAgB;EAChB,gBAAgB;EAChB,gBAAgB;EAChB,uBAAuB;EACvB,0BAA0B;EAC1B,wBAAwB;EACzB;AACD,SAAQ,QAAQ,MAAhB;EACE,KAAK;AACH,iBAAc,oBAAoB;AAClC,mBAAgB,EACd,QAAQ;IACN,MAAM;IACN,OAAO;KACL,UAAU,QAAQ;KAClB,WAAW,QAAQ;KACpB;IACF,EACF;AACD;EACF,KAAK;EACL,KAAK;EACL,KAAK;AACH,iBAAc,oBAAoB;AAClC,mBAAgB,EACd,QAAQ;IACN,MAAM;IACN,OAAO;KACL,WAAW,UAAU,QAAQ;KAC7B,WAAW,EACT,MAAM,CACK,sBAAsB,QAAQ,SAAS,IAChD,GAAI,QAAQ,YACR,CAAU,IAAI,kBAAkB,QAAQ,UAAU,CAAC,IAAI,SAAS,GAAG,GACnE,EAAE,CACP,CAAC,KAAK,OAAO,EACf;KACF;IACF,EACF;AACD;EACF,KAAK;AACH,iBAAc,oBAAoB;AAClC,mBAAgB,EACd,QAAQ;IACN,MAAM;IACN,OAAO;KACL,WAAW,UAAU,QAAQ;KAC7B,WAAW,EACT,MAAM,CACK,0BAA0B,QAAQ,aAAa,IACxD,GAAI,QAAQ,YACR,CAAU,IAAI,kBAAkB
,QAAQ,UAAU,CAAC,IAAI,SAAS,GAAG,GACnE,EAAE,CACP,CAAC,KAAK,OAAO,EACf;KACF;IACF,EACF;AACD;EACF,KAAK;AACH,iBAAc,oBAAoB;AAClC,mBAAgB,EACd,QAAQ;IACN,MAAM;IACN,OAAO,EAAE;IACV,EACF;AACD;EACF,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;AACH,iBAAc,oBAAoB;AAClC,mBAAgB,EACd,QAAQ;IACN,MAAM;IACN,OAAO,EACL,WAAW,UAAU,QAAQ,OAC9B;IACF,EACF;AACD;EACF,QACE,OAAM,IAAI,MAAM,oBAAoB,UAA0B;;CAGlE,MAAM,SAAS,SAAS;CACxB,IAAI;CACJ,IAAI;AAEJ,SAAQ,OAAO,MAAf;EACE,KAAK;AACH,gBAAa,mBAAmB;AAChC,kBAAe,EACb,QAAQ;IACN,MAAM;IACN,OAAO;KACL,KAAK,EACH,MAAM,IAAI,kBAAkB,OAAO,IAAI,CAAC,IAAI,SAAS,IACtD;KACD,SAAS,OAAO,UACZ,OAAO,QAAQ,OAAO,QAAQ,CAAC,KAAK,CAAC,KAAK,OAAO;MAC/C,IAAI;AACJ,UAAI,OAAO,MAAM,SACf,SAAQ;OACN,MAAM;OACN,OAAO;OACR;UAED,SAAQ;OACN,MAAM;OACN,OAAO;QACL,WAAW,EAAE;QACb,WAAW,EAAE;QACd;OACF;AAEH,aAAO;OAAE;OAAK;OAAO;OACrB,GACF;KACJ,MAAM,OAAO,cACT,EACE,MAAM,IAAI,kBAAkB,OAAO,YAAY,CAAC,IAAI,SAAS,IAC9D,GACD;KACL;IACF,EACF;AACD;EAEF,KAAK;AACH,gBAAa,mBAAmB;AAChC,kBAAe,EACb,QAAQ;IACN,MAAM;IACN,OAAO;KACL,SAAS,OAAO,WAAW;KAC3B,OAAO,OAAO;KACd,WAAW,OAAO,YACd,EACE,MAAM,IAAI,kBAAkB,OAAO,UAAU,CAAC,IAAI,SAAS,IAC5D,GACD;KACJ,SAAS,OAAO,eAAe;KAChC;IACF,EACF;AACD;EAEF,KAAK;EACL,KAAK;AACH,OAAI,OAAO,SAAS,WAClB,cAAa,mBAAmB;OAEhC,cAAa,mBAAmB;AAGlC,kBAAe,EACb,QAAQ;IACN,MAAM;IACN,OAAO;KACL,MAAM;KACN,WAAW,qBAAqB,SAAS,KAAK;KAC9C,WAAW,EACT,MAAM,UACP;KACD,SAAS,OAAO,eAAe;KAChC;IACF,EACF;AACD;EAEF,KAAK;AACH,gBAAa,mBAAmB;AAChC,kBAAe,EACb,QAAQ;IACN,MAAM;IACN,OAAO;KACL,cAAc,OAAO;KACrB,WAAW,OAAO,OACd,OAAO,OAAO,SAAS,aACrB,EAAE,MAAM,IAAI,kBAAkB,OAAO,KAAK,CAAC,IAAI,SAAS,IAAI,GAC5D,EAAE,MAAM,KAAK,UAAU,OAAO,KAAK,EAAE,GACvC;KACJ,SAAS,OAAO,eAAe;KAChC;IACF,EACF;AACD;EAEF,QACE,OAAM,IAAI,MAAM,mBAAmB,SAAyB;;AAGhE,QAAO;EACL,MAAM,SAAS;EACf,aAAa,SAAS;EACtB,UAAU,SAAS;EACnB;EACA;EACA;EACA;EACD;;;;;AChXH,MAAM,kBAAkB;CACtB,MAAM;EAAE,MAAM;EAAc,MAAM;EAAM;CACxC,QAAQ;EAAE,MAAM;EAAc,MAAM;EAAU;CAC9C,SAAS;EAAE,MAAM;EAAc,MAAM;EAAO;CAC5C,OAAO;EAAE,MAAM;EAAc,MAAM;EAAS;CAC5C,SAAS;EAAE,MAAM;EAAoB,MAAM;EAAW;CACtD,SAAS;EAAE,MAAM;EAAc,MAAM;EAAW;CAChD,MAAM;EAAE,
MAAM;EAAoB,MAAM;EAAQ;CAChD,UAAU;EAAE,MAAM;EAAoB,MAAM;EAAY;CACxD,MAAM;EAAE,MAAM;EAAoB,MAAM;EAAQ;CACjD;;;;;;;;AAYD,eAAsB,cACpB,QACA,QACA,QAAuC,iBACvC;CACA,MAAM,EAAE,cAAc;AACtB,KAAI,UAAU,iBAAiB;AAE7B,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,aAAW;AACjD,SAAM,OAAO,sBAAsBE,SAAO,QAAQ;AAClD,SAAM,OAAO,YAAYA,SAAO,YAAY;IAC5C,EACF,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,WAAW;AACjD,SAAM,OAAO,sBAAsB,OAAO,QAAQ;AAClD,SAAM,OAAO,YAAY,OAAO,YAAY;IAC5C,CACH,CAAC;AAGF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,SAAS,QAAQ,KAAK,aAAW,OAAO,uBAAuBA,SAAO,QAAQ,CAAC,EAC5F,GAAG,UAAU,SAAS,QAAQ,KAAK,WAAW,OAAO,uBAAuB,OAAO,QAAQ,CAAC,CAC7F,CAAC;YACO,UAAU,mBAGnB,OAAM,QAAQ,IACZ,UAAU,SAAS,QAAQ,KAAK,QAAQ,OAAO,uBAAuB,IAAI,QAAQ,CAAC,CACpF;UACQ,UAAU,kBAEnB,OAAM,QAAQ,IACZ,UAAU,QAAQ,QAAQ,KAAK,QAAQ,OAAO,sBAAsB,IAAI,QAAQ,CAAC,CAClF;;;;;;;AASL,eAAsB,aAAa,SAAsB;CACvD,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,YAAyC,EAAE;AACjD,KAAI,CAAC,WACH,MAAK,MAAM,YAAY,YAAY,kBAAkB;AACnD,QAAM,SAAS,eAAe;AAC9B,YAAU,KAAK,SAAS;;CAG5B,MAAM,YAAY,aACd,EAAE,GACF,OAAO,OAAQ,MAAM,YAAY,iBAAiB,eAAe,IAAK,EAAE,CAAC;CAE7E,MAAM,EACJ,WAAW,kBACX,WACA,WACA,mBACE,MAAMC,eAAa,QAAQ,aAAa,YAAY,MAAM,UAAU;CAExE,MAAM,oBAAoB,MAAM,cAC9B,QACA,aACA,WACA,WALsB,iBAAiB,QAAQ,KAAK,QAAQ,IAAI,KAAK,EAOrE,YAAY,IACb;AAED,kBAAiB,OAAO;AACxB,mBAAkB,OAAO;AACzB,QAAO;EACL,WAAW;GACT,SAAS;GACT,UAAU;GACX;EACD;EACA;EACA;EACD;;AAoBH,SAASC,MAAI,aAAqB,MAAc;AAC9C,QAAO,oBAAoB,YAAY,YAAY;;AAGrD,eAAeD,eACb,QACA,aACA,SACA,WACA;CACA,MAAM,YAAY,gBAChB,oBACD;CACD,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAExC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACpE,MAAI;GACF,MAAM,EAAE,kBAAkB,kBAAkB,MAAM,OAAO,qBAAqB;IAC5E;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,kBAAkB,cAAc;WACjC,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CACF,MAAM,mBAA6D,EAAE;AACrE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,MAAI,CAAC,SAAS,WAAW,KACvB;EAEF,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAKC,MAAI,aAAa,SAAS,UAAU,KAAK,EAC/C,CAAC;AACF,mBAAiB,SAAS,UAAU,QAAQ;GAC1C;GACA,OAAO,UAAU,OAAO;GAC
zB;GACD,CACH;AAED,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,WAAW,iBAAiB,SAAS;EAC3C,MAAM,cAAc,MAAM,iBAAiBA,MAAI,aAAa,SAAS,UAAU,EAAE,QAAQ;AACzF,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACxB,CAAC;YACO,SAAS,UAAU,QAC5B,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACvB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM,SAAS;IACf,SAAS;KACP;KACA,eAAe,SAAS;KACzB;IACD;IACD,CAAC;AACF,UAAO,iBAAiB,SAAS;QAEjC,WAAU,QAAQ,KAAK;GACrB,MAAM,SAAS;GACf,SAAS;IACP;IACA,eAAe,SAAS;IACzB;GACD;GACD,CAAC;;AAGN,QAAO,QAAQ,iBAAiB,CAAC,SAAS,CAAC,mBAAmB;EAC5D,MAAM,QAAQ,iBAAiB,gBAAgB;AAC/C,MAAI,SAAS,UAAU,QACrB,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,QACZ,WAAU,QAAQ,KAAK;GACrB,MAAM;GACN,SAAS;IACP;IACA;IACD;GACF,CAAC;GAEJ;AAEF,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;AAkB5D,eAAe,cACb,QACA,aACA,WACA,WACA,iBACA,KACA;CACA,MAAM,YAAY,gBAChB,qBACD;CAED,MAAM,kBAAkB,kBAA0B;AAChD,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,mBAAmB,kBAAkB,MAAM,OAAO,sBAAsB;KAC9E;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,mBAAmB,cAAc;YAClC,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;CAGJ,MAAM,wCAAwB,IAAI,KAAa;AAC/C,MAAK,MAAM,YAAY,UACrB,KAAI,SAAS,QAAQ,SAAS,mBAC5B,uBAAsB,IAAI,SAAS,QAAQ,aAAa;AAK5D,MAAK,MAAM,YAAY,UACrB,MAAK,MAAM,YAAY,OAAO,OAAO,SAAS,UAAU,CACtD,KAAI,sBAAsB,IAAI,SAAS,KAAK,IAAI,SAAS,kBAAkB,MACzE,OAAM,IAAI,MACR,aAAa,SAAS,KAAK,mMAE5B;AAKP,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,oBAAoB,MAAM,eAAe,SAAS,UAAU;EAClE,MAAM,kCAAkB,IAAI,KAAa;AACzC,oBAAkB,SAAS,aAAa;AACtC,mBAAgB,IAAI,SAAS,KAAK;IAClC;AACF,OAAK,MAAM,YAAY,OAAO,OAAO,SAAS,UAAU,CACtD,KAAI,gBAAgB,IAAI,SAAS,KAAK,EAAE;AACtC,aAAU,QAAQ,KAAK;IACrB,MAAM,SAAS;IACf,SAAS;KACP;KACA,eAAe,SAAS;KACxB,kBAAkB,gBAChB,SAAS,WACT,UACA,uBACA,IACD;KACF;IACF,CAAC;AACF,mBAAgB,OAAO,SAAS,KAAK;QAErC,WAAU,QAAQ,KAAK;GACrB,MAAM,SAAS;GACf,SAAS;IACP;IACA,eAAe,SAAS;IACxB,kBAAkB,gBAChB,SAAS,WACT,UACA,uBACA,IACD;IACF;GACF,CAAC;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,SAAS;KACxB,cAAc;KACf;IACF,CAAC;IACF;;AAGJ,MAAK,MAAM,iBAAiB,gBAE1B,EA
D0B,MAAM,eAAe,cAAc,EAC3C,SAAS,aAAa;AACtC,YAAU,QAAQ,KAAK;GACrB,MAAM,SAAS;GACf,SAAS;IACP;IACA;IACA,cAAc,SAAS;IACxB;GACF,CAAC;GACF;AAEJ,QAAO;;AAGT,SAAS,gBACP,WACA,UACA,uBACA,KACiD;CACjD,MAAM,YAAwE,CAC5E;EACE,MAAM;EACN,eAAe;EACf,aAAa,GAAG,SAAS,KAAK;EAC9B,eAAe,+BAA+B;EAC9C,oBAAoB,qBAAqB,WAAW,SAAS,KAAK;EAClE,eAAe,EACb,MAAM,+BAA+B,IAAI,EAC1C;EACD,YAAY;EACb,CACF;CAED,MAAM,eAAe,WAAW,SAAS,SAAS,KAAK;CAGvD,MAAM,SAAkE,SAAS,QAC7E,YAAY,SAAS,OAAO,GAAG,aAAa,QAAQ,KAAK,GACzD,EAAE;CAGN,MAAM,WAAkE,YACtE,EAAE,IAAI,SAAS,QAAQ,EACvB,GAAG,aAAa,SAChB,MACD,CAAC;CAGF,MAAM,sBAAsB,SAAS,eAAe,GAAG,SAAS,KAAK;CACrE,MAAM,oBAAoB,SAAS,OAAO,SAAS;CACnD,MAAM,sBAAsB,oBACxB,GAAG,oBAAoB,gBAAgB,sBACvC;CAKJ,IAAI,yBAAyB;AAC7B,KAAI,SAAS,kBAAkB,OAC7B,0BAAyB,SAAS;UACzB,sBAAsB,IAAI,SAAS,KAAK,CACjD,0BAAyB;AAG3B,QAAO;EACL,eAAe;EACf,aAAa;EACb;EACA,MAAM,SAAS;EACf,eAAe,SAAS;EACxB;EACA;EACA;EACD;;AAGH,SAAS,YACP,QACA,UACA,SACyD;AACzD,KAAI,CAAC,OACH,QAAO,EAAE;AAGX,QAAO,OAAO,QAAQ,OAAO,CAAC,KAAK,CAAC,WAAW,WAAW;EACxD,IAAI;EAEJ,MAAM,WADgB,WAAW,MAAM,SAAS,OAAO,WAAW,SACjC,QAAS,MAAM,SAAS,YAAY;AAErE,MAAI,MAAM,SAAS,UAAU;GAC3B,MAAM,WAAW,MAAM,SAAS,YAAY,GAAG,WAAW,WAAW,SAAS,UAAU;AACxF,UAAO;IACL,MAAM;IACN,MAAM;IACN,aAAa,MAAM,SAAS,eAAe;IAC3C;IACA,QAAQ,YAAY,MAAM,QAAQ,UAAU,QAAQ;IACrD;aACQ,MAAM,SAAS,OAExB,QAAO;GACL,MAAM;GACN,MAHe,MAAM,SAAS,YAAY,GAAG,WAAW,WAAW,SAAS,UAAU;GAItF;GACA,eAAe,MAAM,SAAS;GAC/B;MAED,QAAO;GAAE,GAAG,gBAAgB,MAAM;GAAO;GAAU;AAGrD,SAAO;GACL,MAAM;GACN,aAAa,MAAM,SAAS;GAC5B,OAAO,MAAM,SAAS,SAAS;GAC/B;GACA;GACD;GACD;;;;;AC1eJ,MAAM,qBAAqB,EAAE,OAAO,EAClC,QAAQ,EAAE,OAAO,EAAE,QAAQ,EAAE,EAAE,OAAO,EAAE,QAAQ,EAAE,EAAE,QAAQ,CAAC,CAAC,EAC/D,CAAC;;;;;AAQF,SAAgB,sBAA8B;AAC5C,QAAO,KAAK,KAAK,YAAY,EAAE,qBAAqB;;;;;;AAOtD,SAAgB,mBAAiC;CAC/C,MAAM,WAAW,qBAAqB;AACtC,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO,EAAE,QAAQ,EAAE,EAAE;AAEvB,KAAI;EACF,MAAM,MAAM,aAAa,UAAU,QAAQ;AAC3C,SAAO,mBAAmB,MAAM,KAAK,MAAM,IAAI,CAAC;SAC1C;AACN,SAAO,EAAE,QAAQ,EAAE,EAAE;;;;;;;AAQzB,SAAgB,iBAAiB,OAA2B;CAC1D,MAAM,WAAW,qBAAqB;AAEtC,WADY,KAAK,QAAQ,SAAS,EACnB,EAAE,WAAW,MAAM,CAAC;AACnC,eAAc
,UAAU,KAAK,UAAU,OAAO,MAAM,EAAE,EAAE,QAAQ;;;;;;;AAQlE,SAAgB,UAAU,OAAuB;AAC/C,QAAO,WAAW,SAAS,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;;;;;;;;;ACFzD,eAAsB,kBAAkB,SAAsB;CAC5D,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,eAAe,aAAa,EAAE,GAAG,YAAY;CAEnD,MAAM,iBAAiB,gBACrB,wBACD;CACD,MAAM,kBAAkB,gBACtB,yBACD;CACD,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAGxC,MAAM,oBAAoB,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACzE,MAAI;GACF,MAAM,EAAE,QAAQ,kBAAkB,MAAM,OAAO,wBAAwB;IACrE;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,QAAQ,cAAc;WACvB,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CAEF,MAAM,iBAAgE,EAAE;AACxE,OAAM,QAAQ,IACZ,kBAAkB,IAAI,OAAO,aAAa;EACxC,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAK,SAAS,aAAa,SAAS,KAAK,EAC1C,CAAC;AACF,iBAAe,SAAS,QAAQ;GAC9B;GACA,OAAO,UAAU,OAAO;GACzB;GACD,CACH;CAED,MAAM,QAAQ,kBAAkB;AAEhC,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,UAAU;EAChC,MAAM,YAAY,MAAM;EACxB,MAAM,WAAW,eAAe;AAEhC,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc;IACf,CAAC;YACO,SAAS,UAAU,YAAY,KACxC,WAAU,KAAK;IACb,cAAc;IACd,cAAc;IACd,cAAc,SAAS;IACxB,CAAC;AAGJ,kBAAe,QAAQ,KAAK;IAC1B,MAAM;IACN;IACD,CAAC;AACF,UAAO,eAAe;QAEtB,gBAAe,QAAQ,KAAK;GAC1B,MAAM;GACN;GACD,CAAC;EAIJ,IAAI,kBAA4B,EAAE;AAClC,MAAI,SAiBF,oBAhBgB,MAAM,SAAS,OAAO,WAAW,gBAAgB;AAC/D,OAAI;IACF,MAAM,EAAE,SAAS,kBAAkB,MAAM,OAAO,yBAAyB;KACvE;KACA,wBAAwB;KACxB;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,SAAS,cAAc;YACxB,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER,EACwB,KAAK,MAAM,EAAE,KAAK;EAG9C,MAAM,cAAc,IAAI,IAAI,gBAAgB;AAG5C,OAAK,MAAM,UAAU,MAAM,QACzB,KAAI,YAAY,IAAI,OAAO,KAAK,EAAE;AAGhC,OAFoB,UAAU,OAAO,MAAM,KACxB,MAAM,OAAO,aAAa,OAAO,MAElD,iBAAgB,QAAQ,KAAK;IAC3B,MAAM,GAAG,UAAU,GAAG,OAAO;IAC7B,YAAY,OAAO;IACnB;IACA;IACA,OAAO,OAAO;IACf,CAAC;AAEJ,eAAY,OAAO,OAAO,KAAK;QAE/B,iBAAgB,QAAQ,KAAK;GAC3B,MAAM,GAAG,UAAU,GAAG,OAAO;GAC7B,YAAY,OAAO;GACnB;GACA;GACA,OAAO,OAAO;GACf,CAAC;AAKN,OAAK,MAAM,cAAc,YACvB,iBAAgB,QAAQ,KAAK;GAC3B,MAAM,GAAG,UAAU,GAAG;GACtB,YAAY;GACZ;GACA;GACD,C
AAC;GAEJ,CACH;AAGD,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,eAAe,EAAE;AAC1D,MAAI,CAAC,MAAO;EACZ,MAAM,QAAQ,MAAM;AACpB,MAAI,SAAS,UAAU,YAAY,KACjC,gBAAe,IAAI,MAAM;AAE3B,MAAI,UAAU,YAAY,MAAM;GAE9B,MAAM,UAAU,MAAM,SAAS,OAAO,WAAW,gBAAgB;AAC/D,QAAI;KACF,MAAM,EAAE,oBAAS,kBAAkB,MAAM,OAAO,yBAAyB;MACvE;MACA,wBAAwB;MACxB;MACA,UAAU;MACX,CAAC;AACF,YAAO,CAACC,WAAS,cAAc;aACxB,OAAO;AACd,SAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,WAAM;;KAER;AACF,QAAK,MAAM,UAAU,QACnB,iBAAgB,QAAQ,KAAK;IAC3B,MAAM,GAAG,KAAK,GAAG,OAAO;IACxB,YAAY,OAAO;IACnB;IACA,WAAW;IACZ,CAAC;AAGJ,kBAAe,QAAQ,KAAK;IAC1B;IACA;IACD,CAAC;;;AAIN,gBAAe,OAAO;AACtB,iBAAgB,OAAO;AACvB,QAAO;EAAE;EAAgB;EAAiB;EAAW;EAAW;EAAgB;;AAGlF,SAAS,SAAS,aAAqB,MAAc;AACnD,QAAO,oBAAoB,YAAY,SAAS;;;;;;;;;;AAWlD,eAAsB,mBACpB,QACA,QACA,QAAyD,iBACzD,aACA;CACA,MAAM,EAAE,gBAAgB,oBAAoB;AAE5C,KAAI,UAAU,iBAAiB;AAE7B,QAAM,QAAQ,IACZ,eAAe,QAAQ,IAAI,OAAO,aAAW;AAC3C,SAAM,OAAO,yBAAyB;IACpC,aAAaC,SAAO;IACpB,wBAAwBA,SAAO;IAChC,CAAC;AACF,OAAI,aAAa;IACf,MAAM,cAAc,MAAM,iBACxB,SAASA,SAAO,aAAaA,SAAO,KAAK,EACzC,YAAY,KACb;AACD,UAAM,OAAO,YAAY,YAAY;;IAEvC,CACH;AAGD,MAAI,YACF,OAAM,QAAQ,IACZ,eAAe,QAAQ,IAAI,OAAO,WAAW;GAC3C,MAAM,cAAc,MAAM,iBACxB,SAAS,OAAO,aAAa,OAAO,KAAK,EACzC,YAAY,KACb;AACD,SAAM,OAAO,YAAY,YAAY;IACrC,CACH;AAIH,QAAM,QAAQ,IACZ,gBAAgB,QAAQ,KAAK,aAC3B,OAAO,0BAA0B;GAC/B,aAAaA,SAAO;GACpB,wBAAwBA,SAAO;GAC/B,yBAAyBA,SAAO;GAChC,0BAA0BA,SAAO;GAClC,CAAC,CACH,CACF;AAGD,QAAM,QAAQ,IACZ,gBAAgB,QAAQ,KAAK,WAC3B,OAAO,0BAA0B;GAC/B,aAAa,OAAO;GACpB,wBAAwB,OAAO;GAC/B,yBAAyB,OAAO;GAChC,0BAA0B,OAAO;GAClC,CAAC,CACH,CACF;AAGD,MAAI,aAAa;GACf,MAAM,QAAQ,kBAAkB;AAChC,QAAK,MAAM,SAAS,YAAY,SAAS;AACvC,QAAI,CAAC,MAAM,OAAO,MAAM,WACtB,OAAM,OAAO,MAAM,aAAa,EAAE;AAEpC,SAAK,MAAM,UAAU,MAAM,QACzB,OAAM,OAAO,MAAM,WAAW,OAAO,QAAQ,UAAU,OAAO,MAAM;;AAGxE,oBAAiB,MAAM;;YAEhB,UAAU,UAAU;AAE7B,QAAM,QAAQ,IACZ,gBAAgB,QAAQ,KAAK,QAC3B,OAAO,0BAA0B;GAC/B,aAAa,IAAI;GACjB,wBAAwB,IAAI;GAC5B,yBAAyB,IAAI;GAC9B,CAAC,CACH,CACF;AAGD,QAAM,QAAQ,IACZ,eAAe,QAAQ,KAAK,QAC1B,OAAO,yBAAyB;GAC9B,aAAa,IAAI;GACjB,wBAAwB,IAAI
;GAC7B,CAAC,CACH,CACF;AAGD,MAAI,gBAAgB,QAAQ,SAAS,KAAK,eAAe,QAAQ,SAAS,GAAG;GAC3E,MAAM,QAAQ,kBAAkB;AAChC,QAAK,MAAM,OAAO,gBAAgB,QAChC,KAAI,MAAM,OAAO,IAAI,YAAY;AAC/B,WAAO,MAAM,OAAO,IAAI,WAAW,IAAI;AACvC,QAAI,OAAO,KAAK,MAAM,OAAO,IAAI,WAAW,CAAC,WAAW,EACtD,QAAO,MAAM,OAAO,IAAI;;AAI9B,QAAK,MAAM,OAAO,eAAe,QAC/B,QAAO,MAAM,OAAO,IAAI;AAE1B,oBAAiB,MAAM;;;;;;;;;;;;;;ACpV7B,eAAsB,mBACpB,QACA,QACA,QAAyD,iBACzD;CACA,MAAM,EAAE,cAAc;AACtB,KAAI,UAAU,gBAEZ,OAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,IAAI,OAAO,aAAW;AACzC,QAAM,OAAO,oBAAoBC,SAAO,QAAQ;AAChD,QAAM,OAAO,YAAYA,SAAO,YAAY;GAC5C,EACF,GAAG,UAAU,QAAQ,IAAI,OAAO,WAAW;AACzC,QAAM,OAAO,oBAAoB,OAAO,QAAQ;AAChD,QAAM,OAAO,YAAY,OAAO,YAAY;GAC5C,CACH,CAAC;UACO,UAAU,SAGnB,OAAM,QAAQ,IAAI,UAAU,QAAQ,KAAK,QAAQ,OAAO,oBAAoB,IAAI,QAAQ,CAAC,CAAC;;AAqB9F,SAASC,MAAI,aAAqB,MAAc;AAC9C,QAAO,oBAAoB,YAAY,iBAAiB;;;;;;;AAQ1D,eAAsB,kBAAkB,SAAsB;CAC5D,MAAM,EAAE,QAAQ,aAAa,aAAa,eAAe;CACzD,MAAM,YAAY,gBAChB,iBACD;CACD,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAGxC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACpE,MAAI;GACF,MAAM,EAAE,gBAAgB,kBAAkB,MAAM,OAAO,mBAAmB;IACxE;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,gBAAgB,cAAc;WAC/B,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CACF,MAAM,mBAA6D,EAAE;AACrE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;EACnC,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAKA,MAAI,aAAa,SAAS,KAAK,EACrC,CAAC;AACF,mBAAiB,SAAS,QAAQ;GAChC;GACA,OAAO,UAAU,OAAO;GACzB;GACD,CACH;CAED,MAAM,wBAAwB,aAAa,EAAE,GAAG,YAAY;AAC5D,MAAK,MAAM,kBAAkB,uBAAuB;EAClD,MAAM,SAAS;EACf,MAAM,OAAO,eAAe;EAC5B,MAAM,WAAW,iBAAiB;EAClC,MAAM,cAAc,MAAM,iBAAiBA,MAAI,aAAa,KAAK,EAAE,YAAY,KAAK;AAEpF,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc;IACf,CAAC;YACO,SAAS,UAAU,YAAY,KACxC,WAAU,KAAK;IACb,cAAc;IACd,cAAc;IACd,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe;MACb;MACA,aAAa,OAAO,eAAe;MACnC,oBAAoB,OAAO,sBAAsB,EAAE;MACpD;KACF;IACD;IACD,CAAC;AACF,UAAO,iBAAiB;QAExB,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA,eAAe;KAC
b;KACA,aAAa,OAAO,eAAe;KACnC,oBAAoB,OAAO,sBAAsB,EAAE;KACpD;IACF;GACD;GACD,CAAC;;AAGN,QAAO,QAAQ,iBAAiB,CAAC,SAAS,CAAC,UAAU;EACnD,MAAM,QAAQ,iBAAiB,OAAO;AACtC,MAAI,SAAS,UAAU,YAAY,KACjC,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,YAAY,KACxB,WAAU,QAAQ,KAAK;GACrB;GACA,SAAS;IACP;IACA;IACD;GACF,CAAC;GAEJ;AAEF,WAAU,OAAO;AACjB,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;;;;;;;AC1J5D,SAAS,mBAAmB,UAAqE;AAC/F,KAAI,OAAO,aAAa,YAAY,aAAa,KAAM,QAAO;AAC9D,KAAI,EAAE,eAAe,UAAW,QAAO;CAEvC,MAAM,YAAa,SAAoC;AACvD,KAAI,OAAO,cAAc,YAAY,cAAc,KAAM,QAAO;AAChE,KAAI,EAAE,eAAe,WAAY,QAAO;AAExC,QAAO,OAAQ,UAAqC,cAAc;;;;;;;;AASpE,SAAgB,4BACd,QACA,WAC2B;CAC3B,MAAM,SAAoC,EAAE;AAE5C,MAAK,MAAM,aAAa,OAAO,KAAK,OAAO,MAAM,EAAE,CAAC,EAAE;EACpD,MAAM,WAAW,OAAO,KAAK;AAC7B,MAAI,CAAC,mBAAmB,SAAS,CAAE;EAEnC,MAAM,gBAAgB,KAAK,QAAQ,WAAW,SAAS,UAAU,UAAU;AAC3E,SAAO,KAAK;GAAE;GAAW;GAAe,CAAC;;AAG3C,QAAO;;;;;;;;ACtCT,MAAa,0BAA0B;;;;;;AA8EvC,SAAgB,WAAW,MAA8B;AACvD,QAAO,KAAK,QAAQ,SAAS;;;;;;;AAQ/B,SAAgB,oBAAoB,MAA6B;AAC/D,KAAI,KAAK,QAAQ,WAAW,EAC1B,QAAO;CAGT,MAAM,QAAkB,EAAE;CAG1B,MAAM,gCAAgB,IAAI,KAA2B;AACrD,MAAK,MAAM,UAAU,KAAK,SAAS;EACjC,MAAM,WAAW,cAAc,IAAI,OAAO,SAAS,IAAI,EAAE;AACzD,WAAS,KAAK,OAAO;AACrB,gBAAc,IAAI,OAAO,UAAU,SAAS;;AAG9C,MAAK,MAAM,CAAC,UAAU,YAAY,eAAe;AAC/C,QAAM,KAAK,GAAG,KAAK,UAAU,GAAG,SAAS,GAAG;AAE5C,OAAK,MAAM,UAAU,QACnB,OAAM,KAAK,iBAAiB,OAAO,CAAC;;AAIxC,QAAO,MAAM,KAAK,KAAK;;;;;;;AAQzB,SAAS,iBAAiB,QAA4B;AACpD,SAAQ,OAAO,MAAf;EACE,KAAK,aACH,QAAO,cAAc,OAAO,SAAS;EACvC,KAAK,eACH,QAAO,cAAc,OAAO,SAAS;EACvC,KAAK,gBACH,QAAO,cAAc,OAAO,SAAS,IAAI,OAAO;EAClD,KAAK,eAAe;GAClB,MAAM,QAAQ,OAAO;GACrB,MAAM,UAAU,gBAAgB,MAAM;AACtC,UAAO,OAAO,OAAO,UAAU,IAAI;;EAErC,KAAK,iBAAiB;GACpB,MAAM,QAAQ,OAAO;AACrB,UAAO,OAAO,OAAO,UAAU,IAAI,MAAM;;EAE3C,KAAK,kBAAkB;GACrB,MAAM,SAAS,OAAO;GACtB,MAAM,QAAQ,OAAO;AACrB,UAAO,OAAO,OAAO,UAAU,IAAI,wBAAwB,QAAQ,MAAM;;EAE3E,KAAK,cACH,QAAO,eAAe,OAAO;EAC/B,KAAK,gBACH,QAAO,eAAe,OAAO;EAC/B,KAAK,iBACH,QAAO,eAAe,OAAO,UAAU,IAAI,OAAO,UAAU;EAC9D,KAAK,aACH,QAAO,cAAc,OAAO;EAC9B,KAAK,eACH,QAAO,cAAc,OAAO;EAC9B,KAAK,gBACH,QAAO,cAAc,OAAO,UAAU,IAAI,OAAO,UA
AU;EAC7D,KAAK,qBACH,QAAO,oBAAoB,OAAO,mBAAmB,KAAK,OAAO,iBAAiB,KAAK,GAAG,IAAI,OAAO;EACvG,KAAK,uBACH,QAAO,oBAAoB,OAAO,mBAAmB,KAAK,OAAO,iBAAiB,KAAK,GAAG,IAAI,OAAO;EACvG,KAAK,wBACH,QAAO,oBAAoB,OAAO,mBAAmB,KAAK,OAAO,iBAAiB,KAAK,GAAG,IAAI,OAAO,iBAAiB,IAAI,OAAO,UAAU;EAC7I,KAAK,sBACH,QAAO,oBAAoB,OAAO,UAAU;EAC9C,QACE,QAAO,OAAO,OAAO,SAAS,GAAG,OAAO,aAAa;;;;;;;;AAS3D,SAAS,gBAAgB,OAAoC;CAC3D,IAAI,OAAO,MAAM;AACjB,KAAI,MAAM,MAAO,SAAQ;AACzB,KAAI,MAAM,SAAU,SAAQ;KACvB,SAAQ;AACb,QAAO;;;;;;;;AAST,SAAS,wBAAwB,QAA6B,OAAoC;CAChG,MAAM,UAAoB,EAAE;AAE5B,KAAI,OAAO,SAAS,MAAM,KACxB,SAAQ,KAAK,SAAS,OAAO,KAAK,KAAK,MAAM,OAAO;AAEtD,KAAI,OAAO,aAAa,MAAM,SAC5B,SAAQ,KAAK,aAAa,OAAO,SAAS,KAAK,MAAM,WAAW;AAElE,KAAI,QAAQ,OAAO,MAAM,KAAK,QAAQ,MAAM,MAAM,CAChD,SAAQ,KAAK,UAAU,OAAO,SAAS,MAAM,KAAK,MAAM,SAAS,QAAQ;AAE3E,KAAI,QAAQ,OAAO,MAAM,KAAK,QAAQ,MAAM,MAAM,CAChD,SAAQ,KAAK,UAAU,OAAO,SAAS,MAAM,KAAK,MAAM,SAAS,QAAQ;AAE3E,KAAI,QAAQ,OAAO,OAAO,KAAK,QAAQ,MAAM,OAAO,CAClD,SAAQ,KAAK,WAAW,OAAO,UAAU,MAAM,KAAK,MAAM,UAAU,QAAQ;AAE9E,KAAI,QAAQ,OAAO,OAAO,KAAK,QAAQ,MAAM,OAAO,CAClD,SAAQ,KAAK,WAAW,OAAO,UAAU,MAAM,KAAK,MAAM,UAAU,QAAQ;CAG9E,MAAM,gBAAgB,OAAO,iBAAiB,EAAE;CAChD,MAAM,eAAe,MAAM,iBAAiB,EAAE;CAC9C,MAAM,WAAW,IAAI,IAAI,aAAa,KAAK,MAAM,EAAE,MAAM,CAAC;AAI1D,KAFE,cAAc,WAAW,aAAa,UACtC,cAAc,MAAM,MAAM,CAAC,SAAS,IAAI,EAAE,MAAM,CAAC,EACvB;EAC1B,MAAM,eAAe,cAAc,KAAK,MAAM,EAAE,MAAM,CAAC,KAAK,KAAK;EACjE,MAAM,cAAc,aAAa,KAAK,MAAM,EAAE,MAAM,CAAC,KAAK,KAAK;AAC/D,UAAQ,KAAK,mBAAmB,aAAa,OAAO,YAAY,GAAG;;CAGrE,MAAM,cAAc,OAAO;CAC3B,MAAM,aAAa,MAAM;AACzB,MACG,aAAa,QAAQ,QAAQ,SAAS,YAAY,QAAQ,QAAQ,QAClE,aAAa,QAAQ,QAAQ,SAAS,YAAY,QAAQ,QAAQ,IAEnE,SAAQ,KAAK,iBAAiB;CAGhC,MAAM,iBAAiB,OAAO,YAAY,EAAE;CAC5C,MAAM,gBAAgB,MAAM,YAAY,EAAE;AAC1C,KAAI,eAAe,WAAW,cAAc,OAC1C,SAAQ,KAAK,gBAAgB,eAAe,OAAO,KAAK,cAAc,SAAS;AAGjF,KAAI,QAAQ,OAAO,OAAO,KAAK,QAAQ,MAAM,OAAO,CAClD,SAAQ,KACN,WAAW,OAAO,SAAS,YAAY,WAAW,KAAK,MAAM,SAAS,YAAY,aACnF;AAGH,QAAO,QAAQ,KAAK,KAAK;;;;;;;AAQ3B,SAAgB,sBAAsB,iBAA+C;AACnF,KAAI,gBAAgB,WAAW,EAC7B,QAAO;CAGT,MAAM,QAAkB,CAAC,8BAA8B,GAAG;AAE1D,MAAK,MAAM,MAAM,iBAAiB;E
AChC,MAAM,WAAW,GAAG,YAAY,GAAG,GAAG,SAAS,GAAG,GAAG,cAAc,GAAG;AACtE,QAAM,KAAK,OAAO,SAAS,IAAI,GAAG,SAAS;;AAG7C,QAAO,MAAM,KAAK,KAAK;;AAGzB,MAAM,qBAAqD;CACzD,YAAY;CACZ,cAAc;CACd,eAAe;CACf,aAAa;CACb,eAAe;CACf,gBAAgB;CAChB,aAAa;CACb,eAAe;CACf,gBAAgB;CAChB,YAAY;CACZ,cAAc;CACd,eAAe;CACf,oBAAoB;CACpB,sBAAsB;CACtB,uBAAuB;CACvB,qBAAqB;CACtB;;;;;;AAOD,SAAgB,kBAAkB,MAA6B;CAC7D,MAAM,QAAiD,EAAE;AACzD,MAAK,MAAM,UAAU,KAAK,QACxB,OAAM,OAAO,SAAS,MAAM,OAAO,SAAS,KAAK;CAGnD,MAAM,QAAQ,OAAO,KAAK,MAAM,CAAC,KAC9B,SAAS,GAAG,MAAM,MAAwB,GAAG,mBAAmB,QAClE;AAED,QAAO,MAAM,SAAS,IAAI,MAAM,KAAK,KAAK,GAAG;;;;;;;;;;;AC5R/C,MAAa,wBAAwB;;;;AAKrC,MAAa,mBAAmB;AAChC,MAAa,iBAAiB;AAC9B,MAAa,oBAAoB;AACjC,MAAa,qBAAqB;;;;;AAMlC,MAAa,2BAA2B;;;;;;AAuOxC,SAAgB,uBAAuB,WAA4B;AACjE,QAAO,yBAAyB,KAAK,UAAU;;;;;;;AAQjD,SAAgB,sBAAsB,KAAqB;AACzD,QAAO,IAAI,UAAU,CAAC,SAAS,GAAG,IAAI;;;;;AAsBxC,MAAM,uBAA0D;CAC9D,QAAQ;CACR,MAAM;CACN,SAAS;CACT,IAAI;CACL;;;;;;;AAQD,SAAgB,oBAAoB,eAAuB,KAAqB;CAC9E,MAAM,SAAS,sBAAsB,IAAI;AACzC,QAAO,KAAK,KAAK,eAAe,OAAO;;;;;;;;;AAUzC,SAAgB,qBACd,eACA,KACA,MACQ;CACR,MAAM,eAAe,oBAAoB,eAAe,IAAI;AAC5D,QAAO,KAAK,KAAK,cAAc,qBAAqB,MAAM;;;;;;;AAY5D,SAAS,0BAA0B,OAAyC;CAG1E,MAAM,SAA8B;EAClC,MAAM,MAAM,OAAO;EACnB,UAAU,MAAM,OAAO,aAAa;EACrC;AAED,KAAI,MAAM,OAAO,MAAO,QAAO,QAAQ;AACvC,KAAI,MAAM,OAAO,MAAO,QAAO,QAAQ;AACvC,KAAI,MAAM,OAAO,OAAQ,QAAO,SAAS;AAEzC,KAAI,MAAM,OAAO,iBAAiB,MAAM,OAAO,cAAc,SAAS,EACpE,QAAO,gBAAgB,MAAM,OAAO,cAAc,KAAK,OAAO;EAC5D,OAAO,EAAE;EACT,GAAI,EAAE,eAAe,EAAE,aAAa,EAAE,aAAa;EACpD,EAAE;AAGL,KAAI,MAAM,OAAO,YAAY;AAC3B,SAAO,aAAa;AACpB,MAAI,MAAM,OAAO,eAAgB,QAAO,iBAAiB,MAAM,OAAO;AACtE,MAAI,MAAM,OAAO,gBAAiB,QAAO,kBAAkB,MAAM,OAAO;;AAG1E,KAAI,MAAM,OAAO,YAAa,QAAO,cAAc,MAAM,OAAO;AAChE,KAAI,MAAM,OAAO,OAAQ,QAAO,SAAS;AAEzC,KAAI,MAAM,OAAO,OAAO;AACtB,SAAO,QAAQ,EAAE;AACjB,MAAI,MAAM,OAAO,MAAM,OACrB,QAAO,MAAM,SAAS,EAAE,MAAM,MAAM,OAAO,MAAM,OAAO,MAAM;AAEhE,MAAI,MAAM,OAAO,MAAM,OACrB,QAAO,MAAM,SAAS,EAAE,MAAM,MAAM,OAAO,MAAM,OAAO,MAAM;;AAIlE,KAAI,MAAM,OAAO,YAAY,MAAM,OAAO,SAAS,SAAS,EAC1D,QAAO,WAAW,MAAM,OAAO,SAAS,KAAK,OAAO;EAClD,
QAAQ,EAAE,MAAM,EAAE,OAAO,MAAM;EAC/B,cAAc,EAAE;EACjB,EAAE;AAGL,KAAI,MAAM,OAAO,OACf,QAAO,SAAS;EACd,OAAO,MAAM,OAAO,OAAO;EAC3B,GAAI,MAAM,OAAO,OAAO,aAAa,UAAa,EAAE,UAAU,MAAM,OAAO,OAAO,UAAU;EAC5F,GAAI,MAAM,OAAO,OAAO,UAAU,EAAE,QAAQ,MAAM,OAAO,OAAO,QAAQ;EACzE;AAGH,KAAI,MAAM,OAAO,UAAU,OAAW,QAAO,QAAQ,MAAM,OAAO;AAElE,KAAI,MAAM,OAAO,UAAU,OAAO,KAAK,MAAM,OAAO,OAAO,CAAC,SAAS,GAAG;AACtE,SAAO,SAAS,EAAE;AAClB,OAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,MAAM,OAAO,OAAO,CAC1E,QAAO,OAAO,cAAc,4CAA4C,aAAa;;AAIzF,QAAO;;;;;;;AAQT,SAAS,4CACP,aACqB;CACrB,MAAM,SAA8B;EAClC,MAAM,YAAY;EAClB,UAAU,YAAY,aAAa;EACpC;AAED,KAAI,YAAY,MAAO,QAAO,QAAQ;AACtC,KAAI,YAAY,MAAO,QAAO,QAAQ;AACtC,KAAI,YAAY,OAAQ,QAAO,SAAS;AAExC,KAAI,YAAY,iBAAiB,YAAY,cAAc,SAAS,EAClE,QAAO,gBAAgB,YAAY,cAAc,KAAK,OAAO;EAC3D,OAAO,EAAE;EACT,GAAI,EAAE,eAAe,EAAE,aAAa,EAAE,aAAa;EACpD,EAAE;AAGL,KAAI,YAAY,YAAY;AAC1B,SAAO,aAAa;AACpB,MAAI,YAAY,eAAgB,QAAO,iBAAiB,YAAY;AACpE,MAAI,YAAY,gBAAiB,QAAO,kBAAkB,YAAY;;AAGxE,KAAI,YAAY,YAAa,QAAO,cAAc,YAAY;AAC9D,KAAI,YAAY,OAAQ,QAAO,SAAS;AAExC,KAAI,YAAY,OAAO;AACrB,SAAO,QAAQ,EAAE;AACjB,MAAI,YAAY,MAAM,OACpB,QAAO,MAAM,SAAS,EAAE,MAAM,YAAY,MAAM,OAAO,MAAM;AAE/D,MAAI,YAAY,MAAM,OACpB,QAAO,MAAM,SAAS,EAAE,MAAM,YAAY,MAAM,OAAO,MAAM;;AAIjE,KAAI,YAAY,YAAY,YAAY,SAAS,SAAS,EACxD,QAAO,WAAW,YAAY,SAAS,KAAK,OAAO;EACjD,QAAQ,EAAE,MAAM,EAAE,OAAO,MAAM;EAC/B,cAAc,EAAE;EACjB,EAAE;AAGL,KAAI,YAAY,OACd,QAAO,SAAS;EACd,OAAO,YAAY,OAAO;EAC1B,GAAI,YAAY,OAAO,aAAa,UAAa,EAAE,UAAU,YAAY,OAAO,UAAU;EAC1F,GAAI,YAAY,OAAO,UAAU,EAAE,QAAQ,YAAY,OAAO,QAAQ;EACvE;AAGH,KAAI,YAAY,UAAU,OAAW,QAAO,QAAQ,YAAY;AAGhE,KAAI,YAAY,UAAU,OAAO,KAAK,YAAY,OAAO,CAAC,SAAS,GAAG;AACpE,SAAO,SAAS,EAAE;AAClB,OAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,YAAY,OAAO,CACzE,QAAO,OAAO,cAAc,4CAA4C,aAAa;;AAIzF,QAAO;;;;;;;AAQT,SAAS,mBAAmB,MAAkC;CAC5D,MAAM,SAA8C,EAAE;AAEtD,MAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,KAAK,OAAO,CAC1D,QAAO,aAAa,0BAA0B,MAAM;CAGtD,MAAM,eAA6B;EACjC,MAAM,KAAK;EACX;EACD;AAED,KAAI,KAAK,WAAY,cAAa,aAAa,KAAK;AACpD,KAAI,KAAK,YAAa,cAAa,cAAc,KAAK;AACtD,KAAI,KAAK,UAAU;AACjB,eAAa,WAAW,EAAE;AAC1B,MAAI,KAAK,SA
AS,gBAAgB,OAChC,cAAa,SAAS,cAAc,KAAK,SAAS;AAEpD,MAAI,KAAK,SAAS,eAAe,OAC/B,cAAa,SAAS,aAAa,KAAK,SAAS;AAEnD,MAAI,KAAK,SAAS,eAAe;GAE/B,MAAM,MAAM,KAAK,SAAS;AAC1B,gBAAa,SAAS,gBAAgB;IACpC,GAAI,IAAI,WAAW,UAAa,EAC9B,QAAQ,IAAI,QACb;IACD,GAAI,IAAI,WAAW,UAAa,EAC9B,QAAQ,IAAI,QACb;IACD,GAAI,IAAI,WAAW,UAAa,EAC9B,QAAQ,IAAI,QACb;IACD,GAAI,IAAI,SAAS,UAAa,EAC5B,MAAM,IAAI,MACX;IACF;;AAEH,MAAI,KAAK,SAAS,kBAAkB,OAClC,cAAa,SAAS,gBAAgB,KAAK,SAAS;;AAIxD,KAAI,KAAK,WAAW,OAAO,KAAK,KAAK,QAAQ,CAAC,SAAS,GAAG;AACxD,eAAa,UAAU,EAAE;AACzB,OAAK,MAAM,CAAC,WAAW,gBAAgB,OAAO,QAAQ,KAAK,QAAQ,CACjE,cAAa,QAAQ,aAAa;GAChC,QAAQ,YAAY;GACpB,QAAQ,YAAY;GACrB;;AAIL,KAAI,KAAK,SAAS,OAAO,KAAK,KAAK,MAAM,CAAC,SAAS,EACjD,cAAa,QAAQ,EAAE,GAAG,KAAK,OAAO;AAGxC,KAAI,OAAO,KAAK,KAAK,qBAAqB,CAAC,SAAS,GAAG;AACrD,eAAa,uBAAuB,EAAE;AACtC,OAAK,MAAM,CAAC,SAAS,QAAQ,OAAO,QAAQ,KAAK,qBAAqB,CACpE,cAAa,qBAAqB,WAAW;GAC3C,YAAY,IAAI;GAChB,aAAa,IAAI;GACjB,aAAa,IAAI;GACjB,SAAS,IAAI;GACb,aAAa,IAAI;GAClB;;AAIL,KAAI,OAAO,KAAK,KAAK,sBAAsB,CAAC,SAAS,GAAG;AACtD,eAAa,wBAAwB,EAAE;AACvC,OAAK,MAAM,CAAC,SAAS,QAAQ,OAAO,QAAQ,KAAK,sBAAsB,CACrE,cAAa,sBAAsB,WAAW;GAC5C,YAAY,IAAI;GAChB,aAAa,IAAI;GACjB,aAAa,IAAI;GACjB,SAAS,IAAI;GACb,aAAa,IAAI;GAClB;;AAIL,KAAI,KAAK,YAAY,UAAU,KAAK,YAAY,KAAK;AACnD,eAAa,cAAc,EAAE;AAE7B,MAAI,KAAK,YAAY,OACnB,cAAa,YAAY,SAAS;GAChC,QAAQ,KAAK,YAAY,OAAO,OAAO,IAAI,wBAAwB;GACnE,MAAM,KAAK,YAAY,OAAO,KAAK,IAAI,wBAAwB;GAC/D,QAAQ,KAAK,YAAY,OAAO,OAAO,IAAI,wBAAwB;GACnE,QAAQ,KAAK,YAAY,OAAO,OAAO,IAAI,wBAAwB;GACpE;AAGH,MAAI,KAAK,YAAY,IACnB,cAAa,YAAY,MAAM,KAAK,YAAY,IAAI,KAAK,YAAY;GACnE,YAAY,OAAO;GACnB,SAAS,OAAO;GAChB,QAAQ,OAAO;GACf,GAAI,OAAO,eAAe,EAAE,aAAa,OAAO,aAAa;GAC9D,EAAE;;AAIP,QAAO;;;;;;;AAQT,SAAS,wBACP,YAC0B;AAC1B,QAAO;EACL,YAAY,WAAW;EACvB,QAAQ,WAAW;EACnB,GAAI,WAAW,eAAe,EAAE,aAAa,WAAW,aAAa;EACtE;;;;;;;;AASH,SAAgB,6BACd,OACA,WACgB;CAChB,MAAM,gBAA8C,EAAE;AAEtD,MAAK,MAAM,CAAC,UAAU,SAAS,OAAO,QAAQ,MAAM,CAClD,eAAc,YAAY,mBAAmB,KAAK;AAGpD,QAAO;EACL,SAAS;EACT;EACA,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,OAAO;EACR;;;;;;;AAYH,SAAgB,aAAa,UAAkC;CAC7D,MAAM,UAAUC,KAAG,aAAa
,UAAU,QAAQ;AAClD,QAAO,KAAK,MAAM,QAAQ;;;;;;;AAQ5B,SAAgB,SAAS,UAAiC;CACxD,MAAM,UAAUA,KAAG,aAAa,UAAU,QAAQ;AAClD,QAAO,KAAK,MAAM,QAAQ;;;;;;;AAQ5B,SAAgB,kBACd,eAC6D;AAC7D,KAAI,CAACA,KAAG,WAAW,cAAc,CAC/B,QAAO,EAAE;CAGX,MAAM,UAAUA,KAAG,YAAY,eAAe,EAAE,eAAe,MAAM,CAAC;CACtE,MAAM,aAIA,EAAE;AAER,MAAK,MAAM,SAAS,SAAS;AAE3B,MAAI,CAAC,MAAM,aAAa,CAAE;AAC1B,MAAI,CAAC,uBAAuB,MAAM,KAAK,CAAE;EAEzC,MAAM,MAAM,SAAS,MAAM,MAAM,GAAG;EACpC,MAAM,eAAe,KAAK,KAAK,eAAe,MAAM,KAAK;EAGzD,MAAM,aAAa,KAAK,KAAK,cAAc,iBAAiB;AAC5D,MAAIA,KAAG,WAAW,WAAW,CAC3B,YAAW,KAAK;GACd,QAAQ;GACR,MAAM;GACN,MAAM;GACP,CAAC;EAIJ,MAAM,WAAW,KAAK,KAAK,cAAc,eAAe;AACxD,MAAIA,KAAG,WAAW,SAAS,CACzB,YAAW,KAAK;GACd,QAAQ;GACR,MAAM;GACN,MAAM;GACP,CAAC;;AAKN,YAAW,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO;AAC9C,QAAO;;;;;;;;AAST,SAAgB,uBAAuB,eAA+B;CACpE,MAAM,QAAQ,kBAAkB,cAAc;AAC9C,KAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,QAAO,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,OAAO,CAAC,GAAG;;;;;;;;AASnD,SAAS,oBAAoB,UAA0B,MAAqC;CAC1F,MAAM,QAAQ,EAAE,GAAG,SAAS,OAAO;AAEnC,MAAK,MAAM,UAAU,KAAK,QACxB,SAAQ,OAAO,MAAf;EACE,KAAK;AACH,SAAM,OAAO,YAAY,OAAO;AAChC;EACF,KAAK;AACH,UAAO,MAAM,OAAO;AACpB;EACF,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,OAAO;IAC1C,MAAM,QAAQ,OAAO;AAIrB,UAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,GAAI,MAAM,YAAY,UAAa,EAAE,SAAS,MAAM,SAAS;KAC7D,GAAI,MAAM,UAAU,UAAa,EAAE,OAAO,MAAM,OAAO;KACxD;;AAEH;EACF,KAAK;EACL,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,UACnC,OAAM,OAAO,YAAY;IACvB,GAAG,MAAM,OAAO;IAChB,QAAQ;KACN,GAAG,MAAM,OAAO,UAAU;MACzB,OAAO,YAAY,OAAO;KAC5B;IACF;AAEH;EACF,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,WAAW;IAC9C,MAAM,GAAG,OAAO,YAAY,GAAG,GAAG,oBAAoB,MAAM,OAAO,UAAU;AAC7E,UAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,QAAQ;KACT;;AAEH;EACF,KAAK;EACL,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,UACnC,OAAM,OAAO,YAAY;IACvB,GAAG,MAAM,OAAO;IAChB,SAAS;KACP,GAAG,MAAM,OAAO,UAAU;MACzB,OAAO,YAAY,OAAO;KAC5B;IACF;AAEH;EACF,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,aAAa,MAAM,OAAO,UAAU,SAAS;IAChF,MAAM,GAAG,OAAO,YAAY,GAAG,GAAG,qBAAqB,MAAM,OAAO,UAAU;AAC9E,UAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,SAAS,OAAO,KAAK,iBAAiB,CAAC
,SAAS,IAAI,mBAAmB;KACxE;;AAEH;EACF,KAAK;EACL,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,UACnC,OAAM,OAAO,YAAY;IACvB,GAAG,MAAM,OAAO;IAChB,OAAO;KACL,GAAG,MAAM,OAAO,UAAU;MACzB,OAAO,YAAY,OAAO;KAC5B;IACF;AAEH;EACF,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,aAAa,MAAM,OAAO,UAAU,OAAO;IAC9E,MAAM,GAAG,OAAO,YAAY,GAAG,GAAG,mBAAmB,MAAM,OAAO,UAAU;AAC5E,UAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,OAAO,OAAO,KAAK,eAAe,CAAC,SAAS,IAAI,iBAAiB;KAClE;;AAEH;EACF,KAAK;EACL,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,kBAAkB;IACrD,MAAM,MAAM,OAAO;AAUnB,SAPE,OAAO,qBACN,MAAM,OAAO,UAAU,uBAAuB,OAAO,oBAClD,YACA,MAAM,OAAO,UAAU,wBAAwB,OAAO,oBACpD,aACA,gBAEW,UACjB,OAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,sBAAsB;MACpB,GAAG,MAAM,OAAO,UAAU;OACzB,OAAO,mBAAmB;MAC5B;KACF;QAED,OAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,uBAAuB;MACrB,GAAG,MAAM,OAAO,UAAU;OACzB,OAAO,mBAAmB;MAC5B;KACF;;AAGL;EACF,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,kBAAkB;IACrD,MAAM,OAAO,MAAM,OAAO;IAE1B,MAAM,aACJ,OAAO,qBACN,KAAK,uBAAuB,OAAO,oBAChC,YACA,KAAK,wBAAwB,OAAO,oBAClC,aACA;AAER,QAAI,eAAe,aAAa,KAAK,uBAAuB,OAAO,mBAAmB;KACpF,MAAM,GAAG,OAAO,mBAAmB,GAAG,GAAG,cAAc,KAAK;AAC5D,WAAM,OAAO,YAAY;MACvB,GAAG;MACH,sBAAsB,OAAO,KAAK,UAAU,CAAC,SAAS,IAAI,YAAY;MACvE;eAED,eAAe,cACf,KAAK,wBAAwB,OAAO,mBACpC;KACA,MAAM,GAAG,OAAO,mBAAmB,GAAG,GAAG,cAAc,KAAK;AAC5D,WAAM,OAAO,YAAY;MACvB,GAAG;MACH,uBAAuB,OAAO,KAAK,UAAU,CAAC,SAAS,IAAI,YAAY;MACxE;;;AAGL;EACF,KAAK;AACH,OAAI,MAAM,OAAO,aAAa,OAAO,OAAO;IAC1C,MAAM,QAAQ,OAAO;AAIrB,UAAM,OAAO,YAAY;KACvB,GAAG,MAAM,OAAO;KAChB,aAAa;MACX,QAAQ,MAAM;MACd,KAAK,MAAM;MACZ;KACF;;AAEH;;AAIN,QAAO;EACL,GAAG;EACH;EACA,WAAW,KAAK;EACjB;;;;;;;;;AAUH,SAAgB,kCACd,eACA,YACuB;CACvB,MAAM,QAAQ,kBAAkB,cAAc;AAC9C,KAAI,MAAM,WAAW,EAAG,QAAO;CAG/B,MAAM,aAAa,MAAM,MAAM,MAAM,EAAE,SAAS,YAAY,EAAE,WAAW,sBAAsB;AAC/F,KAAI,CAAC,WACH,OAAM,IAAI,MACR,mCAAmC,cAAc,aAAa,sBAC5D,sBACD,CAAC,cACH;CAGH,IAAI,WAAW,aAAa,WAAW,KAAK;AAG5C,MAAK,MAAM,QAAQ,MACjB,KAAI,KAAK,SAAS,UAAU,KAAK,SAAS,WAAW,QAAQ;AAE3D,MAAI,eAAe,UAAa,KAAK,SAAS,WAC5C;EAEF,MAAM,OAAO,SAAS,KAAK,KAAK;AAChC,aAAW,oBAAoB,UAAU,KAAK;;AAIlD,QAAO;;;;;;;;A
AST,SAAgB,yBAAyB,eAA+B;CACtE,MAAM,QAAQ,kBAAkB,cAAc;AAC9C,KAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,QAAO,KAAK,IAAI,GAAG,MAAM,KAAK,MAAM,EAAE,OAAO,CAAC;;;;;;;;AAahD,SAAS,mBAAmB,UAA+B,UAAwC;AAEjG,KAAI,SAAS,SAAS,SAAS,KAAM,QAAO;AAC5C,KAAI,SAAS,aAAa,SAAS,SAAU,QAAO;AAIpD,MAAK,MAAM,QADU;EAAC;EAAS;EAAS;EAAU;EAAc;EAAS,CAEvE,MAAK,SAAS,SAAS,YAAY,SAAS,SAAS,OAAQ,QAAO;AAItE,KAAI,SAAS,mBAAmB,SAAS,eAAgB,QAAO;AAChE,KAAI,SAAS,oBAAoB,SAAS,gBAAiB,QAAO;AAElE,MAAK,SAAS,eAAe,SAAS,SAAS,eAAe,IAAK,QAAO;CAE1E,MAAM,aAAa,SAAS,iBAAiB,EAAE;CAC/C,MAAM,aAAa,SAAS,iBAAiB,EAAE;AAC/C,KAAI,WAAW,WAAW,WAAW,OAAQ,QAAO;CACpD,MAAM,gBAAgB,IAAI,IAAI,WAAW,KAAK,MAAM,CAAC,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;AAC9E,MAAK,MAAM,KAAK,YAAY;AAC1B,MAAI,CAAC,cAAc,IAAI,EAAE,MAAM,CAAE,QAAO;AACxC,OAAK,EAAE,eAAe,SAAS,cAAc,IAAI,EAAE,MAAM,IAAI,IAAK,QAAO;;CAG3E,MAAM,WAAW,SAAS;CAC1B,MAAM,WAAW,SAAS;AAC1B,KAAI,QAAQ,SAAS,KAAK,QAAQ,SAAS,CAAE,QAAO;AACpD,KAAI,YAAY,UAAU;AACxB,OAAK,SAAS,QAAQ,QAAQ,SAAS,SAAS,QAAQ,QAAQ,IAAK,QAAO;AAC5E,OAAK,SAAS,QAAQ,QAAQ,SAAS,SAAS,QAAQ,QAAQ,IAAK,QAAO;;CAG9E,MAAM,cAAc,SAAS,YAAY,EAAE;CAC3C,MAAM,cAAc,SAAS,YAAY,EAAE;AAC3C,KAAI,YAAY,WAAW,YAAY,OAAQ,QAAO;AACtD,MAAK,IAAI,IAAI,GAAG,IAAI,YAAY,QAAQ,KAAK;AAC3C,MAAI,YAAY,GAAG,OAAO,SAAS,YAAY,GAAG,OAAO,KAAM,QAAO;AACtE,MAAI,YAAY,GAAG,iBAAiB,YAAY,GAAG,aAAc,QAAO;;CAG1E,MAAM,YAAY,SAAS;CAC3B,MAAM,YAAY,SAAS;AAC3B,KAAI,QAAQ,UAAU,KAAK,QAAQ,UAAU,CAAE,QAAO;AACtD,KAAI,aAAa,WAAW;AAC1B,MAAI,UAAU,UAAU,UAAU,MAAO,QAAO;AAChD,MAAI,UAAU,aAAa,UAAU,SAAU,QAAO;AACtD,OAAK,UAAU,UAAU,SAAS,UAAU,UAAU,IAAK,QAAO;;AAGpE,KAAI,SAAS,UAAU,SAAS,MAAO,QAAO;CAE9C,MAAM,YAAY,SAAS,UAAU,EAAE;CACvC,MAAM,YAAY,SAAS,UAAU,EAAE;CACvC,MAAM,gBAAgB,OAAO,KAAK,UAAU;CAC5C,MAAM,gBAAgB,OAAO,KAAK,UAAU;AAC5C,KAAI,cAAc,WAAW,cAAc,OAAQ,QAAO;AAC1D,MAAK,MAAM,aAAa,eAAe;AACrC,MAAI,CAAC,UAAU,WAAY,QAAO;AAClC,MAAI,mBAAmB,UAAU,YAAY,UAAU,WAAW,CAAE,QAAO;;AAG7E,QAAO;;;;;;;;;;AAWT,SAAS,sBACP,UACA,WACA,UACA,UAC2B;AAE3B,KAAI,CAAC,YAAY,YAAY,SAAS,SACpC,QAAO;EACL;EACA;EACA,QAAQ;EACT;AAIH,KAAI,YAAY,YAAY,SAAS,SAAS,SAAS,KACrD,QAAO;EACL;EACA;EACA,QAAQ,2BAA2B,SAAS,KAAK,MAAM,
SAAS;EAChE,aAAa;EACb,mBAAmB;EACpB;AAIH,KAAI,YAAY,YAAY,CAAC,SAAS,YAAY,SAAS,SACzD,QAAO;EACL;EACA;EACA,QAAQ;EACT;AAIH,KAAI,YAAY,aAAa,SAAS,SAAS,YAAY,SAAS,SAAS,QAAQ;EACnF,MAAM,CAAC,UAAU,UAAU,SAAS,QAChC,CAAC,SAAS,eAAe,GACzB,CAAC,gBAAgB,QAAQ;AAC7B,SAAO;GACL;GACA;GACA,QAAQ,sBAAsB,SAAS,MAAM;GAC7C,aAAa;GACb,mBAAmB;GACpB;;AAIH,KAAI,YAAY,UAAU;EACxB,MAAM,oBAAoB,SAAS;EACnC,MAAM,oBAAoB,SAAS;AACnC,MAAI,qBAAqB,qBAAqB,sBAAsB,kBAClE,QAAO;GACL;GACA;GACA,QAAQ,wCAAwC,kBAAkB,MAAM;GACzE;;AAKL,KAAI,YAAY,YAAY,EAAE,SAAS,UAAU,WAAW,SAAS,UAAU,OAC7E,QAAO;EACL;EACA;EACA,QAAQ;EACT;AAIH,KAAI,YAAY,YAAY,SAAS,SAAS,UAAU,SAAS,SAAS,QAAQ;EAChF,MAAM,aAAa,SAAS,iBAAiB,EAAE;EAC/C,MAAM,aAAa,SAAS,iBAAiB,EAAE;EAC/C,MAAM,YAAY,WAAW,KAAK,MAAM,EAAE,MAAM;EAChD,MAAM,eAAe,IAAI,IAAI,WAAW,KAAK,MAAM,EAAE,MAAM,CAAC;EAC5D,MAAM,gBAAgB,UAAU,QAAQ,MAAM,CAAC,aAAa,IAAI,EAAE,CAAC;AACnE,MAAI,cAAc,SAAS,EACzB,QAAO;GACL;GACA;GACA,QAAQ,wBAAwB,cAAc,KAAK,KAAK;GACzD;;AAIL,QAAO;;AAWT,SAAS,UACP,KACA,QACA,UACA,UACM;AACN,KAAI,QAAQ,KAAK,OAAO;AAExB,KAAI,OAAO,WAAW;EACpB,MAAM,WAAW,sBAAsB,OAAO,UAAU,OAAO,WAAW,UAAU,SAAS;AAC7F,MAAI,SACF,KAAI,gBAAgB,KAAK,SAAS;;;AAKxC,SAAS,kBACP,KACA,UACA,UACA,UACM;CACN,MAAM,iBAAiB,IAAI,IAAI,OAAO,KAAK,SAAS,OAAO,CAAC;CAC5D,MAAM,iBAAiB,IAAI,IAAI,OAAO,KAAK,SAAS,OAAO,CAAC;AAG5D,MAAK,MAAM,aAAa,eACtB,KAAI,CAAC,eAAe,IAAI,UAAU,CAChC,WACE,KACA;EACE,MAAM;EACN;EACA;EACA,OAAO,SAAS,OAAO;EACxB,EACD,QACA,SAAS,OAAO,WACjB;AAKL,MAAK,MAAM,aAAa,eACtB,KAAI,CAAC,eAAe,IAAI,UAAU,CAChC,WACE,KACA;EACE,MAAM;EACN;EACA;EACA,QAAQ,SAAS,OAAO;EACzB,EACD,SAAS,OAAO,YAChB,OACD;AAKL,MAAK,MAAM,aAAa,gBAAgB;AACtC,MAAI,CAAC,eAAe,IAAI,UAAU,CAAE;EAEpC,MAAM,YAAY,SAAS,OAAO;EAClC,MAAM,YAAY,SAAS,OAAO;AAElC,MAAI,mBAAmB,WAAW,UAAU,CAC1C,WACE,KACA;GACE,MAAM;GACN;GACA;GACA,QAAQ;GACR,OAAO;GACR,EACD,WACA,UACD;;;;;;;;;;;AAaP,SAAS,eACP,KACA,UACA,YACA,YACM;CACN,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,cAAc,EAAE,CAAC,CAAC;CACtD,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,cAAc,EAAE,CAAC,CAAC;AAGtD,MAAK,MAAM,aAAa,QACtB,KAAI,CAAC,QAAQ,IAAI,UAAU,CACzB,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA;EACA,OAAO,WAAY;EACp
B,CAAC;AAKN,MAAK,MAAM,aAAa,QACtB,KAAI,CAAC,QAAQ,IAAI,UAAU,CACzB,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA;EACA,QAAQ,WAAY;EACrB,CAAC;AAKN,MAAK,MAAM,aAAa,QACtB,KAAI,QAAQ,IAAI,UAAU,EAAE;EAC1B,MAAM,WAAW,WAAY;EAC7B,MAAM,WAAW,WAAY;EAE7B,MAAM,eAAe,KAAK,UAAU,SAAS,OAAO,UAAU,CAAC;EAC/D,MAAM,eAAe,KAAK,UAAU,SAAS,OAAO,UAAU,CAAC;AAE/D,MAAI,iBAAiB,gBAAgB,SAAS,WAAW,SAAS,QAAQ;GACxE,MAAM,UAAoB,EAAE;AAC5B,OAAI,iBAAiB,aAAc,SAAQ,KAAK,iBAAiB;AACjE,OAAI,SAAS,WAAW,SAAS,OAAQ,SAAQ,KAAK,4BAA4B;AAClF,OAAI,QAAQ,KAAK;IACf,MAAM;IACN;IACA;IACA,QAAQ,QAAQ,KAAK,KAAK;IAC1B,QAAQ;IACR,OAAO;IACR,CAAC;;;;;;;;;;;;AAcV,SAAS,aACP,KACA,UACA,UACA,UACM;CACN,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,YAAY,EAAE,CAAC,CAAC;CACpD,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,YAAY,EAAE,CAAC,CAAC;AAGpD,MAAK,MAAM,YAAY,QACrB,KAAI,CAAC,QAAQ,IAAI,SAAS,CACxB,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA,WAAW;EACX,OAAO,SAAU;EAClB,CAAC;AAKN,MAAK,MAAM,YAAY,QACrB,KAAI,CAAC,QAAQ,IAAI,SAAS,CACxB,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA,WAAW;EACX,QAAQ,SAAU;EACnB,CAAC;AAKN,MAAK,MAAM,YAAY,QACrB,KAAI,QAAQ,IAAI,SAAS,EACvB;MAAI,SAAU,cAAc,SAAU,UACpC,KAAI,QAAQ,KAAK;GACf,MAAM;GACN;GACA,WAAW;GACX,QAAQ;GACR,QAAQ,SAAU;GAClB,OAAO,SAAU;GAClB,CAAC;;;;;;;;;;;;AAeV,SAAS,qBACP,KACA,UACA,kBACA,kBACA,kBACM;CACN,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,oBAAoB,EAAE,CAAC,CAAC;CAC5D,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,oBAAoB,EAAE,CAAC,CAAC;AAG5D,MAAK,MAAM,WAAW,QACpB,KAAI,CAAC,QAAQ,IAAI,QAAQ,CACvB,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA,kBAAkB;EAClB;EACA,OAAO,iBAAkB;EAC1B,CAAC;AAKN,MAAK,MAAM,WAAW,QACpB,KAAI,CAAC,QAAQ,IAAI,QAAQ,CACvB,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA,kBAAkB;EAClB;EACA,QAAQ,iBAAkB;EAC3B,CAAC;AAKN,MAAK,MAAM,WAAW,QACpB,KAAI,QAAQ,IAAI,QAAQ,EAAE;EACxB,MAAM,SAAS,iBAAkB;EACjC,MAAM,SAAS,iBAAkB;EAEjC,MAAM,UAAoB,EAAE;AAC5B,MAAI,OAAO,eAAe,OAAO,WAAY,SAAQ,KAAK,qBAAqB;AAC/E,MAAI,OAAO,gBAAgB,OAAO,YAAa,SAAQ,KAAK,sBAAsB;AAClF,MAAI,OAAO,gBAAgB,OAAO,YAAa,SAAQ,KAAK,sBAAsB;AAClF,MAAI,OAAO,YAAY,OAAO,QAAS,SAAQ,KAAK,kBAAkB;AAEtE,MAAI,QAAQ,SAAS,EACnB,KAAI,QAAQ,KAAK;GACf,MAAM;GACN;GACA,kBAAkB;GAClB;GACA,QAAQ,QAAQ,KAAK,KAAK;GAC1B,QAAQ;G
ACR,OAAO;GACR,CAAC;;;;;;;;;;;;;AAgBV,SAAS,mBACP,KACA,UACA,eACA,eACA,YACA,YACM;CAIN,MAAM,oBAFe,KAAK,UAAU,iBAAiB,KAAK,KACrC,KAAK,UAAU,iBAAiB,KAAK;CAM1D,MAAM,iBAFY,KAAK,UAAU,cAAc,KAAK,KAClC,KAAK,UAAU,cAAc,KAAK;AAGpD,KAAI,qBAAqB,gBAAgB;EACvC,MAAM,UAAoB,EAAE;AAC5B,MAAI,kBAAmB,SAAQ,KAAK,oBAAoB;AACxD,MAAI,eAAgB,SAAQ,KAAK,iBAAiB;AAElD,MAAI,QAAQ,KAAK;GACf,MAAM;GACN;GACA,QAAQ,GAAG,QAAQ,KAAK,QAAQ,CAAC;GACjC,QAAQ;IAAE,kBAAkB;IAAe,eAAe;IAAY;GACtE,OAAO;IAAE,kBAAkB;IAAe,eAAe;IAAY;GACtE,CAAC;;;;;;;;;AAUN,SAAgB,iBAAiB,UAA0B,SAAwC;CACjG,MAAM,MAAmB;EAAE,SAAS,EAAE;EAAE,iBAAiB,EAAE;EAAE;CAE7D,MAAM,oBAAoB,IAAI,IAAI,OAAO,KAAK,SAAS,MAAM,CAAC;CAC9D,MAAM,mBAAmB,IAAI,IAAI,OAAO,KAAK,QAAQ,MAAM,CAAC;AAG5D,MAAK,MAAM,YAAY,iBACrB,KAAI,CAAC,kBAAkB,IAAI,SAAS,CAClC,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA,OAAO,QAAQ,MAAM;EACtB,CAAC;AAKN,MAAK,MAAM,YAAY,kBACrB,KAAI,CAAC,iBAAiB,IAAI,SAAS,CACjC,KAAI,QAAQ,KAAK;EACf,MAAM;EACN;EACA,QAAQ,SAAS,MAAM;EACxB,CAAC;AAKN,MAAK,MAAM,YAAY,kBAAkB;AACvC,MAAI,CAAC,kBAAkB,IAAI,SAAS,CAAE;EAEtC,MAAM,WAAW,SAAS,MAAM;EAChC,MAAM,WAAW,QAAQ,MAAM;AAG/B,oBAAkB,KAAK,UAAU,UAAU,SAAS;AAGpD,iBAAe,KAAK,UAAU,SAAS,SAAS,SAAS,QAAQ;AAGjE,eAAa,KAAK,UAAU,SAAS,OAAO,SAAS,MAAM;AAG3D,uBACE,KACA,UACA,WACA,SAAS,sBACT,SAAS,qBACV;AACD,uBACE,KACA,UACA,YACA,SAAS,uBACT,SAAS,sBACV;AAGD,qBACE,KACA,UACA,SAAS,aAAa,QACtB,SAAS,aAAa,QACtB,SAAS,aAAa,KACtB,SAAS,aAAa,IACvB;;AAGH,QAAO;EACL,SAAS;EACT,WAAW,QAAQ;EACnB,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,SAAS,IAAI;EACb,oBAAoB,IAAI,gBAAgB,SAAS;EACjD,iBAAiB,IAAI;EACrB,yBAAyB,IAAI,gBAAgB,SAAS;EACvD;;;;;;;;;AAUH,SAAgB,8BACd,UACA,YACA,WACe;AAEf,QAAO,iBAAiB,UADA,6BAA6B,YAAY,UAAU,CACzB;;;;;;;;;;;;;AAiEpD,SAAgB,uBAAuB,eAAmD;CACxF,MAAM,SAAqC,EAAE;AAE7C,KAAI,CAACA,KAAG,WAAW,cAAc,CAE/B,QAAO;CAIT,MAAM,iBAAiB,kBAAkB,cAAc;AACvD,KAAI,eAAe,WAAW,EAE5B,QAAO;CAIT,MAAM,cAAwB,EAAE;CAChC,MAAM,YAAsB,EAAE;AAE9B,MAAK,MAAM,QAAQ,eACjB,KAAI,KAAK,SAAS,SAChB,aAAY,KAAK,KAAK,OAAO;UACpB,KAAK,SAAS,OACvB,WAAU,KAAK,KAAK,OAAO;AAK/B,KAAI,CAAC,YAAY,SAAS,sBAAsB,CAC9C,QAAO,KAAK;EACV,MAAM;EACN,SAAS,4BAA4B,sBACnC,sBACD,CAAC;
EACF,iBAAiB;EAClB,CAAC;AAIJ,MAAK,MAAM,OAAO,YAChB,KAAI,QAAQ,sBACV,QAAO,KAAK;EACV,MAAM;EACN,SAAS,kCAAkC,sBACzC,IACD,CAAC,oCAAoC,sBAAsB,sBAAsB;EAClF,iBAAiB;EAClB,CAAC;CAKN,MAAM,aAAa,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,aAAa,GAAG,UAAU,CAAC,CAAC,CAAC,MAAM,GAAG,MAAM,IAAI,EAAE;AAErF,KAAI,WAAW,WAAW,EACxB,QAAO;AAIT,MAAK,MAAM,OAAO,YAChB,KAAI,QAAQ,yBAAyB,UAAU,SAAS,IAAI,CAC1D,QAAO,KAAK;EACV,MAAM;EACN,SAAS,aAAa,sBAAsB,IAAI,CAAC;EACjD,iBAAiB;EAClB,CAAC;CAKN,MAAM,SAAS,KAAK,IAAI,GAAG,WAAW;AACtC,MAAK,IAAI,IAAI,uBAAuB,KAAK,QAAQ,IAC/C,KAAI,CAAC,WAAW,SAAS,EAAE,CACzB,QAAO,KAAK;EACV,MAAM;EACN,SAAS,aAAa,sBAAsB,EAAE,CAAC;EAC/C,iBAAiB;EAClB,CAAC;AAKN,MAAK,MAAM,OAAO,WAChB,KAAI,MAAM,yBAAyB,CAAC,UAAU,SAAS,IAAI,CACzD,QAAO,KAAK;EACV,MAAM;EACN,SAAS,aAAa,sBAAsB,IAAI,CAAC;EACjD,iBAAiB;EAClB,CAAC;AAIN,QAAO;;;;;;;;AAST,SAAgB,0BAA0B,eAAuB,WAAyB;CACxF,MAAM,SAAS,uBAAuB,cAAc;AACpD,KAAI,OAAO,SAAS,GAAG;EACrB,MAAM,gBAAgB,OAAO,KAAK,MAAM,OAAO,EAAE,UAAU,CAAC,KAAK,KAAK;AACtE,QAAM,IAAI,MACR,mDAAmD,UAAU,MAAM,gBACpE;;;;;;;;AAaL,SAAS,8BACP,YACqC;CACrC,MAAM,SAA8C,EAAE;CACtD,MAAM,eAAe,WAAW,QAAQ,UAAU,EAAE;AAEpD,MAAK,MAAM,CAAC,WAAW,gBAAgB,OAAO,QAAQ,aAAa,EAAE;EACnE,MAAM,SAA8B;GAClC,MAAM,YAAY;GAClB,UAAU,YAAY;GACvB;AAED,MAAI,YAAY,MAAO,QAAO,QAAQ;AACtC,MAAI,YAAY,MAAO,QAAO,QAAQ;AACtC,MAAI,YAAY,OAAQ,QAAO,SAAS;AACxC,MAAI,YAAY,YAAY;AAC1B,UAAO,aAAa;AACpB,OAAI,YAAY,eAAgB,QAAO,iBAAiB,YAAY;AACpE,OAAI,YAAY,gBAAiB,QAAO,kBAAkB,YAAY;;AAExE,MAAI,YAAY,iBAAiB,YAAY,cAAc,SAAS,EAClE,QAAO,gBAAgB,YAAY,cAAc,KAAK,OAAO;GAC3D,OAAO,EAAE;GACT,GAAI,EAAE,eAAe,EAAE,aAAa,EAAE,aAAa;GACpD,EAAE;AAGL,MAAI,YAAY,YAAa,QAAO,cAAc,YAAY;AAC9D,MAAI,YAAY,OAAQ,QAAO,SAAS;AAExC,MAAI,YAAY,OAAO;AACrB,UAAO,QAAQ,EAAE;AACjB,OAAI,YAAY,MAAM,QAAQ,KAC5B,QAAO,MAAM,SAAS,EAAE,MAAM,YAAY,MAAM,OAAO,MAAM;AAE/D,OAAI,YAAY,MAAM,QAAQ,KAC5B,QAAO,MAAM,SAAS,EAAE,MAAM,YAAY,MAAM,OAAO,MAAM;;AAIjE,MAAI,YAAY,YAAY,YAAY,SAAS,SAAS,EACxD,QAAO,WAAW,YAAY,SAAS,KAAK,OAAO;GACjD,QAAQ,EAAE,MAAM,EAAE,QAAQ,QAAQ,IAAI;GACtC,cAAc,EAAE,gBAAgB;GACjC,EAAE;AAGL,MAAI,YAAY,OACd,QAAO,SAAS;GACd,OAAO,OAAO,YAAY,OAAO,MAAM;GACvC,G
AAI,YAAY,OAAO,YAAY,EAAE,UAAU,OAAO,YAAY,OAAO,SAAS,EAAE;GACpF,GAAI,YAAY,OAAO,UAAU,EAAE,QAAQ,YAAY,OAAO,QAAQ;GACvE;AAGH,MAAI,YAAY,UAAU,OAAW,QAAO,QAAQ,YAAY;AAIhE,SAAO,aAAa;;AAGtB,QAAO;;;;;;;;;;AAWT,SAAS,cACP,UACA,WACA,aACA,eACoB;CACpB,MAAM,cAAwB,EAAE;AAGhC,KAAI,YAAY,SAAS,cAAc,KACrC,aAAY,KAAK,gBAAgB,YAAY,KAAK,aAAa,cAAc,OAAO;AAItF,KAAI,YAAY,aAAa,cAAc,SACzC,aAAY,KACV,oBAAoB,YAAY,SAAS,aAAa,cAAc,WACrE;CAIH,MAAM,cAAc,YAAY,SAAS;CACzC,MAAM,gBAAgB,cAAc,SAAS;AAC7C,KAAI,gBAAgB,cAClB,aAAY,KAAK,iBAAiB,YAAY,aAAa,gBAAgB;CAI7E,MAAM,eAAe,YAAY,UAAU;CAC3C,MAAM,iBAAiB,cAAc,UAAU;AAC/C,KAAI,iBAAiB,eACnB,aAAY,KAAK,kBAAkB,aAAa,aAAa,iBAAiB;CAIhF,MAAM,WAAW,YAAY,cAAc;CAC3C,MAAM,aAAa,cAAc,cAAc;AAC/C,KAAI,aAAa,WACf,aAAY,KAAK,sBAAsB,SAAS,aAAa,aAAa;AAI5E,KAAI,YAAY,mBAAmB,cAAc,eAC/C,aAAY,KACV,0BAA0B,YAAY,kBAAkB,OAAO,aAAa,cAAc,kBAAkB,SAC7G;CAGH,MAAM,gBAAgB,YAAY,iBAAiB,EAAE;CACrD,MAAM,kBAAkB,cAAc,iBAAiB,EAAE;CACzD,MAAM,sBAAsB,IAAI,IAAI,cAAc,KAAK,MAAM,EAAE,MAAM,CAAC;CACtE,MAAM,wBAAwB,IAAI,IAAI,gBAAgB,KAAK,MAAM,EAAE,MAAM,CAAC;AAC1E,KAAI,oBAAoB,SAAS,sBAAsB,KACrD,aAAY,KACV,+BAA+B,oBAAoB,KAAK,aAAa,sBAAsB,OAC5F;MACI;AACL,OAAK,MAAM,KAAK,oBACd,KAAI,CAAC,sBAAsB,IAAI,EAAE,EAAE;AACjC,eAAY,KAAK,8BAA8B,EAAE,mBAAmB;AACpE;;AAGJ,OAAK,MAAM,KAAK,sBACd,KAAI,CAAC,oBAAoB,IAAI,EAAE,EAAE;AAC/B,eAAY,KAAK,gCAAgC,EAAE,iBAAiB;AACpE;;;CAKN,MAAM,eAAe,YAAY,UAAU;CAC3C,MAAM,iBAAiB,cAAc,UAAU;AAC/C,KAAI,iBAAiB,eACnB,aAAY,KAAK,kBAAkB,aAAa,aAAa,iBAAiB;AAGhF,KAAI,YAAY,UAAU,cAAc,MACtC,aAAY,KAAK,iBAAiB,YAAY,MAAM,aAAa,cAAc,QAAQ;AAGzF,KAAI,YAAY,SAAS,EACvB,QAAO;EACL;EACA,MAAM;EACN;EACA,SAAS,YAAY,KAAK,KAAK;EAChC;AAGH,QAAO;;;;;AAMT,MAAM,gBAAgB,IAAI,IAAI,CAAC,KAAK,CAAC;;;;;;;AAQrC,SAAgB,0BACd,aACA,UACe;CACf,MAAM,SAAwB,EAAE;CAGhC,MAAM,gCAAgB,IAAI,KAAgC;AAC1D,MAAK,MAAM,cAAc,YACvB,eAAc,IAAI,WAAW,MAAM,WAAW;CAGhD,MAAM,oBAAoB,IAAI,IAAI,OAAO,KAAK,SAAS,MAAM,CAAC;CAC9D,MAAM,kBAAkB,IAAI,IAAI,cAAc,MAAM,CAAC;AAGrD,MAAK,MAAM,YAAY,kBACrB,KAAI,CAAC,gBAAgB,IAAI,SAAS,CAChC,QAAO,KAAK;EACV;EACA,MAAM;EACN,SAAS,SAAS,SAAS;EAC5B,CAAC;AAKN,MAAK,MAAM,YAAY,gBACrB,KAAI,CAAC,
kBAAkB,IAAI,SAAS,CAClC,QAAO,KAAK;EACV;EACA,MAAM;EACN,SAAS,SAAS,SAAS;EAC5B,CAAC;AAKN,MAAK,MAAM,YAAY,mBAAmB;AACxC,MAAI,CAAC,gBAAgB,IAAI,SAAS,CAAE;EAEpC,MAAM,aAAa,cAAc,IAAI,SAAS;EAC9C,MAAM,eAAe,SAAS,MAAM;EAEpC,MAAM,eAAe,8BAA8B,WAAW;EAC9D,MAAM,iBAAiB,aAAa;EAGpC,MAAM,mBAAmB,IAAI,IAC3B,OAAO,KAAK,aAAa,CAAC,QAAQ,MAAM,CAAC,cAAc,IAAI,EAAE,CAAC,CAC/D;EACD,MAAM,qBAAqB,IAAI,IAC7B,OAAO,KAAK,eAAe,CAAC,QAAQ,MAAM,CAAC,cAAc,IAAI,EAAE,CAAC,CACjE;AAGD,OAAK,MAAM,aAAa,mBACtB,KAAI,CAAC,iBAAiB,IAAI,UAAU,CAClC,QAAO,KAAK;GACV;GACA,MAAM;GACN;GACA,SAAS,UAAU,UAAU;GAC9B,CAAC;AAKN,OAAK,MAAM,aAAa,iBACtB,KAAI,CAAC,mBAAmB,IAAI,UAAU,CACpC,QAAO,KAAK;GACV;GACA,MAAM;GACN;GACA,SAAS,UAAU,UAAU;GAC9B,CAAC;AAKN,OAAK,MAAM,aAAa,oBAAoB;AAC1C,OAAI,CAAC,iBAAiB,IAAI,UAAU,CAAE;GAEtC,MAAM,QAAQ,cACZ,UACA,WACA,aAAa,YACb,eAAe,WAChB;AACD,OAAI,MACF,QAAO,KAAK,MAAM;;;AAKxB,QAAO;;;;;;;AAQT,SAAgB,mBAAmB,QAA+B;AAChE,KAAI,OAAO,WAAW,EACpB,QAAO;CAGT,MAAM,QAAkB,EAAE;CAG1B,MAAM,+BAAe,IAAI,KAA4B;AACrD,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,WAAW,aAAa,IAAI,MAAM,SAAS,IAAI,EAAE;AACvD,WAAS,KAAK,MAAM;AACpB,eAAa,IAAI,MAAM,UAAU,SAAS;;AAG5C,MAAK,MAAM,CAAC,UAAU,eAAe,cAAc;AACjD,QAAM,KAAK,WAAW,SAAS,IAAI;AACnC,OAAK,MAAM,SAAS,WAClB,KAAI,MAAM,UACR,OAAM,KAAK,gBAAgB,MAAM,UAAU,KAAK,MAAM,UAAU;MAEhE,OAAM,KAAK,SAAS,MAAM,UAAU;;AAK1C,QAAO,MAAM,KAAK,KAAK;;;;;;;;;;;;;;;;;;;;;;;ACz+DzB,eAAsB,sBACpB,YACA,WACA,iBACgC;CAEhC,MAAM,YAAY,KAAK,QAAQ,YAAY,EAAE,aAAa;AAC1D,MAAG,UAAU,WAAW,EAAE,WAAW,MAAM,CAAC;CAG5C,MAAM,YAAY,KAAK,KAAK,WAAW,aAAa,UAAU,GAAG,gBAAgB,WAAW;CAC5F,MAAM,aAAa,KAAK,KAAK,WAAW,aAAa,UAAU,GAAG,gBAAgB,KAAK;CAMvF,MAAM,eAAe,EAAY;8CAJN,KAAK,QAAQ,WAAW,CAAC,QAAQ,OAAO,IAAI,CAKR;;;;;;;;;;;0BAWvC,UAAU;;;;;;;AAOlC,MAAG,cAAc,WAAW,aAAa;CAEzC,IAAI;AACJ,KAAI;AACF,aAAW,MAAM,iBAAiB;SAC5B;AACN,aAAW;;AAIb,OAAM,SAAS,MACb,SAAS,aAAa;EACpB,OAAO;EACP,QAAQ;GACN,MAAM;GACN,QAAQ;GACR,WAAW;GACX,QAAQ;GACR,sBAAsB;GACtB,SAAS,EACP,UAAU,YACX;GACF;EACD,UAAU,CAAC,WAAW;EACtB,SAAS,EACP,gBAAgB,CAAC,QAAQ,SAAS,EACnC;EACD;EACA,WAAW;GACT,mBAAmB;GACnB,aAAa;GACb,0BAA0B;GAC3B;EACD,UAAU;EACX,CAAC,CACH;AAOD
,QAAO;EACL;EACA;EACA,aAPkBC,KAAG,aAAa,YAAY,QAAQ;EAQvD;;;;;;;;AC9FH,MAAa,yBAAyB;;;;AAKtC,MAAa,sBAAsB;;;;;;AA4DnC,SAAgB,0BAA0B,OAA8B;AACtE,KAAI,CAAC,MAAM,WAAW,uBAAuB,CAAE,QAAO;CACtD,MAAM,SAAS,MAAM,MAAM,EAA8B;CACzD,MAAM,MAAM,SAAS,QAAQ,GAAG;AAChC,QAAO,MAAM,IAAI,GAAG,OAAO;;;;;;;;;;;;;;AC3E7B,MAAa,wBAAwB;;;;;;;;;;;;;AA4DrC,eAAsBC,mBACpB,QACA,aACA,aACA,eAAuB,uBACO;AAC9B,QAAO,MAAM;EACX,MAAM,EAAE,cAAc,MAAM,OAAO,qBAAqB;GACtD;GACA;GACD,CAAC;AAEF,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,cAAc,YAAY,cAAc;AAI1D,MACE,UAAU,WAAW,yBAAyB,WAC9C,UAAU,WAAW,yBAAyB,OAE9C,QAAO;GACL,QAAQ,UAAU;GAClB,MAAM,UAAU;GAChB,QAAQ,UAAU;GACnB;AAIH,QAAM,IAAI,SAAS,cAAY,WAAWC,WAAS,aAAa,CAAC;;;;;;;;;;;;;AAcrE,eAAsB,cACpB,SACgC;CAChC,MAAM,EAAE,QAAQ,aAAa,MAAM,MAAM,YAAK,SAAS,iBAAiB;CAGxE,MAAM,WAAW,MAAM,OAAO,eAAe;EAC3C;EACA;EACA;EACA,KAAKC,SAAO,KAAK,UAAU,EAAE,CAAC;EAC9B;EACD,CAAC;CACF,MAAM,cAAc,SAAS;CAG7B,MAAM,SAAS,MAAMF,mBAAiB,QAAQ,aAAa,aAAa,aAAa;AAErF,KAAI,OAAO,WAAW,yBAAyB,QAC7C,QAAO;EACL,SAAS;EACT,MAAM,OAAO;EACb,QAAQ,OAAO;EAChB;KAED,QAAO;EACL,SAAS;EACT,MAAM,OAAO;EACb,QAAQ,OAAO,UAAU,SAAS;EAClC,OAAO,OAAO,UAAU,SAAS,UAAU;EAC5C;;;;;;;;;;;;;;;;;AC9EL,eAAe,0BACb,QACA,aACA,WACiB;AACjB,KAAI;EACF,MAAMG,QAAM,GAAG,UAAU,YAAY,CAAC,YAAY;EAElD,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAAE,YAAK,CAAC;EAEtD,MAAM,QAAQ,UAAU,OAAO;AAE/B,MAAI,CAAC,MACH,QAAO;AAGT,SADY,0BAA0B,MAAM,IAC9B;SACR;AACN,SAAO;;;;;;;;;;AAWX,eAAsB,wBACpB,QACA,aACA,0BAC6B;CAC7B,MAAM,oBAAwC,EAAE;AAEhD,MAAK,MAAM,EAAE,WAAW,mBAAmB,0BAA0B;EAEnE,MAAM,mBAAmB,MAAM,0BAA0B,QAAQ,aAAa,UAAU;EAGxF,MAAM,iBAAiB,kBAAkB,cAAc;AAGvD,OAAK,MAAM,QAAQ,gBAAgB;AACjC,OAAI,KAAK,UAAU,iBACjB;GAIF,MAAM,WAAW,qBAAqB,eAAe,KAAK,QAAQ,OAAO;AACzE,OAAI,CAACC,KAAG,WAAW,SAAS,CAC1B;GAIF,MAAM,OAAO,SAAS,SAAS;GAG/B,MAAM,aAAa,qBAAqB,eAAe,KAAK,QAAQ,UAAU;AAC9E,OAAI,KAAK,2BAA2B,CAACA,KAAG,WAAW,WAAW,EAAE;AAC9D,WAAO,KACL,aAAa,UAAU,GAAG,KAAK,OAAO,6CACvC;AACD;;AAGF,qBAAkB,KAAK;IACrB,QAAQ,KAAK;IACb;IACA;IACA;IACA;IACA;IACD,CAAC;;;AAKN,mBAAkB,MAAM,GAAG,MAAM;AAC/B,MAAI,EAAE,cAAc,EAAE,UACpB,QAAO,EAAE,UAAU,cAAc,EAAE,UAAU;AAE/C,SAAO,EAAE,SAAS,
EAAE;GACpB;AAEF,QAAO;;;;;;;;AAaT,eAAe,uBACb,SACA,WAC0B;CAC1B,MAAM,EAAE,QAAQ,aAAa,gBAAgB;CAY7C,MAAM,SAAS,MAAM,cAAc;EACjC;EACA;EACA,MAboB,aAAa,UAAU,UAAU,GAAG,sBAAsB,UAAU,OAAO,CAAC;EAchG,OAXmB,MAAM,sBACzB,UAAU,YACV,UAAU,WACV,UAAU,OACX,EAOoB;EACnB,SAAS;EACV,CAAC;AAEF,QAAO;EACL,WAAW,UAAU;EACrB,iBAAiB,UAAU;EAC3B,SAAS,OAAO;EAChB,MAAM,OAAO;EACb,OAAO,OAAO;EACf;;;;;;;;;;AAWH,eAAsB,qBACpB,QACA,aACA,WACA,iBACe;CACf,MAAMD,QAAM,GAAG,UAAU,YAAY,CAAC,YAAY;CAGlD,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAAE,YAAK,CAAC;CACtD,MAAM,iBAAiB,UAAU,UAAU,EAAE;CAE7C,MAAM,WAAW,IAAI,sBAAsB,gBAAgB;AAG3D,OAAM,OAAO,YAAY;EACvB;EACA,QAAQ;GACN,GAAG;IACF,sBAAsB;GACxB;EACF,CAAC;;;;;;;;AASJ,eAAsB,kBACpB,SACA,YACe;CAEf,MAAM,wBAAwB,WAAW,QAAQ,MAAM,EAAE,KAAK,wBAAwB;AAEtF,KAAI,sBAAsB,WAAW,EACnC;CAIF,MAAM,wBAAwB,2BAA2B,sBAAsB;AAG/E,MAAK,MAAM,CAAC,WAAW,wBAAwB,uBAAuB;EAEpE,MAAM,kBADW,QAAQ,SAAS,YACA;EAGlC,MAAM,kBAAkB,wBAAwB,iBAAiB,QAAQ,aAAa;AACtF,MAAI,CAAC,gBACH,OAAM,IAAI,MACR,mEAAmE,UAAU,kGAE9E;EAIH,MAAM,cAAc,OAAO,mBAAmB;GAC5C,WAAW,QAAQ;GACnB;GACD,CAAC;EAEF,MAAM,UAAqC;GACzC,QAAQ,QAAQ;GAChB,aAAa,QAAQ;GACrB;GACD;AAED,SAAO,KAAK,uBAAuB,OAAO,KAAK,gBAAgB,CAAC,kBAAkB,UAAU,GAAG;AAE/F,OAAK,MAAM,aAAa,qBAAqB;GAC3C,MAAM,iBAAiB,GAAG,UAAU,UAAU,GAAG,sBAAsB,UAAU,OAAO;GACxF,MAAM,UAAU,IAAI;IAClB,MAAM,uBAAuB,eAAe;IAC5C,YAAY;IACb,CAAC,CAAC,OAAO;GAEV,MAAM,SAAS,MAAM,uBAAuB,SAAS,UAAU;AAE/D,OAAI,OAAO,SAAS;AAClB,YAAQ,QAAQ,aAAa,eAAe,yBAAyB;AAGrE,QAAI,OAAO,QAAQ,OAAO,KAAK,MAAM,CACnC,QAAO,IAAI,UAAU,OAAO,OAAO;UAEhC;AACL,YAAQ,KAAK,aAAa,eAAe,SAAS;AAClD,QAAI,OAAO,KACT,QAAO,MAAM,UAAU,OAAO,OAAO;AAEvC,UAAM,IAAI,MAAM,OAAO,SAAS,mBAAmB;;;;;;;;;;;;;;;AAgB3D,SAAgB,wBACd,iBACA,cACoB;AAEpB,KAAI,iBAAiB,YACnB,QAAO,gBAAgB;AAIzB,KAAI,gBAAgB,aAAa,SAAS,EACxC,QAAO,aAAa;;;;;;;AAWxB,SAAgB,2BACd,YACiC;CACjC,MAAM,0BAAU,IAAI,KAAiC;AACrD,MAAK,MAAM,aAAa,YAAY;EAClC,MAAM,WAAW,QAAQ,IAAI,UAAU,UAAU,IAAI,EAAE;AACvD,WAAS,KAAK,UAAU;AACxB,UAAQ,IAAI,UAAU,WAAW,SAAS;;AAE5C,QAAO;;;;;;;;;;;;ACzPT,eAAe,iBACb,QACA,aACA,WAC8B;AAC9B,QAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,MAAI;GACF,MAAM,EAAE,eAAe,kBAAkB,
MAAM,OAAO,kBAAkB;IACtE;IACA,eAAe;IACf;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,eAAe,cAAc;WAC9B,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;;;;;;;;;AAUJ,eAAe,yBACb,QACA,aACA,WACwB;AACxB,KAAI;EACF,MAAME,QAAM,GAAG,UAAU,YAAY,CAAC,YAAY;EAClD,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAAE,YAAK,CAAC;EACtD,MAAM,QAAQ,UAAU,SAAS;AACjC,MAAI,CAAC,MAAO,QAAO;EACnB,MAAM,QAAQ,MAAM,MAAM,WAAW;AACrC,SAAO,QAAQ,SAAS,MAAM,IAAI,GAAG,GAAG;SAClC;AACN,SAAO;;;;;;;;;;AAWX,eAAe,mBACb,QACA,aACA,0BAC2C;CAC3C,MAAM,UAA4C,EAAE;AAEpD,MAAK,MAAM,EAAE,WAAW,mBAAmB,0BAA0B;EAEnE,MAAM,wBAAwB,MAAM,yBAAyB,QAAQ,aAAa,UAAU;AAI5F,MAAI,0BAA0B,MAAM;AAClC,WAAQ,KAAK;IACX;IACA,uBAAuB;IACvB,QAAQ,EAAE;IACV,UAAU;IACX,CAAC;AACF;;EAIF,MAAM,mBAAmB,kCACvB,eACA,sBACD;AACD,MAAI,CAAC,kBAAkB;AAErB,WAAQ,KAAK;IACX;IACA;IACA,QAAQ,EAAE;IACV,UAAU;IACX,CAAC;AACF;;EAOF,MAAM,SAAS,0BAHK,MAAM,iBAAiB,QAAQ,aAAa,UAAU,EAGpB,iBAAiB;AAEvE,UAAQ,KAAK;GACX;GACA;GACA;GACA,UAAU,OAAO,SAAS;GAC3B,CAAC;;AAGJ,QAAO;;;;;;;AAQT,SAAS,gCAAgC,SAAmD;CAC1F,MAAM,QAAkB,EAAE;AAE1B,MAAK,MAAM,UAAU,SAAS;AAC5B,MAAI,CAAC,OAAO,SAAU;AAEtB,QAAM,KAAK,cAAc,OAAO,YAAY;AAC5C,QAAM,KAAK,uBAAuB,sBAAsB,OAAO,sBAAsB,GAAG;AACxF,QAAM,KAAK,iBAAiB;AAC5B,QAAM,KAAK,mBAAmB,OAAO,OAAO,CAAC;AAC7C,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;;;;;;;;;;AAgBzB,eAAe,4BACb,QACA,aACA,kBACA,QACA,eAC6B;CAE7B,MAAM,2BAA2B,4BAA4B,QAD3C,KAAK,QAAQ,OAAO,KAAK,CACoC;CAC/E,IAAI,oBAAwC,EAAE;AAE9C,KAAI,yBAAyB,SAAS,GAAG;AAEvC,OAAK,MAAM,EAAE,WAAW,mBAAmB,yBACzC,2BAA0B,eAAe,UAAU;AAIrD,MAAI,CAAC,eAAe;GAElB,MAAM,mBAAmB,MAAM,oBAC7B,kBACA,yBACD;AAGD,OAFiB,iBAAiB,MAAM,MAAM,EAAE,QAAQ,EAE1C;AACZ,WAAO,MAAM,2DAA2D;AACxE,WAAO,IAAI,4BAA4B,iBAAiB,CAAC;AACzD,WAAO,SAAS;AAChB,WAAO,KAAK,0EAA0E;AACtF,WAAO,KAAK,iDAAiD;AAC7D,UAAM,IAAI,MAAM,gCAAgC;;GAIlD,MAAM,4BAA4B,MAAM,mBACtC,QACA,aACA,yBACD;AAGD,OAFuB,0BAA0B,MAAM,MAAM,EAAE,SAAS,EAEpD;AAClB,WAAO,MAAM,gCAAgC;AAC7C,WAAO,IAAI,gCAAgC,0BAA0B,CAAC;AACtE,WAAO,SAAS;AAChB,WAAO,KAAK,qBAAqB;AACjC,WAAO,KAAK,sDAAsD,EAAE,MAAM,SAAS,CAAC;AACpF,WAAO,KAAK,gDAAgD,EAAE,MAAM,SAA
S,CAAC;AAC9E,WAAO,KAAK,wCAAwC,EAAE,MAAM,SAAS,CAAC;AACtE,WAAO,SAAS;AAChB,WAAO,KAAK,gEAAgE;AAC5E,UAAM,IAAI,MAAM,oCAAoC;;;AAKxD,sBAAoB,MAAM,wBACxB,QACA,aACA,yBACD;AAED,MAAI,kBAAkB,SAAS,GAAG;AAChC,UAAO,SAAS;GAGhB,MAAM,cAAc,kBAAkB,QAAQ,MAAM,EAAE,KAAK,wBAAwB;GACnF,MAAM,iBAAiB,kBAAkB,QAAQ,MAAM,CAAC,EAAE,KAAK,wBAAwB;AAEvF,UAAO,KAAK,YAAY,kBAAkB,OAAO,gBAAgB;AACjE,OAAI,eAAe,SAAS,EAC1B,QAAO,KACL,OAAO,eAAe,OAAO,mEAC7B,EAAE,MAAM,SAAS,CAClB;AAEH,OAAI,YAAY,SAAS,EACvB,QAAO,KACL,OAAO,YAAY,OAAO,2DAC1B,EAAE,MAAM,SAAS,CAClB;;;AAKP,QAAO;;;;;;;;;AAUT,SAAS,gCACP,QACA,kBACA,4BACkB;CAClB,MAAM,cAAc,iBAAiB,YAAY;AACjD,KAAI,CAAC,YACH,OAAM,IAAI,MAAM,+DAA+D;CAGjF,MAAM,cAAiE,EAAE;AACzE,MAAK,MAAM,aAAa,2BACtB,KAAI,EAAE,UAAU,aAAa,aAC3B,aAAY,UAAU,aAAa,iBAAiB,OAAO,KAAK,UAAU;AAM9E,QAAO;EACL;EACA,aAAa,iBAAiB;EAC9B,eAAe,YAAY,OAAO;EAClC,cAAc,YAAY,OAAO,eAC7B,OAAO,KAAK,YAAY,OAAO,aAAa,GAC5C;EACJ,UAAU;EACX;;;;;;;;AASH,eAAsB,cACpB,QACA,QACA,QAAuC,iBACxB;CACf,MAAM,EAAE,WAAW,SAAS,qBAAqB;AAEjD,KAAI,UAAU,iBAAiB;EAC7B,IAAI,oBAAwC,EAAE;EAI9C,MAAM,mCAAmB,IAAI,KAA2C;AACxE,OAAK,MAAM,YAAY,iBAAiB,YAAY,kBAAkB;GACpE,MAAM,QAAQ,SAAS;AACvB,OAAI,MACF,kBAAiB,IAAI,SAAS,WAAW,MAAM;;AAInD,sBAAoB,MAAM,4BACxB,QACA,iBAAiB,aACjB,kBACA,iBAAiB,QACjB,iBAAiB,cAClB;AAED,MAAI,kBAAkB,SAAS,GAAG;AAKhC,kBAAe,OAAO;AACtB,oBAAiB,OAAO;AAGxB,SAAM,wBAAwB,QAAQ,UAAU;GAEhD,MAAM,6BAA6B,kBAAkB,QAClD,MAAM,EAAE,KAAK,wBACf;GAGD,MAAM,eACJ,2BAA2B,SAAS,IAChC,gCAAgC,QAAQ,kBAAkB,2BAA2B,GACrF;AAGN,OAAI,2BAA2B,SAAS,GAAG;AACzC,WAAO,KAAK,aAAa,2BAA2B,OAAO,uBAAuB;AAClF,WAAO,SAAS;;AAGlB,QAAK,MAAM,aAAa,mBAAmB;AAEzC,UAAM,+BAA+B,QAAQ,WAAW,UAAU;AAGlE,QAAI,UAAU,KAAK,2BAA2B,aAC5C,OAAM,kBAAkB,cAAc,CAAC,UAAU,CAAC;AAIpD,UAAM,gCAAgC,QAAQ,WAAW,UAAU;AAGnE,UAAM,qBACJ,QACA,iBAAiB,aACjB,UAAU,WACV,UAAU,OACX;;AAGH,OAAI,2BAA2B,SAAS,GAAG;AACzC,WAAO,SAAS;AAChB,WAAO,QAAQ,8CAA8C;;GAI/D,MAAM,gCAAgC,UAAU,cAAc,QAAQ,QAAQ,QAAQ;IACpF,MAAM,UAAU,GAAG,IAAI,QAAQ,cAAc,GAAG,IAAI;AACpD,WAAO,CAAC,iBAAiB,eAAe,IAAI,QAAQ;KACpD;AACF,OAAI,8BAA8B,SAAS,EACzC,OAAM,QAAQ,IACZ,8BAA8B,KAAK,QACjC,OAAO,4BAA4B,IAAI,QAAQ,CA
ChD,CACF;SAEE;AAGL,SAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,aAAW;AACjD,UAAM,OAAO,sBAAsBC,SAAO,QAAQ;AAClD,UAAM,OAAO,YAAYA,SAAO,YAAY;KAC5C,EACF,GAAG,UAAU,QAAQ,QAAQ,KAAK,WAAW,OAAO,YAAY,OAAO,YAAY,CAAC,CACrF,CAAC;AAGF,OAAI;AACF,UAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,KAAK,QAAQ,KAAK,aAAW,OAAO,mBAAmBA,SAAO,QAAQ,CAAC,EACpF,GAAG,UAAU,KAAK,QAAQ,KAAK,WAAW,OAAO,mBAAmB,OAAO,QAAQ,CAAC,CACrF,CAAC;YACK,OAAO;AACd,kCAA8B,OAAO,CACnC,2EACA,yFACD,CAAC;;AAIJ,SAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,cAAc,QAAQ,KAAK,aACtC,OAAO,4BAA4BA,SAAO,QAAQ,CACnD,EACD,GAAG,UAAU,cAAc,QAAQ,KAAK,WACtC,OAAO,4BAA4B,OAAO,QAAQ,CACnD,CACF,CAAC;AAIF,SAAM,QAAQ,IACZ,UAAU,cAAc,QAAQ,KAAK,QACnC,OAAO,4BAA4B,IAAI,QAAQ,CAChD,CACF;AACD,SAAM,QAAQ,IACZ,UAAU,KAAK,QAAQ,KAAK,QAAQ,OAAO,mBAAmB,IAAI,QAAQ,CAAC,CAC5E;;YAEM,UAAU,oBAAoB;AAEvC,QAAM,QAAQ,IACZ,UAAU,cAAc,QAAQ,KAAK,QAAQ,OAAO,4BAA4B,IAAI,QAAQ,CAAC,CAC9F;AACD,QAAM,QAAQ,IAAI,UAAU,KAAK,QAAQ,KAAK,QAAQ,OAAO,mBAAmB,IAAI,QAAQ,CAAC,CAAC;YACrF,UAAU,kBAEnB,OAAM,QAAQ,IACZ,UAAU,QAAQ,QAAQ,KAAK,QAAQ,OAAO,sBAAsB,IAAI,QAAQ,CAAC,CAClF;;;;;;;AAaL,SAAS,8BAA8B,OAAgB,UAA2B;AAChF,KACE,iBAAiB,gBACjB,MAAM,SAAS,KAAK,sBACpB,MAAM,QAAQ,SAAS,qEAAqE,EAC5F;AACA,SAAO,MACL,0FACD;AACD,SAAO,SAAS;AAChB,OAAK,MAAM,WAAW,SACpB,QAAO,KAAK,QAAQ;;AAGxB,OAAM;;;;;;;AAiBR,SAAS,wBAAwB,mBAA2D;CAC1F,MAAM,sBAA0B,IAAI,KAAK;AAEzC,MAAK,MAAM,aAAa,kBACtB,MAAK,MAAM,UAAU,UAAU,KAAK,QAElC,KACE,OAAO,SAAS,iBAChB,OAAO,SAAS,oBAChB,OAAO,SAAS,iBAChB;AACA,MAAI,CAAC,OAAO,UAAW;AAEvB,MAAI,CAAC,IAAI,IAAI,OAAO,SAAS,CAC3B,KAAI,IAAI,OAAO,0BAAU,IAAI,KAAK,CAAC;AAErC,MAAI,IAAI,OAAO,SAAS,CAAE,IAAI,OAAO,WAAW,OAAO;;AAK7D,QAAO;;;;;;;AAiBT,SAAS,kCACP,QACA,aACM;AACN,MAAK,MAAM,CAAC,WAAW,WAAW,aAAa;EAC7C,MAAM,QAAQ,OAAO;AACrB,MAAI,CAAC,MAAO;EAEZ,MAAM,SAAS,OAAO;EACtB,MAAM,QAAQ,OAAO;AAErB,MAAI,OAAO,SAAS,iBAAiB,OAAO,SAC1C,OAAM,WAAW;AAGnB,MAAI,OAAO,SAAS,iBAClB;AAIF,MAAI,CAAC,QAAQ,YAAY,OAAO,SAC9B,OAAM,WAAW;AAInB,MAAI,EAAE,QAAQ,UAAU,WAAW,OAAO,UAAU,OAClD,OAAM,SAAS;AAIjB,MAAI,QAAQ,iBAAiB,OAAO,eAAe;GACjD,MAAM,cAAc,IAAI,IAAI,MAAM,cAAc,KAAK,MAAM,EAAE,MAAM,CAAC;AAEpE,OADsB,OAAO
,cAAc,QAAQ,MAAM,CAAC,YAAY,IAAI,EAAE,MAAM,CAAC,CACjE,SAAS,GAAG;IAE5B,MAAM,2BAAW,IAAI,KAAqB;AAC1C,SAAK,MAAM,KAAK,OAAO,cACrB,UAAS,IAAI,EAAE,OAAO,EAAE,eAAe,GAAG;AAE5C,SAAK,MAAM,KAAK,MAAM,cACpB,KAAI,CAAC,SAAS,IAAI,EAAE,MAAM,CACxB,UAAS,IAAI,EAAE,OAAO,EAAE,eAAe,GAAG;AAG9C,UAAM,gBAAgB,MAAM,KAAK,SAAS,SAAS,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB;KAClF;KACA;KACD,EAAE;;;;;;;;;;AAiBX,SAAS,qBAAqB,WAA0C;CACtE,MAAM,4BAAY,IAAI,KAAa;AACnC,MAAK,MAAM,UAAU,UAAU,KAAK,QAClC,WAAU,IAAI,OAAO,SAAS;AAEhC,QAAO;;;;;;;AAQT,SAAS,oBAAoB,WAA0C;CACrE,MAAM,4BAAY,IAAI,KAAa;AACnC,MAAK,MAAM,UAAU,UAAU,KAAK,QAClC,KAAI,OAAO,SAAS,eAClB,WAAU,IAAI,OAAO,SAAS;AAGlC,QAAO;;;;;;;;AAST,eAAe,wBACb,QACA,WACe;AACf,OAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,QAAQ,IAAI,OAAO,aAAW;AACjD,QAAM,OAAO,sBAAsBA,SAAO,QAAQ;AAClD,QAAM,OAAO,YAAYA,SAAO,YAAY;GAC5C,EACF,GAAG,UAAU,QAAQ,QAAQ,KAAK,WAAW,OAAO,YAAY,OAAO,YAAY,CAAC,CACrF,CAAC;;;;;AAMJ,MAAM,iBAAiB;CACrB,yBAAS,IAAI,KAAa;CAC1B,yBAAS,IAAI,KAAa;CAC1B,yCAAyB,IAAI,KAAa;CAC1C,QAAQ;AACN,OAAK,QAAQ,OAAO;AACpB,OAAK,QAAQ,OAAO;AACpB,OAAK,wBAAwB,OAAO;;CAEvC;;;;;;;;AASD,eAAe,+BACb,QACA,WACA,WACe;CAEf,MAAM,kBAAkB,wBAAwB,CAAC,UAAU,CAAC;CAC5D,MAAM,gBAAgB,qBAAqB,UAAU;CACrD,MAAM,yBAAyB,IAAI,IAAI,eAAe,QAAQ;AAG9D,OAAM,QAAQ,IAAI;EAEhB,GAAG,UAAU,KAAK,QACf,QAAQ,aAAW;GAClB,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAC9C,UAAO,YAAY,cAAc,IAAI,SAAS,IAAI,CAAC,uBAAuB,IAAI,SAAS;IACvF,CACD,KAAK,aAAW;GACf,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAC9C,OAAI,SAAU,gBAAe,QAAQ,IAAI,SAAS;GAElD,MAAM,cAAc,WAAW,gBAAgB,IAAI,SAAS,GAAG;AAE/D,OAAI,CAAC,eAAe,YAAY,SAAS,EACvC,QAAO,OAAO,mBAAmBA,SAAO,QAAQ;GAIlD,MAAM,gBAAgB,gBAAgBA,SAAO,QAAQ;AACrD,OAAI,cAAc,cAAc,QAAQ,OACtC,mCAAkC,cAAc,aAAa,OAAO,QAAQ,YAAY;AAG1F,UAAO,OAAO,mBAAmB,cAAc;IAC/C;EAEJ,GAAG,UAAU,KAAK,QACf,QAAQ,aAAW;GAClB,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAC9C,UAAO,YAAY,cAAc,IAAI,SAAS,IAAI,uBAAuB,IAAI,SAAS;IACtF,CACD,KAAK,aAAW;GACf,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAC9C,OAAI,SAAU,gBAAe,QAAQ,IAAI,SAAS;GAElD,MAAM,cAAc,WAAW,gBAAgB,IAAI,SAAS,GAAG;AAE/D,OAAI,CAAC,eAAe,YAAY,SAAS,EACvC,QAAO,OAAO,mBAAmB;IAC/B,aAAaA,SAAO,QAAQ;IAC5B,eAAeA,SAAO,Q
AAQ;IAC9B,cAAcA,SAAO,QAAQ;IAC9B,CAAC;GAGJ,MAAM,gBAAgB,gBAAgBA,SAAO,QAAQ;AACrD,OAAI,cAAc,cAAc,QAAQ,OACtC,mCAAkC,cAAc,aAAa,OAAO,QAAQ,YAAY;AAG1F,UAAO,OAAO,mBAAmB;IAC/B,aAAaA,SAAO,QAAQ;IAC5B,eAAeA,SAAO,QAAQ;IAC9B,cAAc,cAAc;IAC7B,CAAC;IACF;EAEJ,GAAG,UAAU,KAAK,QACf,QAAQ,WAAW;GAClB,MAAM,WAAW,OAAO,QAAQ,cAAc;AAC9C,UAAO,YAAY,cAAc,IAAI,SAAS;IAC9C,CACD,KAAK,WAAW;GACf,MAAM,WAAW,OAAO,QAAQ,cAAc;AAC9C,OAAI,SAAU,gBAAe,QAAQ,IAAI,SAAS;GAElD,MAAM,cAAc,WAAW,gBAAgB,IAAI,SAAS,GAAG;AAE/D,OAAI,CAAC,eAAe,YAAY,SAAS,EACvC,QAAO,OAAO,mBAAmB,OAAO,QAAQ;GAIlD,MAAM,gBAAgB,gBAAgB,OAAO,QAAQ;AACrD,OAAI,cAAc,cAAc,QAAQ,OACtC,mCAAkC,cAAc,aAAa,OAAO,QAAQ,YAAY;AAG1F,UAAO,OAAO,mBAAmB,cAAc;IAC/C;EACL,CAAC;AAGF,KAAI,CAAC,eAAe,wBAAwB,IAAI,UAAU,UAAU,EAAE;EACpE,MAAM,mCAAmC,UAAU,cAAc,QAAQ,QACtE,aAAWA,SAAO,QAAQ,kBAAkB,UAAU,UACxD;EACD,MAAM,mCAAmC,UAAU,cAAc,QAAQ,QACtE,WAAW,OAAO,QAAQ,kBAAkB,UAAU,UACxD;EACD,MAAM,yBAAyB,IAAI,IACjC,iCAAiC,KAAK,aAAWA,SAAO,KAAK,CAC9D;EACD,MAAM,qBAAqB,UAAU,KAAK,QAAQ,QAAQ,aAAW;GACnE,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAE9C,UADsBA,SAAO,QAAQ,kBAEjB,UAAU,aAC5B,YACA,uBAAuB,IAAI,SAAS,IACpC,CAAC,eAAe,QAAQ,IAAI,SAAS;IAEvC;AACF,MAAI,mBAAmB,SAAS,EAC9B,OAAM,QAAQ,IACZ,mBAAmB,KAAK,aAAW;GACjC,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAC9C,OAAI,SAAU,gBAAe,QAAQ,IAAI,SAAS;AAClD,UAAO,OAAO,mBAAmBA,SAAO,QAAQ;IAChD,CACH;AAEH,iBAAe,wBAAwB,IAAI,UAAU,UAAU;AAC/D,QAAM,QAAQ,IAAI,CAChB,GAAG,iCAAiC,KAAK,aACvC,OAAO,4BAA4BA,SAAO,QAAQ,CACnD,EACD,GAAG,iCAAiC,KAAK,WACvC,OAAO,4BAA4B,OAAO,QAAQ,CACnD,CACF,CAAC;;;;;;AAON,MAAM,mBAAmB;CACvB,uBAAO,IAAI,KAAa;CACxB,gCAAgB,IAAI,KAAa;CACjC,QAAQ;AACN,OAAK,MAAM,OAAO;AAClB,OAAK,eAAe,OAAO;;CAE9B;;;;;;;;AASD,eAAe,gCACb,QACA,WACA,WACe;CAEf,MAAM,kBAAkB,wBAAwB,CAAC,UAAU,CAAC;CAC5D,MAAM,gBAAgB,qBAAqB,UAAU;CACrD,MAAM,mBAAmB,oBAAoB,UAAU;AAIvD,KAAI;AACF,QAAM,QAAQ,IAAI,CAEhB,GAAG,UAAU,KAAK,QACf,QAAQ,aAAW;GAClB,MAAM,WAAWA,SAAO,QAAQ,cAAc;AAC9C,UAAO,YAAY,cAAc,IAAI,SAAS,IAAI,gBAAgB,IAAI,SAAS;IAC/E,CACD,KAAK,aACJ,OAAO,mBAAmB;GACxB,aAAaA,SAAO,QAAQ;GAC5B,eAAeA,SAAO,QAAQ;GAC9B,cAAcA,SAAO,QAAQ;GAC9B,CAAC,CACH,EAEH,GAAG,UAAU,KAAK,Q
ACf,QAAQ,WAAW;GAClB,MAAM,WAAW,OAAO,QAAQ,cAAc;AAC9C,UAAO,YAAY,cAAc,IAAI,SAAS,IAAI,gBAAgB,IAAI,SAAS;IAC/E,CACD,KAAK,WAAW,OAAO,mBAAmB,OAAO,QAAQ,CAAC,CAC9D,CAAC;UACK,OAAO;AACd,gCAA8B,OAAO,CACnC,wFACA,gFACD,CAAC;;AAIJ,KAAI,iBAAiB,OAAO,GAAG;EAE7B,MAAM,yBAAyB,UAAU,cAAc,QAAQ,QAAQ,QAAQ;GAC7E,MAAM,UAAU,GAAG,IAAI,QAAQ,cAAc,GAAG,IAAI;AACpD,OAAI,iBAAiB,eAAe,IAAI,QAAQ,CAAE,QAAO;GAGzD,MAAM,WAAW,IAAI;AACrB,OAAI,YAAY,iBAAiB,IAAI,SAAS,EAAE;AAC9C,qBAAiB,eAAe,IAAI,QAAQ;AAC5C,WAAO;;AAET,UAAO;IACP;AACF,QAAM,QAAQ,IACZ,uBAAuB,KAAK,QAAQ,OAAO,4BAA4B,IAAI,QAAQ,CAAC,CACrF;EAGD,MAAM,gBAAgB,UAAU,KAAK,QAAQ,QAAQ,QAAQ;GAE3D,MAAM,WAAW,IAAI;AACrB,OAAI,CAAC,YAAY,iBAAiB,MAAM,IAAI,SAAS,CAAE,QAAO;AAC9D,OAAI,iBAAiB,IAAI,SAAS,EAAE;AAClC,qBAAiB,MAAM,IAAI,SAAS;AACpC,WAAO;;AAET,UAAO;IACP;AACF,QAAM,QAAQ,IAAI,cAAc,KAAK,QAAQ,OAAO,mBAAmB,IAAI,QAAQ,CAAC,CAAC;;;;;;;;AASzF,eAAsB,aAAa,SAAsB;CACvD,MAAM,EAAE,QAAQ,aAAa,aAAa,YAAY,QAAQ,kBAAkB;CAChF,MAAM,YAA+B,EAAE;AACvC,KAAI,CAAC,WACH,MAAK,MAAM,YAAY,YAAY,kBAAkB;AACnD,QAAM,SAAS,WAAW;AAC1B,YAAU,KAAK,SAAS;;CAG5B,MAAM,YAAY,aACd,EAAE,GACF,OAAO,OAAQ,MAAM,YAAY,iBAAiB,eAAe,IAAK,EAAE,CAAC;CAE7E,MAAM,EACJ,WAAW,kBACX,WACA,WACA,mBACE,MAAM,aAAa,QAAQ,aAAa,YAAY,MAAM,UAAU;CACxE,MAAM,kBAAkB,iBAAiB,QAAQ,KAAK,QAAQ,IAAI,KAAK;CACvE,MAAM,CAAC,eAAe,0BAA0B,MAAM,QAAQ,IAAI,CAChE,UAAU,QAAQ,aAAa,WAAW,WAAW,gBAAgB,EACrE,mBAAmB,QAAQ,aAAa,WAAW,gBAAgB,CACpE,CAAC;AAEF,kBAAiB,OAAO;AACxB,eAAc,OAAO;AACrB,wBAAuB,OAAO;AAE9B,QAAO;EACL,WAAW;GACT,SAAS;GACT,MAAM;GACN,eAAe;GAChB;EACD;EACA;EACA;EACA,SAAS;GACP;GACA;GACA;GACA,eAAe,iBAAiB;GACjC;EACF;;AAmBH,SAAS,IAAI,aAAqB,MAAc;AAC9C,QAAO,GAAG,UAAU,YAAY,CAAC,YAAY;;AAG/C,eAAe,aACb,QACA,aACA,SACA,WACA;CACA,MAAM,YAAY,gBAChB,oBACD;CACD,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAExC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;AACpE,MAAI;GACF,MAAM,EAAE,kBAAkB,kBAAkB,MAAM,OAAO,qBAAqB;IAC5E;IACA;IACA,UAAU;IACX,CAAC;AACF,UAAO,CAAC,kBAAkB,cAAc;WACjC,OAAO;AACd,OAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,SAAM;;GAER;CACF,MAAM,mBAA6D,EAAE;
AACrE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,MAAI,CAAC,SAAS,WAAW,KACvB;EAEF,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAK,IAAI,aAAa,SAAS,UAAU,KAAK,EAC/C,CAAC;AACF,mBAAiB,SAAS,UAAU,QAAQ;GAC1C;GACA,OAAO,UAAU,OAAO;GACxB,WAAW,UAAU;GACtB;GACD,CACH;AAED,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,WAAW,iBAAiB,SAAS;EAC3C,MAAM,cAAc,MAAM,iBACxB,IAAI,aAAa,SAAS,UAAU,EACpC,SACA,UAAU,UACX;AACD,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACxB,CAAC;YACO,SAAS,UAAU,QAC5B,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACvB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM,SAAS;IACf;IACD,CAAC;AACF,UAAO,iBAAiB,SAAS;QAEjC,WAAU,QAAQ,KAAK;GACrB,MAAM,SAAS;GACf,SAAS;IACP;IACA,eAAe,SAAS;IAExB,iBAAiB;IAClB;GACD;GACD,CAAC;;AAGN,QAAO,QAAQ,iBAAiB,CAAC,SAAS,CAAC,mBAAmB;EAC5D,MAAM,QAAQ,iBAAiB,gBAAgB;AAC/C,MAAI,SAAS,UAAU,QACrB,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAAU,QACZ,WAAU,QAAQ,KAAK;GACrB,MAAM;GACN,SAAS;IACP;IACA;IACD;GACF,CAAC;GAEJ;AAEF,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;;AAkB5D,eAAe,UACb,QACA,aACA,WACA,WACA,iBACA,0BACA;CACA,MAAM,YAAY,gBAAoD,iBAAiB;CAEvF,MAAM,cAAc,kBAA0B;AAC5C,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,eAAe,kBAAkB,MAAM,OAAO,kBAAkB;KACtE;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,eAAe,cAAc;YAC9B,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;CAGJ,MAAM,oCAAoB,IAAI,KAAa;AAC3C,MAAK,MAAM,YAAY,UACrB,KACE,SAAS,QAAQ,SAAS,mBAC1B,SAAS,QAAQ,SAAS,mBAC1B,SAAS,QAAQ,SAAS,gBAE1B,mBAAkB,IAAI,SAAS,QAAQ,SAAS;AAKpD,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,QAAQ,0BAA0B,IAAI,SAAS,UAAU,IAAI,SAAS;AAC5E,OAAK,MAAM,YAAY,OAAO,KAAK,MAAM,EAAE;GACzC,MAAM,OAAO,MAAM;AACnB,OAAI,kBAAkB,IAAI,SAAS,IAAI,KAAK,UAAU,kBAAkB,MACtE,OAAM,IAAI,MACR,SAAS,SAAS,qLAEnB;;;AAKP,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,gBAAgB,MAAM,WAAW,SAAS,UAAU;EAC1D,MAAM,kCAAkB,IAAI,KAAa;AACzC,gBAAc,SAAS,SAAS,gBAAgB,IAAI,KAAK,KAAK,CAAC;EAG/D,MAAM,QAAQ,0BAA0B,IAAI,SAAS,UAAU,IAAI,SAAS;AAE5E,OAAK,MAAM,YAAY,OAAO,KAAK,MAAM,EAAE;GACzC,MAAM,eAAe,6BACnB,MAAM,WACN,mBACA,SAAS,OAAO,cACjB;AACD,OAAI,gBAAgB,IAAI,SAAS,EAAE;AACjC,cAAU
,QAAQ,KAAK;KACrB,MAAM;KACN,SAAS;MACP;MACA,eAAe,SAAS;MACxB;MACD;KACF,CAAC;AACF,oBAAgB,OAAO,SAAS;SAEhC,WAAU,QAAQ,KAAK;IACrB,MAAM;IACN,SAAS;KACP;KACA,eAAe,SAAS;KACxB;KACD;IACF,CAAC;;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,SAAS;KACxB,kBAAkB;KACnB;IACF,CAAC;IACF;;AAEJ,MAAK,MAAM,iBAAiB,gBAE1B,EADsB,MAAM,WAAW,cAAc,EACvC,SAAS,QAAQ;AAC7B,YAAU,QAAQ,KAAK;GACrB,MAAM,IAAI;GACV,SAAS;IACP;IACA;IACA,kBAAkB,IAAI;IACvB;GACF,CAAC;GACF;AAEJ,QAAO;;;;;;;;;AAYT,SAAS,6BACP,MACA,mBACA,wBAC6C;CAE7C,MAAM,aAAa,WAAW,SAAS,KAAK,YAAY,KAAK;CAE7D,MAAM,kBAcF;EACF,aAAa,KAAK,UAAU,eAAe;EAC3C,YAAY,KAAK,UAAU,cAAc;EACzC,OAAO;EACP,uBAAuB;EACvB,mBAAmB;EACnB;EACA,qBAAqB;EACtB;AAKD,KAAI,KAAK,UAAU,kBAAkB,OACnC,iBAAgB,sBAAsB,KAAK,SAAS;UAC3C,kBAAkB,IAAI,KAAK,KAAK,CACzC,iBAAgB,sBAAsB;CAIxC,MAAM,MAAM,KAAK,UAAU,iBAAiB;AAC5C,KAAI,IACF,iBAAgB,uBAAuB;EACrC,QAAQ,IAAI,WAAW;EACvB,QAAQ,IAAI,WAAW;EACvB,QAAQ,IAAI,WAAW;EACvB,MAAM,IAAI,SAAS;EACpB;CAGH,MAAM,SAAkF,EAAE;AAE1F,QAAO,KAAK,KAAK,OAAO,CACrB,QAAQ,cAAc,cAAc,KAAK,CACzC,SAAS,cAAc;EACtB,MAAM,cAAc,KAAK,OAAO,WAAW;EAC3C,MAAM,YAAY,YAAY;EAC9B,MAAM,aAAsE;GAC1E,MAAM;GACN,eAAe,cAAc,SAAS,YAAY,iBAAiB,EAAE,GAAG,EAAE;GAC1E,aAAa,YAAY,eAAe;GACxC,UAAU,qBAAqB,YAAY;GAC3C,OAAO,YAAY,SAAS;GAC5B,OAAO,YAAY,SAAS;GAC5B,QAAQ,YAAY,UAAU;GAC9B,YAAY,YAAY,cAAc;GACtC,gBAAgB,YAAY;GAC5B,iBAAiB,YAAY;GAC7B,UAAU,YAAY,aAAa;GACnC,QAAQ,YAAY,UAAU;GAC9B,GAAG,kBAAkB,YAAY;GACjC,GAAI,YAAY,UAAU,EACxB,QAAQ;IACN,OAAO,YAAY,OAAO;IAC1B,GAAI,YAAY,OAAO,YAAY,EACjC,UAAU,YAAY,OAAO,UAC9B;IACD,GAAI,YAAY,OAAO,UAAU,EAC/B,QAAQ,YAAY,OAAO,QAC5B;IACF,EACF;GACD,GAAI,YAAY,UAAU,UAAa,EAAE,OAAO,YAAY,OAAO;GACpE;AAGD,MAAI,YAAY,SAAS,YAAY,YAAY,OAC/C,YAAW,SAAS,oBAAoB,YAAY,OAAO;AAG7D,SAAO,aAAa;GACpB;CAEJ,MAAM,gBAGF,EAAE;AAEN,MAAK,MAAM,CAAC,cAAc,QAAQ,OAAO,QAAQ,KAAK,qBAAqB,CACzE,eAAc,gBAAgB;EAC5B,SAAS,IAAI;EACb,UAAU,IAAI;EACd,UAAU,IAAI;EACd,OAAO,IAAI;EACX,aAAa,IAAI;EAClB;AAGH,MAAK,MAAM,CAAC,cAAc,QAAQ,OAAO,QAAQ,KAAK,sBAAsB,CAC1E,eAAc,gBAAgB;EAC5B,SAAS,IAAI;EACb,UAAU,IAAI;EACd,UAAU,IAAI;EACd,OAAO,IAAI;EACX,aAAa,IAAI;
EAClB;CAIH,MAAM,UAA6E,EAAE;AACrF,KAAI,KAAK,QACP,QAAO,QAAQ,KAAK,QAAQ,CAAC,SAAS,CAAC,KAAK,WAAW;AACrD,UAAQ,OAAO;GACb,YAAY,MAAM;GAClB,QAAQ,MAAM,UAAU;GACzB;GACD;CAIJ,MAAM,QAAgF,EAAE;AACxF,KAAI,KAAK,MACP,QAAO,QAAQ,KAAK,MAAM,CAAC,SAAS,CAAC,KAAK,iBAAiB;AACzD,QAAM,OAAO,EAAE,aAAa,eAAe,IAAI;GAC/C;CAWJ,MAAM,aAAa,KAAK,YAAY,SAChC,gBAAgB,KAAK,YAAY,OAAO,GAPsC;EAChF,QAAQ,EAAE;EACV,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,QAAQ,EAAE;EACX;AAKD,QAAO;EACL,MAAM,KAAK;EACX,QAAQ;GACN,aAAa,KAAK,eAAe;GACjC;GACe;GACf,UAAU;GACV,SAAS;GACT,YAAY,EAAE;GACd;GACA;GACA;GACD;EACF;;AAGH,SAAS,qBACP,aACqE;AACrE,SAAQ,YAAY,YAAY,EAAE,EAAE,KAAK,SAAS;EAChD,QAAQ,0BAA0B;EAClC,cAAc,IAAI,gBAAgB;EAClC,GAAI,IAAI,UAAU,EAChB,QAAQ,EACN,MAAM,IAAI,OAAO,OAAO,IAAI,IAAI,OAAO,SAAS,IACjD,EACF;EACF,EAAE;;AAGL,SAAS,kBACP,aAC+F;AAC/F,KAAI,CAAC,YAAY,MACf,QAAO,EAAE;AAEX,QAAO,EACL,OAAO;EACL,QAAQ,YAAY,MAAM,SACtB,EACE,MAAM,YAAY,MAAM,OAAO,QAAQ,IACxC,GACD;EACJ,QAAQ,YAAY,MAAM,SACtB,EACE,MAAM,YAAY,MAAM,OAAO,QAAQ,IACxC,GACD;EACL,EACF;;AAGH,SAAS,oBACP,QACyE;CACzE,MAAM,eAAwF,EAAE;AAEhG,QAAO,QAAQ,OAAO,CAAC,SAAS,CAAC,iBAAiB,uBAAuB;EACvE,MAAM,aAAa,kBAAkB;AAErC,MAAI,eAAe,YAAY,kBAAkB,QAAQ;GACvD,MAAM,mBAAmB,oBAAoB,kBAAkB,OAAO;AACtE,gBAAa,mBAAmB;IAC9B,MAAM;IACN,eAAe,kBAAkB,iBAAiB,EAAE;IACpD,aAAa,kBAAkB,eAAe;IAC9C,UAAU,qBAAqB,kBAAkB;IACjD,UAAU,kBAAkB,YAAY;IACxC,OAAO,kBAAkB,SAAS;IAClC,OAAO;IACP,QAAQ;IACR,YAAY;IACZ,QAAQ;IACR,GAAG,kBAAkB,kBAAkB;IACvC,QAAQ;IACR,GAAI,kBAAkB,UAAU,UAAa,EAAE,OAAO,kBAAkB,OAAO;IAChF;QAED,cAAa,mBAAmB;GAC9B,MAAM;GACN,eAAe,eAAe,SAAS,kBAAkB,iBAAiB,EAAE,GAAG,EAAE;GACjF,aAAa,kBAAkB,eAAe;GAC9C,UAAU,qBAAqB,kBAAkB;GACjD,UAAU,kBAAkB,YAAY;GACxC,OAAO,kBAAkB,SAAS;GAClC,OAAO;GACP,QAAQ;GACR,YAAY;GACZ,QAAQ;GACR,GAAG,kBAAkB,kBAAkB;GACvC,GAAI,kBAAkB,UAAU,EAC9B,QAAQ;IACN,OAAO,kBAAkB,OAAO;IAChC,GAAI,kBAAkB,OAAO,YAAY,EACvC,UAAU,kBAAkB,OAAO,UACpC;IACD,GAAI,kBAAkB,OAAO,UAAU,EACrC,QAAQ,kBAAkB,OAAO,QAClC;IACF,EACF;GACD,GAAI,kBAAkB,UAAU,UAAa,EAAE,OAAO,kBAAkB,OAAO;GAChF;GAEH;AAEF,QAAO;;AAGT,SAAS,gBACP,YACwD;CACxD,MAAM,MAA8D,EAAE;AACtE,MAAK,MAAM,CAAC,KAAK,aAAa,OAA
O,QAAQ,WAAW,CACtD,KAAI,OAA6C,SAAS,KAAK,WAAW,YAAY,OAAO,CAAC;AAEhG,QAAO;;AAGT,SAAS,YACP,QAC+D;CAC/D,IAAI;AACJ,SAAQ,OAAO,QAAf;EACE,KAAK;AACH,YAAS,+BAA+B;AACxC;EACF,KAAK;AACH,YAAS,+BAA+B;AACxC;EACF,QACE,OAAM,IAAI,MAAM,uBAAuB,OAAO,SAAyB;;AAE3E,QAAO;EACL,YAAY,OAAO,WAAW,KAAK,SAAS,eAAe,KAAK,CAAC;EACjE;EACA,aAAa,OAAO;EACrB;;AAGH,SAAS,eACP,WACkE;CAClE,MAAM,CAAC,MAAM,UAAU,SAAS;CAEhC,MAAM,IAAI,aAAa,KAAK;CAC5B,MAAM,IAAI,aAAa,MAAM;CAC7B,IAAI;AACJ,SAAQ,UAAR;EACE,KAAK;AACH,QAAK,iCAAiC;AACtC;EACF,KAAK;AACH,QAAK,iCAAiC;AACtC;EACF,KAAK;AACH,QAAK,iCAAiC;AACtC;EACF,KAAK;AACH,QAAK,iCAAiC;AACtC;EACF,KAAK;AACH,QAAK,iCAAiC;AACtC;EACF,KAAK;AACH,QAAK,iCAAiC;AACtC;EACF,QACE,OAAM,IAAI,MAAM,qBAAqB,WAA2B;;AAEpE,QAAO;EACL,MAAM;EACN,UAAU;EACV,OAAO;EACR;;AAGH,SAAS,aACP,SACgE;AAChE,KAAI,OAAO,YAAY,YAAY,CAAC,MAAM,QAAQ,QAAQ,CACxD,KAAI,UAAU,QACZ,QAAO,EACL,MAAM;EACJ,MAAM;EACN,OAAO,QAAQ;EAChB,EACF;UACQ,YAAY,QACrB,QAAO,EACL,MAAM;EACJ,MAAM;EACN,OAAO,QAAQ;EAChB,EACF;UACQ,eAAe,QACxB,QAAO,EACL,MAAM;EACJ,MAAM;EACN,OAAO,QAAQ;EAChB,EACF;UACQ,eAAe,QACxB,QAAO,EACL,MAAM;EACJ,MAAM;EACN,OAAO,QAAQ;EAChB,EACF;KAED,OAAM,IAAI,MAAM,oBAAoB,KAAK,UAAU,QAAQ,GAAG;AAIlE,QAAO,EACL,MAAM;EACJ,MAAM;EACN,OAAO,SAAS,aAAa,QAAQ;EACtC,EACF;;AAkBH,eAAe,mBACb,QACA,aACA,WACA,iBACA;CACA,MAAM,YAAY,gBAChB,0BACD;CAED,MAAM,uBAAuB,kBAA0B;AACrD,SAAO,SAAS,OAAO,WAAW,gBAAgB;AAChD,OAAI;IACF,MAAM,EAAE,aAAa,kBAAkB,MAAM,OAAO,2BAA2B;KAC7E;KACA;KACA;KACA,UAAU;KACX,CAAC;AACF,WAAO,CAAC,aAAa,cAAc;YAC5B,OAAO;AACd,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,QAAO,CAAC,EAAE,EAAE,GAAG;AAEjB,UAAM;;IAER;;AAGJ,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,yBAAyB,MAAM,oBAAoB,SAAS,UAAU;EAC5E,MAAM,kCAAkB,IAAI,KAAa;AACzC,yBAAuB,SAAS,kBAAkB;AAChD,mBAAgB,IAAI,cAAc,SAAS;IAC3C;EAEF,MAAM,QAAQ,SAAS;AACvB,OAAK,MAAM,YAAY,OAAO,KAAK,MAAM,EAAE;GACzC,MAAM,gBAAgB,MAAM,UAAU,YAAY;AAClD,OAAI,CAAC,cACH;AAEF,OAAI,gBAAgB,IAAI,SAAS,EAAE;AACjC,cAAU,QAAQ,KAAK;KACrB,MAAM;KACN,SAAS;MACP;MACA,eAAe,SAAS;MACd;MACV,YAAY,mBAAmB,cAAc;MAC9C;KACF,CAAC;AACF,oBAAgB,OAAO,SAAS;SAEhC,WAAU,QAAQ,KAAK;IACrB,MAAM;IACN,SAAS;KACP;KAC
A,eAAe,SAAS;KACd;KACV,YAAY,mBAAmB,cAAc;KAC9C;IACF,CAAC;;AAGN,kBAAgB,SAAS,SAAS;AAChC,aAAU,QAAQ,KAAK;IACrB;IACA,SAAS;KACP;KACA,eAAe,SAAS;KACxB,UAAU;KACX;IACF,CAAC;IACF;;AAEJ,MAAK,MAAM,iBAAiB,gBAE1B,EAD+B,MAAM,oBAAoB,cAAc,EAChD,SAAS,kBAAkB;AAChD,YAAU,QAAQ,KAAK;GACrB,MAAM,cAAc;GACpB,SAAS;IACP;IACA;IACA,UAAU,cAAc;IACzB;GACF,CAAC;GACF;AAEJ,QAAO;;AAGT,SAAS,mBACP,YACsD;AACtD,QAAO,EACL,UAAU,WAAW,KAAK,WAAW,eAAe,OAAO,CAAC,EAC7D;;AAGH,SAAS,eACP,QAC6D;CAC7D,MAAM,UAA0C,EAAE;AAClD,MAAK,MAAM,UAAU,OAAO,QAC1B,SAAQ,QAAR;EACE,KAAK;AACH,WAAQ,KAAK,6BAA6B,IAAI;AAC9C;EACF,KAAK;AACH,WAAQ,KAAK,6BAA6B,OAAO;AACjD;EACF,KAAK;AACH,WAAQ,KAAK,6BAA6B,KAAK;AAC/C;EACF,KAAK;AACH,WAAQ,KAAK,6BAA6B,OAAO;AACjD;EACF,KAAK;AACH,WAAQ,KAAK,6BAA6B,OAAO;AACjD;EACF,KAAK;AACH,WAAQ,KAAK,6BAA6B,UAAU;AACpD;EACF,KAAK;AACH,WAAQ,KAAK,6BAA6B,YAAY;AACtD;EACF,QACE,OAAM,IAAI,MAAM,mBAAmB,SAAyB;;CAGlE,IAAI;AACJ,SAAQ,OAAO,QAAf;EACE,KAAK;AACH,YAAS,6BAA6B;AACtC;EACF,KAAK;AACH,YAAS,6BAA6B;AACtC;EACF,QACE,OAAM,IAAI,MAAM,uBAAuB,OAAO,SAAyB;;AAE3E,QAAO;EACL,YAAY,OAAO,WAAW,KAAK,SAAS,kBAAkB,KAAK,CAAC;EACpE;EACA;EACA,aAAa,OAAO;EACrB;;AAGH,SAAS,kBACP,WACgE;CAChE,MAAM,CAAC,MAAM,UAAU,SAAS;CAEhC,MAAM,IAAI,gBAAgB,KAAK;CAC/B,MAAM,IAAI,gBAAgB,MAAM;CAChC,IAAI;AACJ,SAAQ,UAAR;EACE,KAAK;AACH,QAAK,+BAA+B;AACpC;EACF,KAAK;AACH,QAAK,+BAA+B;AACpC;EACF,KAAK;AACH,QAAK,+BAA+B;AACpC;EACF,KAAK;AACH,QAAK,+BAA+B;AACpC;EACF,KAAK;AACH,QAAK,+BAA+B;AACpC;EACF,KAAK;AACH,QAAK,+BAA+B;AACpC;EACF,QACE,OAAM,IAAI,MAAM,qBAAqB,WAA2B;;AAEpE,QAAO;EACL,MAAM;EACN,UAAU;EACV,OAAO;EACR;;AAGH,SAAS,gBACP,SAC8D;AAC9D,KAAI,OAAO,YAAY,YAAY,CAAC,MAAM,QAAQ,QAAQ,EACxD;MAAI,UAAU,QACZ,QAAO,EACL,MAAM;GACJ,MAAM;GACN,OAAO,QAAQ;GAChB,EACF;;AAIL,QAAO,EACL,MAAM;EACJ,MAAM;EACN,OAAO,SAAS,aAAa,QAAQ;EACtC,EACF;;;;;;;;AAoBH,eAAe,oBACb,kBACA,0BACiC;CACjC,MAAM,UAAkC,EAAE;AAE1C,MAAK,MAAM,EAAE,WAAW,mBAAmB,0BAA0B;EACnE,MAAM,aAAa,iBAAiB,IAAI,UAAU;AAClD,MAAI,CAAC,WACH;EAIF,IAAI;AACJ,MAAI;AACF,sBAAmB,kCAAkC,cAAc;UAC7D;AAEN,WAAQ,KAAK;IACX;IACA;IACA,SAAS;IACV,CAAC;AACF;;AAGF,MAAI,CAAC,kBAAkB;AAErB,WAAQ,KAAK;IAC
X;IACA;IACA,SAAS;IACT,MAAM;IACP,CAAC;AACF;;EAIF,MAAM,OAAO,8BAA8B,kBAAkB,YAAY,UAAU;AAEnF,UAAQ,KAAK;GACX;GACA;GACA,SAAS,WAAW,KAAK;GACzB,MAAM,WAAW,KAAK,GAAG,OAAO;GACjC,CAAC;;AAGJ,QAAO;;;;;;;AAQT,SAAS,4BAA4B,SAAyC;CAC5E,MAAM,QAAkB,EAAE;AAE1B,MAAK,MAAM,UAAU,SAAS;AAC5B,MAAI,CAAC,OAAO,QACV;AAGF,QAAM,KAAK,cAAc,OAAO,YAAY;AAE5C,MAAI,CAAC,OAAO,KACV,OAAM,KACJ,qFACD;OACI;AACL,SAAM,KAAK,KAAK,kBAAkB,OAAO,KAAK,GAAG;AACjD,SAAM,KAAK,GAAG;AACd,SAAM,KAAK,oBAAoB,OAAO,KAAK,CAAC;;AAE9C,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;;;;;;;;;;;ACn6DzB,eAAsB,cACpB,QACA,QACA,QAAyD,iBACzD;CACA,MAAM,EAAE,WAAW,YAAY;AAC/B,KAAI,UAAU,iBAAiB;EAE7B,MAAM,sBAAsB,MAAM,qBAAqB,QAAQ,WAAW,QAAQ;AAIlF,QAAM,QAAQ,IAAI,CAChB,GAAG,UAAU,QAAQ,IAAI,OAAO,aAAW;GACzC,MAAM,mBAAmB,0BACvB,qBACAC,SAAO,aACR;AACD,SAAM,OAAO,eAAe;IAC1B,aAAaA,SAAO;IACpB,cAAcA,SAAO,SAAS;IAC9B,qBAAqBA,SAAO,SAAS,QAAQ;IAC7C,cAAc;IACf,CAAC;AACF,SAAM,OAAO,YAAYA,SAAO,YAAY;IAC5C,EACF,GAAG,UAAU,QAAQ,IAAI,OAAO,WAAW;GACzC,MAAM,mBAAmB,0BACvB,qBACA,OAAO,aACR;AACD,SAAM,OAAO,eAAe;IAC1B,aAAa,OAAO;IACpB,cAAc,OAAO,SAAS;IAC9B,qBAAqB,OAAO,SAAS,QAAQ;IAC7C,cAAc;IACf,CAAC;AACF,SAAM,OAAO,YAAY,OAAO,YAAY;IAC5C,CACH,CAAC;YACO,UAAU,SAEnB,OAAM,QAAQ,IACZ,UAAU,QAAQ,KAAK,QACrB,OAAO,eAAe;EACpB,aAAa,IAAI;EACjB,YAAY,IAAI;EACjB,CAAC,CACH,CACF;;;;;;;;AAUL,SAAS,0BACP,aACA,cAC2B;CAC3B,MAAM,WAAsC,EAAE;AAC9C,MAAK,MAAM,WAAW,aACpB,KAAI,YAAY,aAAa,OAC3B,UAAS,WAAW,YAAY;AAGpC,QAAO;;;;;;;;;;;;AAaT,eAAe,qBACb,QACA,WACA,SACoC;CACpC,MAAM,sBAAiD,EAAE;CAGzD,MAAM,gBAAgB,UAAU,QAAQ,MAAM,UAAU,QAAQ;AAChE,KAAI,CAAC,cACH,QAAO;CAGT,MAAM,EAAE,gBAAgB;CAGxB,MAAM,kCAAkB,IAAI,KAAa;AACzC,MAAK,MAAM,QAAQ,CAAC,GAAG,UAAU,SAAS,GAAG,UAAU,QAAQ,CAC7D,MAAK,MAAM,WAAW,KAAK,aACzB,iBAAgB,IAAI,QAAQ;CAKhC,MAAM,uBAAuB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EAC5E,MAAM,WAAW,MAAM,OAAO,yBAAyB;GACrD;GACA;GACA,UAAU;GACX,CAAC;AACF,SAAO,CAAC,SAAS,aAAa,KAAK,MAAM,EAAE,KAAK,EAAE,SAAS,cAAc;GACzE;CACF,MAAM,sBAAsB,IAAI,IAAI,qBAAqB;CAIzD,MAAM,UAAU,MAAM,QAAQ,IAC5B,MAAM,KAAK,gBAAgB,CAAC,IAAI,OAAO,YAAY;EAEjD,MAAM,WADa,oBAAoB,IAAI,QAAQ,GAE/C,MAAM,OAAO,0BAA0B;G
ACrC;GACA,iBAAiB;GACjB,WAAW,wBAAwB,QAAQ;GAC5C,CAAC,GACF,MAAM,OAAO,0BAA0B;GACrC;GACA,iBAAiB;GACjB,WAAW,wBAAwB,QAAQ;GAC5C,CAAC;AAGN,QAAM,OAAO,YACX,MAAM,iBAAiB,eAAe,aAAa,QAAQ,EAAE,QAAQ,CACtE;AAED,SAAO;GAAE;GAAS,SAAS,SAAS,aAAa;GAAS;GAC1D,CACH;AAED,MAAK,MAAM,EAAE,SAAS,aAAa,QACjC,KAAI,QACF,qBAAoB,WAAW;CAKnC,MAAM,qBAAqB,qBAAqB,QAC7C,YAAY,CAAC,gBAAgB,IAAI,QAAQ,CAC3C;AACD,OAAM,QAAQ,IACZ,mBAAmB,IAAI,OAAO,YAAY;EACxC,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAK,eAAe,aAAa,QAAQ,EAC1C,CAAC;AAIF,MAHc,UAAU,SAAS,qBAGnB,QACZ,OAAM,OAAO,YAAY;GACvB,KAAK,eAAe,aAAa,QAAQ;GACzC,QAAQ,GAAG,kBAAkB,IAAI;GAClC,CAAC;GAEJ,CACH;AAED,QAAO;;AAyBT,SAAS,YAAY,aAAqB,MAAc;AACtD,QAAO,oBAAoB,YAAY,YAAY;;AAGrD,SAAS,eAAe,aAAqB,MAAc;AACzD,QAAO,oBAAoB,YAAY,yBAAyB;;;;;;;;;;;AAYlE,eAAsB,aACpB,QACA,aACA,SACA,WACA,aACA;CACA,MAAM,YAAY,gBAAgE,YAAY;CAC9F,MAAM,YAA6B,EAAE;CACrC,MAAM,YAAiC,EAAE;CACzC,MAAM,iCAAiB,IAAI,KAAa;CAGxC,MAAM,eAAe,MAAM,SAAS,OAAO,WAAW,gBAAgB;EACpE,MAAM,WAAW,MAAM,OAAO,cAAc;GAC1C;GACA;GACA,UAAU;GACX,CAAC;AACF,SAAO,CAAC,SAAS,UAAU,KAAK,OAAO;GAAE,IAAI,EAAE;GAAI,MAAM,EAAE;GAAM,EAAE,EAAE,SAAS,cAAc;GAC5F;CACF,MAAM,oBAA8D,EAAE;AACtE,OAAM,QAAQ,IACZ,aAAa,IAAI,OAAO,aAAa;EACnC,MAAM,EAAE,aAAa,MAAM,OAAO,YAAY,EAC5C,KAAK,YAAY,aAAa,SAAS,KAAK,EAC7C,CAAC;AACF,oBAAkB,SAAS,QAAQ;GACjC;GACA,OAAO,UAAU,OAAO;GACzB;GACD,CACH;AAED,MAAK,MAAM,YAAY,OAAO,OAAO,UAAU,EAAE;EAC/C,MAAM,WAAW,kBAAkB,SAAS;EAC5C,MAAM,cAAc,MAAM,iBAAiB,YAAY,aAAa,SAAS,KAAK,EAAE,QAAQ;EAE5F,MAAM,eAAe,YAAY,SAAS,QAAQ;AAClD,MAAI,CAAC,aACH,OAAM,IAAI,MACR,QAAQ,SAAS,QAAQ,KAAK,0BAA0B,SAAS,KAAK,oMAKlD,SAAS,QAAQ,KAAK,gCAAgC,SAAS,QAAQ,KAAK,WACjG;AAGH,MAAI,UAAU;AACZ,OAAI,CAAC,SAAS,MACZ,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACxB,CAAC;YACO,SAAS,UAAU,QAC5B,WAAU,KAAK;IACb,cAAc;IACd,cAAc,SAAS;IACvB,cAAc,SAAS;IACxB,CAAC;AAGJ,aAAU,QAAQ,KAAK;IACrB,MAAM,SAAS;IACf;IACA;IACA;IACA;IACD,CAAC;AACF,UAAO,kBAAkB,SAAS;QAElC,WAAU,QAAQ,KAAK;GACrB,MAAM,SAAS;GACf;GACA;GACA;GACA;GACD,CAAC;;AAIN,QAAO,OAAO,kBAAkB,CAAC,SAAS,aAAa;EACrD,MAAM,QAAQ,UAAU;AACxB,MAAI,SAAS,UAAU,QACrB,gBAAe,IAAI,MAAM;AAG3B,MAAI,UAA
U,QACZ,WAAU,QAAQ,KAAK;GACrB,MAAM,SAAU,SAAS;GACzB;GACA,YAAY,SAAU,SAAS;GAChC,CAAC;GAEJ;AAEF,WAAU,OAAO;AACjB,QAAO;EAAE;EAAW;EAAW;EAAW;EAAgB;EAAS;;;;;;;;;;ACjQrE,eAAsB,MAAM,SAAwB;AAClD,QAAO,SAAS,SAAS,OAAO,aAAa;AAC3C,WAAS,aAAa,iBAAiB,SAAS,UAAU,MAAM;EAGhE,MAAM,EAAE,QAAQ,aAAa,qBAAqB,cAAc,MAAM,SACpE,SACA,YAAY;GACV,MAAM,EAAE,kBAAQ,YAAY,MAAM,SAAS,0BACzC,WAAW,SAAS,WAAW,CAChC;GAED,MAAMC,WAAS,SAAS,UAAU;GAClC,MAAMC,cACJ,SAAS,aAAa,QAAQ,IAAI,mCAAmC;GACvE,MAAM,UAAU,SAAS,WAAW;GAGpC,MAAM,cAAc,MAAM,iBAAiB;GAC3C,MAAM,WAAW,KAAK,QAAQ,YAAY,EAAE,QAAQ;AACpD,OAAI,SAAS,YAAY;AACvB,SAAG,OAAO,UAAU;KAAE,WAAW;KAAM,OAAO;KAAM,CAAC;AACrD,WAAO,KAAK,uBAAuB;;GAErC,MAAM,YAAY,KAAK,QAAQC,SAAO,KAAK;GAC3C,MAAM,eACJ,WAAW,kBAAkB,EAAE,KAAK,WAAW,CAAC,IAChD,WAAW,qBAAqB,EAAE,KAAK,WAAW,CAAC,IACnD,WAAW,aAAa,EAAE,KAAK,WAAW,CAAC,IAC3C,WAAW,YAAY,EAAE,KAAK,WAAW,CAAC;GAC5C,MAAM,eAAe,mBAAmB;IACtC,SAAS,CAAC;IACV;IACA,YAAY,YAAY,WAAW;IACnC,cAAc,eAAe,SAAS,aAAa,GAAG;IACvD,CAAC;GAEF,IAAI;AACJ,OAAI,QAAQ,SAAS,EACnB,iBAAgB,IAAI,cAAc,QAAQ;AAG5C,SAAM,SAAS,iCACb,kBAAkB;IAAE;IAAQ,YAAYA,SAAO;IAAM,CAAC,CACvD;GAED,IAAIC;GACJ,IAAIC;AACJ,OAAI;IACF,MAAM,SAAS,MAAM,SAAS,+BAC5B,gBAAgB;KAAE;KAAQ;KAAe,aAAa,aAAa;KAAa,CAAC,CAClF;AACD,oBAAc,OAAO;AACrB,4BAAsB,OAAO;aACrB;AAGR,iBAAa,UAAU;;AAGzB,UAAO;IAAE;IAAQ;IAAS;IAAa;IAAqB;IAAQ;IAAW;IAElF;AACD,MAAI,UAAW;EAOf,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;GACxC,YAAY;GACZ,SAAS,SAAS;GACnB,CAAC,CACkD;EACpD,MAAM,cAAc,gBAAgB;GAClC,aAAa,SAAS;GACtB,SAAS,SAAS;GACnB,CAAC;AAEF,WAAS,aAAa,YAAY,YAAY,KAAK;AACnD,WAAS,aAAa,gBAAgB,YAAY;EAGlD,MAAM,kBAAkB,YAAY;EACpC,MAAM,kBAAkB,uBAAuB,aAAa,iBAAiB,QAAQ,EAAE,CAAC;EAExF,MAAM,SAAS,SAAS,UAAU;EAClC,MAAM,MAAM,SAAS,OAAO;EAG5B,MAAM,EACJ,kBACA,UACA,eACA,KACA,MACA,UACA,KACA,UACA,UACA,kBACE,MAAM,SAAS,QAAQ,YAAY;GACrC,MAAM,MAAmB;IACvB;IACA;IACA;IACA,YAAY;IACZ;IACA,eAAe,SAAS;IACzB;GACD,MAAM,CACJC,oBACAC,YACAC,iBACAC,OACAC,QACAC,YACAC,OACAC,YACAC,YACAC,mBACE,MAAM,QAAQ,IAAI;IACpB,SAAS,+BACP,qBAAqB,QAAQ,aAAa,YAAY,MAAM,gBAAgB,CAC7E;IACD,SAAS,uBAAuB,aAAa,IAAI,CAAC;IAClD,SAAS,4BAA4B,kBAAkB,IAAI,CAAC;IAC5
D,SAAS,kBAAkB,QAAQ,IAAI,CAAC;IACxC,SAAS,mBAAmB,SAAS,IAAI,CAAC;IAC1C,SAAS,uBAAuB,aAAa,IAAI,CAAC;IAClD,SAAS,0BAA0B,gBAAgB,IAAI,CAAC;IACxD,SAAS,uBAAuB,aAAa,IAAI,CAAC;IAClD,SAAS,uBACP,aACE,QACA,aACA,YAAY,MACZ,iBAAiB,aAAa,EAAE,EAChC,qBAAqB,eAAe,EAAE,CACvC,CACF;IACD,SAAS,4BAA4B,kBAAkB,IAAI,CAAC;IAC7D,CAAC;AACF,UAAO;IACL;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACD;IACD;AAGF,QAAM,SAAS,WAAW,YAAY;GACpC,MAAM,eAAgC;IACpC,GAAG,iBAAiB;IACpB,GAAG,SAAS;IACZ,GAAG,cAAc;IACjB,GAAG,IAAI;IACP,GAAG,KAAK;IACR,GAAG,SAAS;IACZ,GAAG,SAAS;IACZ,GAAG,SAAS;IACZ,GAAG,cAAc;IAClB;AACD,SAAM,qBAAqB,cAAc,YAAY,MAAM,IAAI;AAa/D,SAAM,0BAXoC;IACxC,GAAG,iBAAiB;IACpB,GAAG,SAAS;IACZ,GAAG,cAAc;IACjB,GAAG,IAAI;IACP,GAAG,KAAK;IACR,GAAG,SAAS;IACZ,GAAG,SAAS;IACZ,GAAG,SAAS;IACZ,GAAG,cAAc;IAClB,EAC6C,YAAY,MAAM,IAAI;GAEpE,MAAM,qBAAkD,EAAE;AAC1D,QAAK,MAAM,OAAO,SAAS,UAAU,KAAK,QACxC,oBAAmB,KAAK;IACtB,cAAc;IACd,cAAc,IAAI;IACnB,CAAC;AAEJ,QAAK,MAAM,OAAO,cAAc,UAAU,QACxC,oBAAmB,KAAK;IACtB,cAAc;IACd,cAAc,IAAI;IACnB,CAAC;AAEJ,QAAK,MAAM,OAAO,KAAK,UAAU,aAAa,QAC5C,oBAAmB,KAAK;IACtB,cAAc;IACd,cAAc,IAAI;IACnB,CAAC;AAEJ,QAAK,MAAM,WAAW,KAAK,UAAU,aAAa,SAChD,oBAAmB,KAAK;IACtB,cAAc;IACd,cAAc,QAAQ;IACvB,CAAC;AAEJ,QAAK,MAAM,OAAO,cAAc,eAAe,QAC7C,oBAAmB,KAAK;IACtB,cAAc;IACd,cAAc,IAAI;IACnB,CAAC;AAEJ,QAAK,MAAM,OAAO,cAAc,gBAAgB,QAC9C,oBAAmB,KAAK;IACtB,cAAc;IACd,cAAc,IAAI;IACnB,CAAC;AAEJ,SAAM,iCAAiC,oBAAoB,IAAI;GAG/D,MAAM,iBAAiB,IAAI,IAAI;IAC7B,GAAG,iBAAiB;IACpB,GAAG,SAAS;IACZ,GAAG,cAAc;IACjB,GAAG,IAAI;IACP,GAAG,KAAK;IACR,GAAG,SAAS;IACZ,GAAG,SAAS;IACZ,GAAG,SAAS;IACZ,GAAG,cAAc;IAClB,CAAC;GAEF,MAAM,YAAY,CAAC,GADI,IAAI,IAAI,aAAa,KAAK,MAAM,EAAE,aAAa,CAAC,CAClC,CAAC,QAAQ,UAAU,CAAC,eAAe,IAAI,MAAM,CAAC;AACnF,QAAK,MAAM,YAAY,UACrB,KAAI,QAAQ,KAAK;IACf,MAAM;IACN,SAAS;KACP;KACA,iBAAiB;KAClB;IACF,CAAC;IAEJ;AAEF,MAAI,QAAQ;AACV,UAAO,KAAK,uCAAuC;AACnD;;AAIF,QAAM,SAAS,8BAA8B,YAAY;AACvD,SAAM,mBAAmB,QAAQ,eAAe,iBAAiB,YAAY;AAC7E,SAAM,sBAAsB,QAAQ,aAAa,kBAAkB,gBAAgB;AACnF,SAAM,mBAAmB,QAAQ,eAAe,gBAAgB;AAChE,SAAM,SAAS,QAAQ,KAAK,gBAAgB;AAC5C,SAAM,UAAU,QAAQ,MAAM,gB
AAgB;AAC9C,SAAM,cAAc,QAAQ,UAAU,gBAAgB;AACtD,SAAM,cAAc,QAAQ,UAAU,gBAAgB;IACtD;AAGF,QAAM,SAAS,iCAAiC,YAAY;AAC1D,SAAM,cAAc,QAAQ,UAAU,mBAAmB;AACzD,SAAM,UAAU,QAAQ,MAAM,mBAAmB;AACjD,SAAM,SAAS,QAAQ,KAAK,mBAAmB;IAC/C;AAGF,QAAM,SAAS,uCACb,iBAAiB,QAAQ,KAAK,gBAAgB,CAC/C;AAGD,QAAM,SAAS,uCAAuC,YAAY;AAChE,SAAM,cAAc,QAAQ,UAAU,gBAAgB;AACtD,SAAM,cAAc,QAAQ,UAAU,gBAAgB;IACtD;AAGF,QAAM,SAAS,iCAAiC,YAAY;AAC1D,SAAM,cAAc,QAAQ,UAAU,SAAS;AAC/C,SAAM,cAAc,QAAQ,UAAU,SAAS;AAC/C,SAAM,mBAAmB,QAAQ,eAAe,SAAS;AACzD,SAAM,mBAAmB,QAAQ,eAAe,SAAS;IACzD;AAGF,QAAM,SAAS,iCAAiC,iBAAiB,QAAQ,KAAK,SAAS,CAAC;AAGxF,QAAM,SAAS,gCAAgC,YAAY;AACzD,SAAM,cAAc,QAAQ,UAAU,kBAAkB;AACxD,SAAM,UAAU,QAAQ,MAAM,kBAAkB;AAChD,SAAM,SAAS,QAAQ,KAAK,kBAAkB;AAC9C,SAAM,cAAc,QAAQ,UAAU,kBAAkB;IACxD;AAGF,QAAM,SAAS,uBACb,sBAAsB,QAAQ,aAAa,kBAAkB,SAAS,CACvE;AAED,SAAO,QAAQ,gCAAgC;GAC/C;;;;;;;;;;ACtWJ,SAAgB,0BAA0B,QAAwB;AAChE,SAAQ,QAAR;EACE,KAAK,UACH,QAAO,OAAO,IAAI,OAAO;EAC3B,KAAK,UACH,QAAO,OAAO,KAAK,OAAO;EAC5B,KAAK,UACH,QAAO,OAAO,QAAQ,OAAO;EAC/B,KAAK,SACH,QAAO,OAAO,MAAM,OAAO;EAC7B,KAAK,WACH,QAAO,OAAO,QAAQ,OAAO;EAC/B,QACE,QAAO;;;;;;;;AASb,SAAgB,4BAA4B,QAAoC;AAC9E,QACE,WAAW,kBAAkB,WAC7B,WAAW,kBAAkB,UAC7B,WAAW,kBAAkB;;;;;;;AASjC,SAAgB,uBAAuB,QAAmC;AAExE,SADoB,OAAO,aAAa,EACxC;EACE,KAAK,UACH,QAAO,kBAAkB;EAC3B,KAAK,UACH,QAAO,kBAAkB;EAC3B,KAAK,UACH,QAAO,kBAAkB;EAC3B,KAAK,SACH,QAAO,kBAAkB;EAC3B,KAAK,WACH,QAAO,kBAAkB;EAC3B,QACE,OAAM,IAAI,MACR,mBAAmB,OAAO,6DAC3B;;;;;;;;AAaP,SAAgB,gCAAgC,QAAwB;AACtE,SAAQ,QAAR;EACE,KAAK,UACH,QAAO,OAAO,KAAK,OAAO;EAC5B,KAAK,UACH,QAAO,OAAO,QAAQ,OAAO;EAC/B,KAAK,SACH,QAAO,OAAO,MAAM,OAAO;EAC7B,QACE,QAAO;;;;;;;;AASb,SAAgB,kCAAkC,QAA2C;AAC3F,QAAO,WAAW,yBAAyB,WAAW,WAAW,yBAAyB;;;;;;;AAY5F,SAAgB,2BAA2B,YAAwC;AACjF,SAAQ,YAAR;EACE,KAAK,mBAAmB,QACtB,QAAO;EACT,KAAK,mBAAmB,eACtB,QAAO;EACT,KAAK,mBAAmB,SACtB,QAAO;EACT,KAAK,mBAAmB,aACtB,QAAO;EACT,KAAK,mBAAmB,SACtB,QAAO;EACT,QACE,QAAO;;;;;;;;AASb,SAAgB,4BAA4B,aAA0C;AACpF,SAAQ,aAAR;EACE,KAAK,oBAAoB,SACvB,QAAO;EACT,KAAK,oBAAoB,MACvB,QAAO;EACT,KAAK,oBAAoB,iBACvB,QAAO;EACT,QACE,QAAO;;;;;;AC5Gb,SA
AS,0BAA0B,QAAmC;AACpE,SAAQ,QAAR;EACE,KAAK,kBAAkB,QACrB,QAAO;EACT,KAAK,kBAAkB,QACrB,QAAO;EACT,KAAK,kBAAkB,QACrB,QAAO;EACT,KAAK,kBAAkB,OACrB,QAAO;EACT,KAAK,kBAAkB,SACrB,QAAO;EACT,QACE,QAAO;;;;;;;;AASb,SAAgB,sBAAsB,KAAuC;AAC3E,QAAO;EACL,IAAI,IAAI;EACR,cAAc,IAAI;EAClB,QAAQ,0BAA0B,IAAI,OAAO;EAC7C,WAAW,IAAI,YAAY,cAAc,IAAI,UAAU,CAAC,aAAa,GAAG;EACzE;;;;;;;AAQH,SAAgB,kBAAkB,KAAmC;AACnE,QAAO;EACL,IAAI,IAAI;EACR,cAAc,IAAI;EAClB,QAAQ,0BAA0B,IAAI,OAAO;EAC7C,aAAa,IAAI,cAAc,cAAc,IAAI,YAAY,CAAC,aAAa,GAAG;EAC9E,WAAW,IAAI,YAAY,cAAc,IAAI,UAAU,CAAC,aAAa,GAAG;EACxE,WAAW,IAAI,YAAY,cAAc,IAAI,UAAU,CAAC,aAAa,GAAG;EACzE;;;;;;;AAQH,SAAgB,yBAAyB,SAAqD;AAC5F,QAAO;EACL,IAAI,QAAQ;EACZ,OAAO,QAAQ;EACf,QAAQ,0BAA0B,QAAQ,OAAO;EACjD,OAAO,QAAQ,SAAS;EACxB,WAAW,QAAQ,YAAY,cAAc,QAAQ,UAAU,CAAC,aAAa,GAAG;EAChF,YAAY,QAAQ,aAAa,cAAc,QAAQ,WAAW,CAAC,aAAa,GAAG;EACnF,oBAAoB,QAAQ,sBAAsB;EACnD;;;;;;;;;;;;AAkCH,SAAS,kBAAkB,UAAoC;CAC7D,MAAM,SAAS,SAAS,eAAe;AACvC,KAAI,CAAC,UAAU,OAAO,SAAS,OAC7B,QAAO,4BAA4B,SAAS,YAAY;AAG1D,SAAQ,OAAO,MAAf;EACE,KAAK,WACH,QAAO,aAAa,OAAO,MAAM,UAAU,IAAI,OAAO,MAAM,SAAS;EACvE,KAAK,QACH,QAAO,mBAAmB,OAAO,MAAM,WAAW,OAAO,MAAM,WAAW,KAAK;EACjF,KAAK,kBACH,QAAO;EACT,QACE,QAAO,4BAA4B,SAAS,YAAY;;;;;;;;;AAU9D,SAAS,mBAAmB,WAAmB,WAA4B;CACzE,MAAM,QAAQ,UAAU,MAAM,IAAI;AAClC,KAAI,MAAM,SAAS,EACjB,QAAO,UAAU;CAGnB,MAAM,CAAC,SAAS,UAAU,UAAU;AAGpC,KAAI,WAAW;EAEb,MAAM,gBAAgB,UAAU,MAAM,2CAA2C;AACjF,MAAI,cACF,QAAO,UAAU,cAAc,GAAG,GAAG;EAIvC,MAAM,oBAAoB,UAAU,MAAM,+CAA+C;AACzF,MAAI,kBACF,QAAO,UAAU,kBAAkB,GAAG,GAAG;;AAK7C,QAAO,UAAU,QAAQ,GAAG,SAAS,GAAG;;;;;;;AAQ1C,SAAS,oBAAoB,UAAqD;CAChF,MAAM,SAAS,SAAS,eAAe;AACvC,KAAI,CAAC,UAAU,OAAO,SAAS,OAC7B,QAAO,EAAE;AAGX,SAAQ,OAAO,MAAf;EACE,KAAK,WACH,QAAO;GACL,UAAU,OAAO,MAAM;GACvB,WAAW,OAAO,MAAM;GACzB;EACH,KAAK,QACH,QAAO;GACL,WAAW,OAAO,MAAM;GACxB,WAAW,OAAO,MAAM,WAAW,QAAQ;GAC5C;EACH,KAAK,kBACH,QAAO,EACL,QAAQ,OAAO,MAAM,SAAS,QAAQ,IACvC;EACH,QACE,QAAO,EAAE;;;;;;;;AASf,SAAS,mBAAmB,UAAqD;CAC/E,MAAM,SAAS,SAAS,cAAc;AACtC,KAAI,CAAC,UAAU,OAAO,SAAS,OAC7B,QAAO,EAAE;AAGX,SAAQ,OAAO,MAAf;EACE,KAAK,UACH,QA
AO;GACL,KAAK,OAAO,MAAM,KAAK,QAAQ;GAC/B,SAAS,OAAO,MAAM,QAAQ;GAC/B;EACH,KAAK,gBACH,QAAO;GACL,SAAS,OAAO,MAAM;GACtB,OAAO,OAAO,MAAM;GACrB;EACH,KAAK,WACH,QAAO,EACL,MAAM,OAAO,MAAM,MACpB;EACH,KAAK,WACH,QAAO,EACL,cAAc,OAAO,MAAM,cAC5B;EACH,QACE,QAAO,EAAE;;;;;;;;AASf,SAAgB,mBAAmB,UAA8C;AAC/E,QAAO;EACL,MAAM,SAAS;EACf,aAAa,kBAAkB,SAAS;EACxC,YAAY,2BAA2B,SAAS,WAAW;EAC3D,UAAU,SAAS;EACpB;;;;;;;AAQH,SAAgB,eAAe,UAA0C;AACvE,QAAO;EACL,MAAM,SAAS;EACf,aAAa,SAAS;EACtB,aAAa,kBAAkB,SAAS;EACxC,YAAY,2BAA2B,SAAS,WAAW;EAC3D,UAAU,SAAS;EACnB,eAAe,KAAK,UAAU,oBAAoB,SAAS,EAAE,MAAM,EAAE;EACrE,cAAc,KAAK,UAAU,mBAAmB,SAAS,EAAE,MAAM,EAAE;EACpE;;;;;ACtQH,MAAMC,aAAW,EACf,MAAM,IAAI,EAAE,QAAQ,EAAE;CACpB,YAAY;CACZ,aAAa;CACd,CAAC,EACH;;;;;;;;AAwBD,eAAe,gBACb,QACA,aACA,MACA;CACA,MAAM,EAAE,aAAa,MAAM,OAAO,oBAAoB;EACpD;EACA;EACD,CAAC;AACF,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,aAAa,KAAK,cAAc;AAElD,QAAO;;AAYT,eAAsB,YACpB,SACuB;CAEvB,MAAM,OAAO,UAAU,UAAU,QAAQ,OAAO,QAAQ,SAAS;CAKjE,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;AAEF,KAAI;AAEF,SAAO,eADU,MAAM,gBAAgB,QAAQ,aAAa,KAAK,CAClC;UACxB,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MAAM,aAAa,KAAK,cAAc;AAElD,QAAM;;;AAIV,MAAaC,eAAa,cAAc;CACtC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,GAAGD;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,WAAW,MAAM,YAAY;GACjC,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;AAEF,SAAO,IAAI,UAAU,EACnB,SAAS;GACP,eAAe;GACf,cAAc;GACf,EACF,CAAC;GACF;CACH,CAAC;;;;;;;;;ACzGF,SAAgB,gBAAgB,WAA+C;AAC7E,KAAI,CAAC,UACH,QAAO;CAET,MAAM,OAAO,cAAc,UAAU;AACrC,KAAI,OAAO,MAAM,KAAK,SAAS,CAAC,CAC9B,QAAO;AAET,QAAO;;;;;;;;;AAUT,SAAgB,YAAY,MAAmB,QAAkC;AAC/E,QAAO,MAAM,MAAM;EACjB,GAAG;EACH,QAAQ,oBAAoB,OAAO;EACpC,CAAC;;;;;;;AAQJ,SAAgB,oBAAoB,MAAkC;AACpE,QAAO,YAAY,MAAM,EAAE,YAAY,MAAM,CAAC;;;;;;;AAyChD,SAAgB,qBAAqB,OAAqC;AACxE,KAAI,UAAU,KACZ,QAAO;CAET,MAAM,OAAO,iBAAiB,OAAO,QAAQ,IAAI,KAAK,MAAM;AAC5D,KAAI,OAAO,MAAM,KAAK,SAAS,CAAC,CA
C9B,QAAO,OAAO,UAAU,WAAW,QAAQ;AAE7C,QAAO,0BAA0B,MAAM,EAAE,WAAW,MAAM,CAAC;;;;;;;;;;ACrF7D,SAAgB,gCAAgC,QAA0C;AACxF,SAAQ,QAAR;EACE,KAAK,yBAAyB,QAC5B,QAAO;EACT,KAAK,yBAAyB,QAC5B,QAAO;EACT,KAAK,yBAAyB,OAC5B,QAAO;EACT,QACE,QAAO;;;;;;ACVb,MAAa,WAAW,EACtB,MAAM,IAAI,EAAE,QAAQ,EAAE;CACpB,YAAY;CACZ,aAAa;CACd,CAAC,EACH;AAED,MAAa,WAAW;CACtB,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;EACpC,OAAO;EACP,aAAa;EACd,CAAC;CACF,UAAU,IAAI,YAAY,QAAQ,KAAK,EAAE;EACvC,OAAO;EACP,aAAa;EACd,CAAC;CACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;EACpC,OAAO;EACP,aAAa;EACd,CAAC;CACH;;;;;;;;;ACnBD,SAAgB,kCAAkC,QAA2C;AAC3F,QACE,WAAW,yBAAyB,WACpC,WAAW,yBAAyB,UACpC,WAAW,yBAAyB;;;;;;;;;;ACuCxC,SAAS,gCAAgC,QAA0C;AACjF,SAAQ,QAAR;EACE,KAAK,yBAAyB,QAC5B,QAAO;EACT,KAAK,yBAAyB,eAC5B,QAAO;EACT,KAAK,yBAAyB,QAC5B,QAAO;EACT,KAAK,yBAAyB,QAC5B,QAAO;EACT,KAAK,yBAAyB,OAC5B,QAAO;EACT,QACE,QAAO;;;;;;;;AASb,SAAS,mCAAmC,QAA6C;AACvF,SAAQ,QAAR;EACE,KAAK,4BAA4B,QAC/B,QAAO;EACT,KAAK,4BAA4B,QAC/B,QAAO;EACT,KAAK,4BAA4B,QAC/B,QAAO;EACT,KAAK,4BAA4B,OAC/B,QAAO;EACT,QACE,QAAO;;;;;;;;AASb,SAAgB,mBAAmB,UAAsC;AACvE,QAAO;EACL,MAAM,SAAS;EACf,SAAS,SAAS;EAClB,cAAc,OAAO,KAAK,SAAS,aAAa,CAAC;EACjD,WAAW,SAAS,YAAY,cAAc,SAAS,UAAU,GAAG;EACrE;;;;;;;AAQH,SAAgB,eAAe,UAAkC;CAC/D,MAAM,eAAuC,EAAE;AAC/C,MAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QAAQ,SAAS,aAAa,CACjE,cAAa,QAAQ,QAAQ,UAAU;AAGzC,QAAO;EACL,MAAM,SAAS;EACf,IAAI,SAAS;EACb,SAAS,SAAS;EACJ;EACd,WAAW,SAAS,YAAY,cAAc,SAAS,UAAU,GAAG;EACpE,WAAW,SAAS,YAAY,cAAc,SAAS,UAAU,GAAG;EACrE;;;;;;;AAQH,SAAgB,2BACd,cAC0B;AAC1B,QAAO;EACL,IAAI,aAAa;EACjB,gBAAgB,aAAa;EAC7B,QAAQ,mCAAmC,aAAa,OAAO;EAC/D,aAAa,aAAa;EAC1B,WAAW,aAAa,YAAY,cAAc,aAAa,UAAU,GAAG;EAC5E,YAAY,aAAa,aAAa,cAAc,aAAa,WAAW,GAAG;EAChF;;;;;;;AAQH,SAAgB,wBAAwB,WAAqD;AAC3F,QAAO;EACL,IAAI,UAAU;EACd,cAAc,UAAU;EACxB,QAAQ,gCAAgC,UAAU,OAAO;EACzD,eAAe,UAAU,cAAc;EACvC,WAAW,UAAU,YAAY,cAAc,UAAU,UAAU,GAAG;EACtE,YAAY,UAAU,aAAa,cAAc,UAAU,WAAW,GAAG;EAC1E;;;;;AChFH,SAASE,QAAM,IAA2B;AACxC,QAAO,IAAI,SAAS,cAAY,WAAWC,WAAS,GAAG,CAAC;;AAG1D,SAASC,aAAW,MAAoB;AACtC,QAAO,KAAK,mBAAmB,SAAS,EAAE,QAAQ,OAA
O,CAAC;;AAG5D,SAASC,iBAAe,QAA0C;CAChE,MAAM,aAAa,yBAAyB;AAC5C,SAAQ,QAAR;EACE,KAAK,yBAAyB,QAC5B,QAAO,OAAO,IAAI,WAAW;EAC/B,KAAK,yBAAyB,eAC5B,QAAO,OAAO,QAAQ,WAAW;EACnC,KAAK,yBAAyB,QAC5B,QAAO,OAAO,KAAK,WAAW;EAChC,KAAK,yBAAyB,QAC5B,QAAO,OAAO,QAAQ,WAAW;EACnC,KAAK,yBAAyB,OAC5B,QAAO,OAAO,MAAM,WAAW;EACjC,QACE,QAAO;;;AAIb,SAAS,YAAY,QAA0C;AAE7D,SADoB,OAAO,aAAa,EACxC;EACE,KAAK,UACH,QAAO,yBAAyB;EAClC,KAAK,iBACH,QAAO,yBAAyB;EAClC,KAAK,UACH,QAAO,yBAAyB;EAClC,KAAK,UACH,QAAO,yBAAyB;EAClC,KAAK,SACH,QAAO,yBAAyB;EAClC,QACE,OAAM,IAAI,MACR,mBAAmB,OAAO,mEAC3B;;;AAeP,eAAsB,uBACpB,SACkC;CAKlC,MAAM,eACJ,WAAW,kBAAkB,UACzB,QAAQ,eACR,WAAW,cAAc,UACvB,QAAQ,UAAU,OAClB;CAKR,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;CAEF,MAAM,UAA4D,EAAE;AAEpE,KAAI,aACF,SAAQ,KACN,OAAO,cAAc,EACnB,WAAW,OAAO,iBAAiB;EACjC,OAAO;EACP,UAAU,mBAAmB;EAC7B,OAAO,EAAE,MAAM;GAAE,MAAM;GAAe,OAAO;GAAc,EAAE;EAC9D,CAAC,EACH,CAAC,CACH;AAGH,KAAI,SAAS,QAAQ;EACnB,MAAM,cAAc,YAAY,QAAQ,OAAO;AAC/C,UAAQ,KACN,OAAO,cAAc,EACnB,WAAW,OAAO,iBAAiB;GACjC,OAAO;GACP,UAAU,mBAAmB;GAC7B,OAAO,EAAE,MAAM;IAAE,MAAM;IAAe,OAAO;IAAa,EAAE;GAC7D,CAAC,EACH,CAAC,CACH;;CAGH,MAAM,SACJ,QAAQ,SAAS,IACb,OAAO,cAAc,EACnB,KAAK,SACN,CAAC,GACF;AAaN,SAXmB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EAClE,MAAM,EAAE,YAAY,kBAAkB,MAAM,OAAO,uBAAuB;GACxE;GACA;GACA,UAAU;GACV,eAAe,cAAc;GAC7B;GACD,CAAC;AACF,SAAO,CAAC,YAAY,cAAc;GAClC,EAEgB,IAAI,wBAAwB;;;;;;;AAQhD,eAAsB,qBACpB,SACqC;CAKrC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;CAEF,eAAe,uBACb,qBACwC;AACxC,MAAI;GACF,MAAM,SAAS,OAAO,cAAc,EAClC,WAAW,OAAO,iBAAiB;IACjC,OAAO;IACP,UAAU,mBAAmB;IAC7B,OAAO,EAAE,MAAM;KAAE,MAAM;KAAe,OAAO;KAAqB,EAAE;IACrE,CAAC,EACH,CAAC;AAQF,WANiB,MAAM,OAAO,uBAAuB;IACnD;IACA;IACA,UAAU;IACX,CAAC,EAEc,WAAW;UACrB;AACN;;;CAIJ,eAAe,uBACb,aACA,aACsC;EACtC,MAAM,EAAE,cAAc,MAAM,OAAO,qBAAqB;GACtD;GACA;GACD,CAAC;AAEF,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,cAA
c,YAAY,cAAc;EAG1D,MAAM,SAAsC,wBAAwB,UAAU;AAE9E,MAAI,eAAe,UAAU,cAAc,SAAS,EAClD,QAAO,aAAa,MAAM,QAAQ,IAChC,UAAU,cAAc,IAAI,OAAO,QAAQ;GACzC,MAAM,UAAU,2BAA2B,IAAI;AAC/C,OAAI,IAAI,aAAa;IACnB,MAAM,oBAAoB,MAAM,uBAAuB,IAAI,YAAY;AACvE,QAAI,kBACF,QAAO;KACL,GAAG;KACH,MAAM,kBAAkB,QAAQ;KAChC,QAAQ,kBAAkB,UAAU;KACrC;;AAGL,UAAO;IACP,CACH;AAGH,SAAO;;CAGT,eAAe,oBAA0D;EACvE,MAAM,WAAW,QAAQ,YAAY;AAErC,SAAO,MAAM;GACX,MAAM,EAAE,cAAc,MAAM,OAAO,qBAAqB;IACtD;IACA,aAAa,QAAQ;IACtB,CAAC;AAEF,OAAI,CAAC,UACH,OAAM,IAAI,MAAM,cAAc,QAAQ,YAAY,cAAc;AAIlE,OAAI,kCAAkC,UAAU,OAAO,CACrD,QAAO,MAAM,uBAAuB,QAAQ,aAAa,QAAQ,QAAQ,MAAM;AAGjF,SAAMH,QAAM,SAAS;;;AAMzB,QAAO;EACL,WAHgB,MAAM,uBAAuB,QAAQ,aAAa,QAAQ,QAAQ,MAAM;EAIxF,MAAM;EACP;;AAGH,eAAe,gBACb,QACA,UACA,MACsC;CACtC,MAAM,UAAU,CAAC,OAAO,KAAK,CAAC,MAAM,sCAAsC,GAAG;CAE7E,MAAM,iBAAiB,kBAAkB;AACvC,MAAI,QAEF,SAAQ,OAAO,wCADHE,6BAAW,IAAI,MAAM,CAAC,CACyB;IAE5D,SAAS;AAEZ,KAAI;EACF,MAAM,SAAS,MAAM,QAAQ;EAC7B,MAAM,gBAAgBC,iBACpB,yBAAyB,OAAO,QACjC;AACD,MAAI,OAAO,WAAW,UACpB,UAAS,QAAQ,cAAc,gBAAgB;MAE/C,UAAS,KAAK,cAAc,gBAAgB;AAE9C,SAAO;WACC;AACR,gBAAc,eAAe;AAC7B,WAAS,MAAM;;;;;;;AAQnB,SAAgB,uBAAuB,WAA8C;CAEnF,MAAM,cAAc,SAA+B,OAAO,KAAK,aAAa,GAAG;CAG/E,MAAM,cAAkC;EACtC,CAAC,MAAM,UAAU,GAAG;EACpB,CAAC,gBAAgB,UAAU,aAAa;EACxC,CAAC,UAAU,UAAU,OAAO;EAC5B,CAAC,iBAAiB,UAAU,cAAc,UAAU,CAAC;EACrD,CAAC,aAAa,WAAW,UAAU,UAAU,CAAC;EAC9C,CAAC,cAAc,WAAW,UAAU,WAAW,CAAC;EACjD;AACD,QAAO,IAAI,oBAAoB,YAAY,CAAC;AAG5C,KAAI,UAAU,cAAc,UAAU,WAAW,SAAS,GAAG;AAC3D,SAAO,IAAI,OAAO,KAAK,oBAAoB,CAAC;AAC5C,OAAK,MAAM,OAAO,UAAU,YAAY;AACtC,UAAO,IAAI,OAAO,KAAK,SAAS,IAAI,eAAe,MAAM,CAAC;AAC1D,UAAO,IAAI,aAAa,IAAI,SAAS;AACrC,UAAO,IAAI,cAAc,WAAW,IAAI,UAAU,GAAG;AACrD,UAAO,IAAI,eAAe,WAAW,IAAI,WAAW,GAAG;AAEvD,OAAI,IAAI,MAAM;AACZ,WAAO,IAAI,OAAO,QAAQ,YAAY,CAAC;IACvC,MAAM,WAAW,IAAI,KAAK,MAAM,KAAK;AACrC,SAAK,MAAM,QAAQ,SACjB,QAAO,IAAI,OAAO,OAAO;;AAI7B,OAAI,IAAI,QAAQ;AACd,WAAO,IAAI,OAAO,QAAQ,cAAc,CAAC;AACzC,QAAI;KACF,MAAM,SAAS,KAAK,MAAM,IAAI,OAAO;AACrC,YAAO,IAAI,OAAO,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC,MAAM,KAAK,CAAC,KAAK,SAAS,GAAG;YACzE;AAC
N,YAAO,IAAI,OAAO,IAAI,SAAS;;;;;;AAOzC,MAAa,oBAAoB,cAAc;CAC7C,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,aAAa,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GACtC,YAAY;GACZ,aAAa;GACd,CAAC;EACF,iBAAiB,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAC1C,OAAO;GACP,aAAa;GACd,CAAC;EACF,QAAQ,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GACjC,OAAO;GACP,aAAa;GACd,CAAC;EACF,GAAG;EACH,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE,EACpC,aAAa,iDACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;AAClC,MAAI,KAAK,aAAa;GACpB,MAAM,WAAW,cAAc,KAAK,SAAS;GAC7C,MAAM,EAAE,WAAW,SAAS,MAAM,qBAAqB;IACrD,aAAa,KAAK;IAClB,aAAa,KAAK;IAClB,SAAS,KAAK;IACd;IACA,MAAM,KAAK;IACZ,CAAC;AAEF,OAAI,CAAC,KAAK,KACR,QAAO,KAAK,iBAAiB,UAAU,MAAM,EAAE,MAAM,UAAU,CAAC;GAGlE,MAAM,SAAS,KAAK,OAAO,MAAM,gBAAgB,MAAM,UAAU,KAAK,KAAK,GAAG;AAE9E,OAAI,KAAK,QAAQ,CAAC,KAAK,KACrB,wBAAuB,OAAO;OAE9B,QAAO,IAAI,OAAO;SAEf;GACL,MAAM,aAAa,MAAM,uBAAuB;IAC9C,aAAa,KAAK;IAClB,SAAS,KAAK;IACd,cAAc,KAAK;IACnB,QAAQ,KAAK;IACd,CAAC;AACF,UAAO,IAAI,WAAW;;GAExB;CACH,CAAC;;;;;;;;;;;AC5ZF,eAAsB,gBACpB,QACA,aACA,MACA;CACA,MAAM,EAAE,aAAa,MAAM,OAAO,kBAAkB;EAClD;EACA,cAAc;EACf,CAAC;AACF,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,aAAa,KAAK,cAAc;AAElD,QAAO;;AAYT,eAAsB,YACpB,SACuB;CAIvB,MAAM,OAAO,UAAU,UAAU,QAAQ,OAAO,QAAQ,SAAS;CAKjE,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;AAEF,KAAI;AAEF,SAAO,eADU,MAAM,gBAAgB,QAAQ,aAAa,KAAK,CAClC;UACxB,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MAAM,aAAa,KAAK,cAAc;AAElD,QAAM;;;AAIV,MAAaC,eAAa,cAAc;CACtC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,WAAW,MAAM,YAAY;GACjC,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;AAEF,SAAO,IAAI,SAAS;GACpB;CACH,CAAC;;;;AC5BF,SAAS,MAAM,IAA2B;AACxC,QAAO,IAAI,SAAS,cAAY,WAAWC,WAAS,GAAG,CAAC;;AAG1D,SAASC,aAAW,MAAoB;AACtC,QAAO,KAAK,mBAAmB,SAAS,EAAE,QAAQ,OAAO,CAAC;;AAG5D,SAAS,eAAe,QAA0C;CAChE,MAAM,aAAa,yBAAyB;AAC5C,SAAQ,QAAR;EACE,KA
AK,yBAAyB,QAC5B,QAAO,OAAO,IAAI,WAAW;EAC/B,KAAK,yBAAyB,eAC5B,QAAO,OAAO,QAAQ,WAAW;EACnC,KAAK,yBAAyB,QAC5B,QAAO,OAAO,KAAK,WAAW;EAChC,KAAK,yBAAyB,QAC5B,QAAO,OAAO,QAAQ,WAAW;EACnC,KAAK,yBAAyB,OAC5B,QAAO,OAAO,MAAM,WAAW;EACjC,QACE,QAAO;;;;;;;;AAkBb,eAAsB,iBACpB,SACgC;CAChC,MAAM,EAAE,QAAQ,aAAa,aAAa,UAAU,cAAc,cAAc;CAEhF,IAAI;CACJ,IAAI;CACJ,MAAM,UAAU,eACZ,IAAI,EACF,QAAQ,GACT,CAAC,CAAC,MAAM,sCAAsC,GAC/C;AAEJ,KAAI;AACF,SAAO,MAAM;GACX,MAAM,EAAE,cAAc,MAAM,OAAO,qBAAqB;IACtD;IACA;IACD,CAAC;AAEF,OAAI,CAAC,WAAW;AACd,aAAS,KAAK,cAAc,YAAY,cAAc;AACtD,UAAM,IAAI,MAAM,cAAc,YAAY,cAAc;;GAG1D,MAAM,MAAMA,6BAAW,IAAI,MAAM,CAAC;GAClC,MAAM,gBAAgB,eAAe,UAAU,OAAO;AAGtD,OAAI,UAAU,WAAW,YAAY;AACnC,QAAI,cAAc;AAChB,cAAS,MAAM;AACf,YAAO,KAAK,WAAW,iBAAiB;MACtC,MAAM;MACN,QAAQ;MACT,CAAC;AACF,cAAS,MAAM,sCAAsC;;AAEvD,iBAAa,UAAU;;AAIzB,OAAI,aAAa,UAAU,WAAW,yBAAyB,SAAS;IACtE,MAAM,cAAc,eAAe,UAAU;AAC7C,QAAI,eAAe,gBAAgB,iBAAiB;AAClD,SAAI,cAAc;AAChB,eAAS,MAAM;AACf,aAAO,KAAK,SAAS,YAAY,IAAI,iBAAiB;OACpD,MAAM;OACN,QAAQ;OACT,CAAC;AACF,eAAS,MAAM,sCAAsC;;AAEvD,uBAAkB;;;AAItB,OAAI,QACF,SAAQ,OAAO,wCAAwC,IAAI;AAI7D,OAAI,iBAAiB,UAAU,OAAO,EAAE;AACtC,QAAI,UAAU,WAAW,yBAAyB,QAChD,UAAS,QAAQ,cAAc,gBAAgB;aACtC,UAAU,WAAW,yBAAyB,OACvD,UAAS,KAAK,cAAc,gBAAgB;QAE5C,UAAS,KAAK,cAAc,gBAAgB;AAE9C,WAAO,wBAAwB,UAAU;;AAG3C,SAAM,MAAM,SAAS;;UAEhB,OAAO;AACd,WAAS,MAAM;AACf,QAAM;;;AAIV,SAAS,eAAe,WAAsC;AAC5D,QAAO,UAAU,cACd,QAAQ,QAAQ,IAAI,WAAW,4BAA4B,QAAQ,CACnE,KAAK,QAAQ,IAAI,eAAe,CAChC,KAAK,KAAK;;AAGf,SAAS,iBAAiB,QAA2C;AACnE,QACE,WAAW,yBAAyB,WACpC,WAAW,yBAAyB,UACpC,WAAW,yBAAyB;;AAsBxC,eAAe,kBACb,SACsC;CACtC,MAAM,EAAE,QAAQ,aAAa,iBAAiB;AAE9C,KAAI;EACF,MAAM,WAAW,MAAM,gBAAgB,QAAQ,aAAa,aAAa;EACzE,MAAM,cAAc,OAAO,mBAAmB,QAAQ,YAAY;EAClE,MAAMC,QACJ,QAAQ,QAAQ,SACZ,SACA,OAAO,QAAQ,QAAQ,WACrB,QAAQ,MACR,KAAK,UAAU,QAAQ,IAAI;EAEnC,MAAM,EAAE,gBAAgB,MAAM,OAAO,kBAAkB;GACrD;GACA,YAAY,SAAS;GACrB;GACA;GACD,CAAC;AAEF,SAAO;GACL;GACA,OAAO,gBACL,iBAAiB;IACf;IACA;IACA;IACA,UAAU,QAAQ,YAAY;IAC9B,cAAc,aAAa;IAC3B,WAAW;IACZ,CAAC;GACL;UACM,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SAC
vD,OAAM,IAAI,MAAM,aAAa,aAAa,cAAc;AAE1D,QAAM;;;AAIV,eAAe,oBACb,SACsC;CAKtC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;CAEF,MAAM,EAAE,WAAW,MAAM,WAAW,QAAQ,WAAW;CACvD,MAAM,EAAE,gBAAgB,MAAM,OAAO,eAAe;EAClD;EACA,iBAAiB,OAAO;EACzB,CAAC;AACF,KAAI,CAAC,aAAa,cAChB,OAAM,IAAI,MAAM,eAAe,OAAO,KAAK,uCAAuC;AAGpF,QAAO,MAAM,kBAAkB;EAC7B;EACA;EACA,cAAc,QAAQ;EACtB,aAAa;GACX,WAAW,YAAY;GACvB,iBAAiB,QAAQ;GAC1B;EACD,KAAK,QAAQ;EACb,UAAU,QAAQ;EACnB,CAAC;;AAcJ,eAAsB,cACpB,SACsC;AAEtC,KAAI,UAAU,QACZ,QAAO,MAAM,oBAAoB,QAAQ;AAa3C,QAAO,MAAM,kBAAkB;EAC7B,QAPa,MAAM,mBAJD,MAAM,gBAAgB;GACxC,YAAY;GACZ,SAAS,QAAQ;GAClB,CAAC,CACkD;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,QAAQ;GACrB,SAAS,QAAQ;GAClB,CAAC;EAKA,cAAc,QAAQ,SAAS;EAC/B,aAAa,QAAQ;EACrB,KAAK,QAAQ;EACb,UAAU,QAAQ;EACnB,CAAC;;AAGJ,MAAa,eAAe,cAAc;CACxC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,GAAG;EACH,aAAa,IAAI,EAAE,QAAQ,EAAE;GAC3B,OAAO;GACP,aAAa;GACd,CAAC;EACF,KAAK,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAC9B,OAAO;GACP,aAAa;GACd,CAAC;EACF,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,EAAE,aAAa,SAAS,MAAM,oBAAoB;GACtD,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,KAAK,KAAK;GACV,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GACjB,UAAU,cAAc,KAAK,SAAS;GACvC,CAAC;AAEF,SAAO,KAAK,iBAAiB,eAAe,EAAE,MAAM,UAAU,CAAC;AAE/D,MAAI,KAAK,MAAM;GACb,MAAM,SAAS,MAAM,KAAK,EAAE,cAAc,MAAM,CAAC;AACjD,OAAI,KAAK,QAAQ,CAAC,KAAK,MAAM;IAC3B,MAAM,EAAE,cAAc,MAAM,qBAAqB;KAC/C;KACA,aAAa,KAAK;KAClB,SAAS,KAAK;KACd,MAAM;KACP,CAAC;AACF,2BAAuB,UAAU;SAEjC,QAAO,IAAI,OAAO;QAGpB,QAAO,IAAI,EAAE,aAAa,CAAC;GAE7B;CACH,CAAC;;;;AC1QF,SAAS,WAAW,MAAoB;AACtC,QAAO,KAAK,mBAAmB,SAAS,EAAE,QAAQ,OAAO,CAAC;;AAc5D,eAAsB,iBACpB,SACgC;CAEhC,MAAM,eAAe,kBAAkB,UAAU,QAAQ,eAAe,QAAQ,SAAS;CAKzF,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;CAEF,MAAM,UAA4D,EAAE;AAEpE,KAAI,QAAQ,QAAQ;EAClB,MAAM,cAAc,uBAAuB,QAAQ,OAAO;AAC1D,UAAQ,
KACN,OAAO,cAAc,EACnB,WAAW,OAAO,iBAAiB;GACjC,OAAO;GACP,UAAU,mBAAmB;GAC7B,OAAO,EAAE,MAAM;IAAE,MAAM;IAAe,OAAO;IAAa,EAAE;GAC7D,CAAC,EACH,CAAC,CACH;;CAGH,MAAM,SAAS,QAAQ,SAAS,IAAI,OAAO,cAAc,EAAE,KAAK,SAAS,CAAC,GAAG;AAE7E,KAAI;EACF,MAAM,EAAE,SAAS,MAAM,OAAO,iBAAiB;GAC7C;GACA;GACA,UAAU,QAAQ;GAClB,eAAe,cAAc;GAC7B;GACD,CAAC;AAEF,SAAO,KAAK,IAAI,sBAAsB;UAC/B,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MAAM,aAAa,aAAa,cAAc;AAE1D,QAAM;;;AAeV,eAAsB,eACpB,SACgC;CAEhC,MAAM,eAAe,kBAAkB,UAAU,QAAQ,eAAe,QAAQ,SAAS;CAKzF,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;AAEF,KAAI;EACF,MAAM,EAAE,QAAQ,MAAM,OAAO,eAAe;GAC1C;GACA;GACA,OAAO,QAAQ;GAChB,CAAC;AAEF,MAAI,CAAC,IACH,OAAM,IAAI,MAAM,QAAQ,QAAQ,MAAM,cAAc;EAGtD,MAAM,UAAU,kBAAkB,IAAI;AAEtC,MAAI,QAAQ,UAAU;GACpB,MAAM,WAAW,MAAM,SAAS,OAAO,WAAW,gBAAgB;IAChE,MAAM,EAAE,sBAAU,kBAAkB,MAAM,OAAO,wBAAwB;KACvE;KACA,OAAO,QAAQ;KACf;KACA,UAAU;KACV,eAAe,cAAc;KAC9B,CAAC;AACF,WAAO,CAACC,YAAU,cAAc;KAChC;AAEF,UAAO;IACL,GAAG;IACH,UAAU,SAAS,IAAI,yBAAyB;IACjD;;AAGH,SAAO;UACA,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MAAM,QAAQ,QAAQ,MAAM,4BAA4B,aAAa,IAAI;AAErF,QAAM;;;AAeV,eAAsB,iBACpB,SACiC;CAEjC,MAAM,eAAe,kBAAkB,UAAU,QAAQ,eAAe,QAAQ,SAAS;CAKzF,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;CAEF,MAAM,WAAW,QAAQ,YAAY;CACrC,MAAM,UAAU,KAAK,CAAC,MAAM,0CAA0C;AAEtE,KAAI;EAEF,MAAM,EAAE,aAAa,MAAM,OAAO,oBAAoB;GACpD;GACA,MAAM;GACP,CAAC;AAEF,MAAI,CAAC,SACH,OAAM,IAAI,MAAM,aAAa,aAAa,cAAc;EAG1D,MAAM,aAAa,SAAS;EAC5B,MAAM,gBAAgB,2BAA2B,WAAW;EAG5D,IAAI;AACJ,SAAO,MAAM;AAOX,UANiB,MAAM,OAAO,eAAe;IAC3C;IACA;IACA,OAAO,QAAQ;IAChB,CAAC,EAEa;AACf,OAAI,CAAC,IACH,OAAM,IAAI,MAAM,QAAQ,QAAQ,MAAM,cAAc;AAGtD,OAAI,4BAA4B,IAAI,OAAO,CACzC;AAGF,WAAQ,OAAO,gCAAgC,2BAAW,IAAI,MAAM,CAAC,CAAC;AACtE,SAAMC,aAAW,SAAS;;EAG5B,MAAM,UAAU,kBAAkB,IAAI;EACtC,MAAM,gBAAgB,0BAA0B,QAAQ,OAAO;AAE/D,MAAI,IAAI,W
AAW,kBAAkB,QACnC,SAAQ,QAAQ,2BAA2B,gBAAgB;MAE3D,SAAQ,KAAK,2BAA2B,gBAAgB;EAe1D,MAAM,gBAXW,MAAM,SAAS,OAAO,WAAW,gBAAgB;GAChE,MAAM,EAAE,UAAU,kBAAkB,MAAM,OAAO,wBAAwB;IACvE;IACA,OAAO,QAAQ;IACf;IACA,UAAU;IACV,eAAe,cAAc;IAC9B,CAAC;AACF,UAAO,CAAC,UAAU,cAAc;IAChC,EAE4B,IAAI,yBAAyB;EAC3D,MAAM,YAAmC;GACvC,GAAG;GACH,UAAU;GACX;EAGD,MAAM,qBADgB,aAAa,IACO;AAG1C,MAAI,mBACF,SAAQ,YAAR;GACE,KAAK,mBAAmB;AAEtB,YAAQ,MAAM;AAEd,QAAI;KAEF,MAAM,kBAAkB,MAAM,iBAAiB;MAC7C;MACA;MACA,aAAa;MACb;MACA,cAAc;MACd,WAAW;MACZ,CAAC;KAGF,IAAI;AACJ,SAAI,QAAQ,MAAM;MAChB,MAAM,EAAE,WAAW,iBAAiB,MAAM,qBAAqB;OAC7D,aAAa;OACb,aAAa,QAAQ;OACrB,SAAS,QAAQ;OACjB,MAAM;OACP,CAAC;AACF,UAAI,aAAa,WACf,mBAAkB,aAAa,WAC5B,QAAQ,UAAQC,MAAI,QAAQA,MAAI,OAAO,CACvC,KAAK,WAAS;OACb,SAASA,MAAI,kBAAkBA,MAAI;OACnC,MAAMA,MAAI;OACV,QAAQA,MAAI;OACb,EAAE;;AAIT,YAAO;MACL,KAAK;MACL,YAAY;MACZ,qBAAqB;MACrB,gBAAgB,gBAAgB;MAChC;MACD;aACM,OAAO;AACd,YAAO,KACL,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,QACjF;AACD,YAAO;MACL,KAAK;MACL,YAAY;MACZ,qBAAqB;MACtB;;GAIL,KAAK,mBAAmB;GACxB,KAAK,mBAAmB;AAGpB,YAAQ,MAAM,kCAAkC,mBAAmB,KAAK;AAExE,QAAI;AACF,YAAO,MAAM;MACX,MAAM,EAAE,cAAc,MAAM,OAAO,qBAAqB;OACtD;OACA,aAAa;OACd,CAAC;AAEF,UAAI,CAAC,UACH,OAAM,IAAI,MAAM,uBAAuB,mBAAmB,cAAc;AAG1E,UAAI,kCAAkC,UAAU,OAAO,EAAE;OACvD,MAAM,YAAY,gCAAgC,UAAU,OAAO;OACnE,MAAM,kBAAkB,gCAAgC,UAAU;AAClE,WAAI,UAAU,WAAW,yBAAyB,QAChD,SAAQ,QAAQ,iCAAiC,kBAAkB;WAEnE,SAAQ,KAAK,iCAAiC,kBAAkB;AAElE,cAAO;QACL,KAAK;QACL,YAAY;QACZ,qBAAqB;QACrB,gBAAgB;QAChB,cAAc,QAAQ,OAAO,UAAU,QAAQ,SAAY;QAC5D;;AAGH,cAAQ,OAAO,sCAAsC,2BAAW,IAAI,MAAM,CAAC,CAAC;AAC5E,YAAMD,aAAW,SAAS;;aAErB,OAAO;AACd,aAAQ,KACN,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,QACjF;AACD,YAAO;MACL,KAAK;MACL,YAAY;MACZ,qBAAqB;MACtB;;AAGL;GACF,QAEE;;AAIN,SAAO;GAAE,KAAK;GAAW,YAAY;GAAe;WAC5C;AACR,UAAQ,MAAM;;;AAIlB,SAAS,qBAAqB,KAAkC;CAE9D,MAAM,cAAkC;EACtC,CAAC,MAAM,IAAI,GAAG;EACd,CAAC,gBAAgB,IAAI,aAAa;EAClC,CAAC,UAAU,IAAI,OAAO;EACtB,CAAC,eAAe,IAAI,YAAY;EAChC,CAAC,aAAa,IAAI,UAAU;EAC5B,CAAC,aAAa,IAAI,UAAU;EAC7B;AACD,QAAO,IAAI,oBAAoB,YAAY,CAAC;AAG5C,KAAI,IAAI,YAAY,IAAI,SAAS,SAAS,GAAG;AA
C3C,SAAO,IAAI,OAAO,KAAK,cAAc,CAAC;AACtC,OAAK,MAAM,WAAW,IAAI,UAAU;AAClC,UAAO,IAAI,OAAO,KAAK,iBAAiB,QAAQ,GAAG,MAAM,CAAC;AAC1D,UAAO,IAAI,aAAa,QAAQ,SAAS;AACzC,UAAO,IAAI,cAAc,QAAQ,YAAY;AAC7C,UAAO,IAAI,eAAe,QAAQ,aAAa;AAE/C,OAAI,QAAQ,OAAO;AACjB,WAAO,IAAI,OAAO,MAAM,aAAa,CAAC;IACtC,MAAM,aAAa,QAAQ,MAAM,MAAM,KAAK;AAC5C,SAAK,MAAM,QAAQ,WACjB,QAAO,IAAI,OAAO,OAAO;;;;;AAOnC,MAAa,cAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,UAAU;EACR;GACE,KAAK;GACL,MAAM;GACP;EACD;GAAE,KAAK;GAA0B,MAAM;GAA4B;EACnE;GAAE,KAAK;GAA0B,MAAM;GAAoB;EAC3D;GAAE,KAAK;GAAwB,MAAM;GAAmB;EACxD;GACE,KAAK;GACL,MAAM;GACP;EACD;GAAE,KAAK;GAA2B,MAAM;GAA4B;EACpE;GACE,KAAK;GACL,MAAM;GACP;EACF;CACD,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,cAAc,IAAI,EAAE,QAAQ,EAAE;GAC5B,YAAY;GACZ,aAAa;GACd,CAAC;EACF,OAAO,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAChC,YAAY;GACZ,aAAa;GACd,CAAC;EACF,QAAQ,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GACjC,OAAO;GACP,aACE;GACH,CAAC;EACF,UAAU,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE,EACxC,aAAa,2DACd,CAAC;EACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;GACpC,OAAO;GACP,aACE;GACH,CAAC;EACF,UAAU,IAAI,YAAY,QAAQ,KAAK,EAAE;GACvC,OAAO;GACP,aAAa;GACd,CAAC;EACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;GACpC,OAAO;GACP,aAAa;GACd,CAAC;EACF,OAAO,IAAI,eAAe,UAAU,EAAE,EACpC,aAAa,4EACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;AAClC,MAAI,KAAK,OAAO;AACd,OAAI,KAAK,MAAM;IACb,MAAM,SAAS,MAAM,iBAAiB;KACpC,cAAc,KAAK;KACnB,OAAO,KAAK;KACZ,aAAa,KAAK;KAClB,SAAS,KAAK;KACd,UAAU,cAAc,KAAK,SAAS;KACtC,MAAM,KAAK;KACZ,CAAC;AAGF,QAAI,CAAC,KAAK,MAAM;AACd,YAAO,IAAI,OAAO,KAAK,gBAAgB,OAAO,WAAW,IAAI,CAAC;AAC9D,0BAAqB,OAAO,IAAI;AAChC,SAAI,OAAO,qBAAqB;AAC9B,aAAO,IAAI,OAAO,KAAK,wBAAwB,CAAC;AAChD,aAAO,IAAI,SAAS,OAAO,sBAAsB;AACjD,UAAI,OAAO,eACT,QAAO,IAAI,aAAa,OAAO,iBAAiB;AAElD,UAAI,OAAO,mBAAmB,OAAO,gBAAgB,SAAS,EAC5D,MAAK,MAAM,UAAU,OAAO,iBAAiB;AAC3C,cAAO,IAAI,OAAO,KAAK,YAAY,OAAO,UAAU,CAAC;AACrD,WAAI,OAAO,MAAM;AACf,eAAO,IAAI,OAAO,IAAI,YAAY,CAAC;AACnC,aAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,KAAK,CACxC,QAAO,IAAI,SAAS,OAAO;;AAG/B,WAAI,OAAO,QAAQ;AACjB,eAAO,IAAI,OAAO,IAAI,cAAc,CAAC;AACrC,Y
AAI;SACF,MAAM,SAAS,KAAK,MAAM,OAAO,OAAO;SACxC,MAAM,YAAY,KAAK,UAAU,QAAQ,MAAM,EAAE;AACjD,cAAK,MAAM,QAAQ,UAAU,MAAM,KAAK,CACtC,QAAO,IAAI,SAAS,OAAO;gBAEvB;AACN,gBAAO,IAAI,SAAS,OAAO,SAAS;;;;;AAM9C,SAAI,OAAO,qBAAqB;AAC9B,aAAO,IAAI,OAAO,KAAK,wBAAwB,CAAC;AAChD,aAAO,IAAI,SAAS,OAAO,sBAAsB;AACjD,UAAI,OAAO,eACT,QAAO,IAAI,aAAa,OAAO,iBAAiB;AAElD,UAAI,OAAO,cAAc;AACvB,cAAO,IAAI,OAAO,IAAI,UAAU,CAAC;AACjC,YAAK,MAAM,QAAQ,OAAO,aAAa,MAAM,KAAK,CAChD,QAAO,IAAI,OAAO,OAAO;;;UAK/B,QAAO,IAAI,OAAO;AAEpB;;GAGF,MAAM,MAAM,MAAM,eAAe;IAC/B,cAAc,KAAK;IACnB,OAAO,KAAK;IACZ,UAAU,KAAK;IACf,aAAa,KAAK;IAClB,SAAS,KAAK;IACf,CAAC;AACF,OAAI,KAAK,YAAY,CAAC,KAAK,KACzB,sBAAqB,IAAI;OAEzB,QAAO,IAAI,IAAI;SAEZ;AACL,OAAI,KAAK,KACP,QAAO,KAAK,iEAAiE;GAE/E,MAAM,OAAO,MAAM,iBAAiB;IAClC,cAAc,KAAK;IACnB,QAAQ,KAAK;IACb,OAAO,KAAK;IACZ,aAAa,KAAK;IAClB,SAAS,KAAK;IACf,CAAC;AACF,UAAO,IAAI,KAAK;;GAElB;CACH,CAAC;;;;;;;;;ACnoBF,eAAsB,cAAc,SAA6D;CAK/F,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;AAWF,SATkB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EACjE,MAAM,EAAE,WAAW,kBAAkB,MAAM,OAAO,sBAAsB;GACtE;GACA;GACA,UAAU;GACX,CAAC;AACF,SAAO,CAAC,WAAW,cAAc;GACjC,EAEe,KAAK,MAAM,mBAAmB,EAAE,CAAC;;AAGpD,MAAaE,gBAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,YAAY,MAAM,cAAc;GACpC,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;AAEF,MAAI,UAAU,WAAW,GAAG;AAC1B,UAAO,KAAK,sBAAsB;AAClC;;AAGF,SAAO,IAAI,WAAW,EACpB,SAAS,EACP,WAAW,MAAO,IAAI,OAAO,QAAQ,OAAO,GAAG,OAAO,IAAI,QAAQ,EACnE,EACF,CAAC;AAGF,MAAI,CAAC,KAAK,MAER;OADmB,UAAU,MAAM,MAAM,EAAE,gBAAgB,UAAU,CAEnE,QAAO,KAAK,6DAA6D;;GAG7E;CACH,CAAC;;;;;;;;ACpDF,MAAM,cAAc,EACjB,QAAQ,CACR,WAAW,QAAQ;AAClB,KAAI;AACF,SAAO,KAAK,MAAM,IAAI;SAChB;AACN,QAAM,IAAI,MAAM,sBAAsB,IAAI,uCAAuC;;EAEnF,CACD,QAAQ,MAAuB,OAAO,MAAM,YAAY,MAAM,QAAQ,CAAC,MAAM,QAAQ,EAAE,EAAE,EACxF,SAAS,gEACV,CAAC;;;;;AAMJ,MAAM,YAAY,EACf,QAAQ,CACR,aAAa,KAAK,QAAQ;AACzB,KAAI,CAAC,IAAI,SAAS,IAAI,CACpB,KAAI,SA
AS;EACX,MAAM,EAAE,aAAa;EACrB,SAAS,2BAA2B,IAAI;EACzC,CAAC;EAEJ,CACD,WAAW,QAAQ;CAClB,MAAM,aAAa,IAAI,QAAQ,IAAI;AACnC,QAAO;EACL,KAAK,IAAI,MAAM,GAAG,WAAW,CAAC,MAAM;EACpC,OAAO,IAAI,MAAM,aAAa,EAAE,CAAC,MAAM;EACxC;EACD,CACD,QAAQ,MAAM,EAAE,IAAI,SAAS,GAAG,EAC/B,SAAS,+BACV,CAAC;AAoCJ,eAAe,sBACb,SACgC;CAKhC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;AAEF,KAAI;AAOF,SAAO,EAAE,QANQ,MAAM,OAAO,gBAAgB;GAC5C;GACA,cAAc,QAAQ;GACtB,SAAS,QAAQ;GAClB,CAAC,EAEuB,OAAO;UACzB,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MAAM,aAAa,QAAQ,aAAa,cAAc;AAElE,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,gBACvD,OAAM,IAAI,MAAM,qBAAqB,MAAM,UAAU;AAEvD,QAAM;;;AAeV,eAAsB,gBACpB,SACgC;AAEhC,KAAI,kBAAkB,QACpB,QAAO,MAAM,sBAAsB,QAAQ;AAG7C,KAAI,QAAQ,SAAS,QAAQ,SAAS,qBAAqB,QAAQ,YAAY,OAC7E,OAAM,IAAI,MACR,aAAa,QAAQ,SAAS,KAAK,SAAS,QAAQ,SAAS,QAAQ,KAAK,mFAE3E;AAGH,QAAO,MAAM,sBAAsB;EACjC,cAAc,QAAQ,SAAS;EAC/B,SAAS,QAAQ;EACjB,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;;AAGJ,MAAa,iBAAiB,cAAc;CAC1C,MAAM;CACN,aAAa;CACb,OAAO;;;;;;;;;;;;;;CAcP,UAAU;EACR;GAAE,KAAK;GAAe,MAAM;GAAuB;EACnD;GACE,KAAK;GACL,MAAM;GACP;EACD;GACE,KAAK;GACL,MAAM;GACP;EACD;GAAE,KAAK;GAAkB,MAAM;GAAmC;EAClE;GAAE,KAAK;GAAqB,MAAM;GAAgC;EACnE;CACD,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,cAAc,IAAI,EAAE,QAAQ,EAAE;GAC5B,YAAY;GACZ,aAAa;GACd,CAAC;EACF,MAAM,IAAI,YAAY,UAAU,EAAE;GAChC,OAAO;GACP,aAAa;GACd,CAAC;EACF,QAAQ,IAAI,UAAU,OAAO,CAAC,UAAU,EAAE;GACxC,OAAO;GACP,sBAAsB;GACtB,aAAa;GACd,CAAC;EACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;GACpC,OAAO;GACP,aACE;GACH,CAAC;EACF,UAAU,IAAI,YAAY,QAAQ,KAAK,EAAE;GACvC,OAAO;GACP,aAAa;GACd,CAAC;EACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;GACpC,OAAO;GACP,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAMlC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;GACxC,YAAY;GACZ,SAAS,KAAK;GACf,CAAC,CACkD;EACpD,MAAM,cAAc,gBAAgB;GAClC,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;EAEF,MAAM,EAAE,aAAa,MAAM,OAAO,oBAAoB;GACpD;GACA,MAAM,KAAK;GACZ,CAAC;AAEF,M
AAI,CAAC,SACH,OAAM,IAAI,MAAM,aAAa,KAAK,aAAa,cAAc;AAI/D,MAAI,SAAS,gBAAgB,oBAAoB,MAC/C,OAAM,IAAI,MACR,aAAa,KAAK,aAAa,SAAS,4BAA4B,SAAS,YAAY,CAAC,2IAE3F;AAIH,MAAI,SAAS,gBAAgB,oBAAoB,aAAa,KAAK,QAAQ,KAAK,QAC9E,OAAM,IAAI,MACR,aAAa,KAAK,aAAa,wHAEhC;EAGH,IAAI;EAGJ,MAAM,OAA+B,KAAK;EAC1C,MAAM,UAAkC,EAAE;AAC1C,MAAI,KAAK,OACP,MAAK,MAAM,KAAK,KAAK,OACnB,SAAQ,EAAE,OAAO,EAAE;AAIvB,MAAI,SAAS,UAAa,OAAO,KAAK,QAAQ,CAAC,SAAS,EACtD,WAAU;GACR,MAAM,QAAQ,EAAE;GAChB;GACD;EAGH,MAAM,SAAS,MAAM,sBAAsB;GACzC,cAAc,KAAK;GACnB;GACA,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;AAEF,MAAI,CAAC,OAAO,OAAO;AACjB,UAAO,QAAQ,aAAa,KAAK,aAAa,2BAA2B;AACzE,OAAI,KAAK,KACP,QAAO,KAAK,sEAAsE;AAEpF;;AAGF,SAAO,QACL,aAAa,KAAK,aAAa,oCAAoC,OAAO,QAC3E;AAED,MAAI,KAAK,MAAM;GACb,MAAM,cAAc,MAAM,iBAAiB;IACzC,cAAc,KAAK;IACnB,OAAO,OAAO;IACd,aAAa,KAAK;IAClB,SAAS,KAAK;IACd,UAAU,cAAc,KAAK,SAAS;IACtC,MAAM,KAAK;IACZ,CAAC;AAGF,OAAI,CAAC,KAAK,MAAM;AACd,WAAO,IAAI,OAAO,KAAK,kBAAkB,YAAY,aAAa,CAAC;AACnE,WAAO,IAAI,eAAe,YAAY,IAAI,SAAS;AAEnD,QAAI,YAAY,qBAAqB;AACnC,YAAO,IAAI,OAAO,KAAK,wBAAwB,CAAC;AAChD,YAAO,IAAI,SAAS,YAAY,sBAAsB;AACtD,SAAI,YAAY,eACd,QAAO,IAAI,aAAa,YAAY,iBAAiB;AAEvD,SAAI,YAAY,mBAAmB,YAAY,gBAAgB,SAAS,EACtE,MAAK,MAAM,UAAU,YAAY,iBAAiB;AAChD,aAAO,IAAI,OAAO,KAAK,YAAY,OAAO,UAAU,CAAC;AACrD,UAAI,OAAO,MAAM;AACf,cAAO,IAAI,OAAO,IAAI,YAAY,CAAC;AACnC,YAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,KAAK,CACxC,QAAO,IAAI,SAAS,OAAO;;AAG/B,UAAI,OAAO,QAAQ;AACjB,cAAO,IAAI,OAAO,IAAI,cAAc,CAAC;AACrC,WAAI;QACF,MAAM,SAAS,KAAK,MAAM,OAAO,OAAO;QACxC,MAAM,YAAY,KAAK,UAAU,QAAQ,MAAM,EAAE;AACjD,aAAK,MAAM,QAAQ,UAAU,MAAM,KAAK,CACtC,QAAO,IAAI,SAAS,OAAO;eAEvB;AACN,eAAO,IAAI,SAAS,OAAO,SAAS;;;;;AAM9C,QAAI,YAAY,qBAAqB;AACnC,YAAO,IAAI,OAAO,KAAK,wBAAwB,CAAC;AAChD,YAAO,IAAI,SAAS,YAAY,sBAAsB;AACtD,SAAI,YAAY,eACd,QAAO,IAAI,aAAa,YAAY,iBAAiB;AAEvD,SAAI,YAAY,cAAc;AAC5B,aAAO,IAAI,OAAO,IAAI,UAAU,CAAC;AACjC,WAAK,MAAM,QAAQ,YAAY,aAAa,MAAM,KAAK,CACrD,QAAO,IAAI,OAAO,OAAO;;;SAK/B,QAAO,IAAI,YAAY;;GAG3B;CACH,CAAC;;;;;;;;;;ACjVF,SAAS,gBAAgB,aAAqB,cAA8B;AAC1E,QAAO,GAAG,gBAAgB,cAAc,YAAY,YAAY;;;;;;;AAQlE,eAAsB,qBAC
pB,SACgC;CAKhC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;AAgBF,SAdkB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EACjE,MAAM,EAAE,WAAW,kBAAkB,MAAM,OAAO,sBAAsB;GACtE;GACA;GACA,UAAU;GACX,CAAC;AACF,SAAO,CAAC,WAAW,cAAc;GACjC,EAGiC,QAChC,MAAM,EAAE,gBAAgB,oBAAoB,iBAC9C,CAEuB,KAAK,OAAO;EAClC,MAAM,EAAE;EACR,YAAY,gBAAgB,aAAa,EAAE,KAAK;EAChD,UAAU,EAAE;EACb,EAAE;;AAGL,MAAM,qBAAqB,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,YAAY,MAAM,qBAAqB;GAC3C,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;AAEF,MAAI,UAAU,WAAW,GAAG;AAC1B,UAAO,KAAK,8BAA8B;AAC1C;;AAGF,SAAO,IAAI,WAAW,EACpB,SAAS,EACP,WAAW,MAAO,IAAI,OAAO,QAAQ,OAAO,GAAG,OAAO,IAAI,QAAQ,EACnE,EACF,CAAC;AAEF,MAAI,CAAC,KAAK,KACR,QAAO,KACL,sFACD;GAEH;CACH,CAAC;AAEF,MAAa,iBAAiB,cAAc;CAC1C,MAAM;CACN,aAAa;CACb,aAAa,EACX,MAAM,oBACP;CACD,MAAM,MAAM;AACV,QAAM,WAAW,oBAAoB,EAAE,CAAC;;CAE3C,CAAC;;;;;;;;;;;AC0KF,SAAgB,cACd,WACA,YACS;AACT,QAAO,UAAU,aAAa,SAAS,WAAW;;;;;;;;AC5LpD,MAAM,mBAAmB;CACvB,4BAA4B;CAC5B,mBAAmB;CACnB,8BAA8B;CAC9B,qBAAqB;CACrB,6BAA6B;CAC9B;;;;AAMD,IAAa,eAAb,cAAkC,MAAM;CACtC,YACE,SACA,AAAgB,MAChB,AAAgB,UAChB,AAAgB,eAChB;AACA,QAAM,QAAQ;EAJE;EACA;EACA;AAGhB,OAAK,OAAO;;;;;;;;AAsBhB,SAAgB,6BACd,UAAsC,EAAE,EAChB;CACxB,MAAM,wBAAqC,IAAI,KAAK;CACpD,IAAI,gBAAyD;CAC7D,IAAI,cAAkC;CAEtC,SAAS,iBAA8B;AACrC,MAAI,YACF,QAAO;EAGT,MAAM,gBAAiB,YAAsC;AAC7D,MAAI,OAAO,kBAAkB,YAAY;AACvC,iBAAc;AACd,UAAO;;AAGT,MAAI,OAAQ,gBAA4B,YAAY;AAClD,iBAAc;AACd,UAAO;;AAGT,QAAM,IAAI,aACR,mFACA,iBAAiB,4BAClB;;CAGH,SAAS,mBAAmB,UAAkB,SAAgC;AAC5E,MAAI,QAAQ,IAAI,SAAS,CAAE,QAAO,EAAE;AACpC,UAAQ,IAAI,SAAS;EAErB,MAAM,OAAO,MAAM,IAAI,SAAS;AAChC,MAAI,CAAC,KAAM,QAAO,EAAE;EAEpB,MAAM,SAAmB,EAAE;AAC3B,OAAK,MAAM,aAAa,KAAK,YAAY;AACvC,UAAO,KAAK,UAAU;AACtB,UAAO,KAAK,GAAG,mBAAmB,WAAW,QAAQ,CAAC;;AAGxD,SAAO;;CAGT,SAAS,qBAAqB,UAAkB,SAAgC;AAC9E,MAAI,QAAQ,IAAI,SAAS,CAAE,QAAO,EAAE;AACpC,UAAQ,IAAI,SAAS;EAErB,MAAM,OAAO,MAAM,IAAI,SAAS;AAChC,MA
AI,CAAC,KAAM,QAAO,EAAE;EAEpB,MAAM,SAAmB,EAAE;AAC3B,OAAK,MAAM,cAAc,KAAK,cAAc;AAC1C,UAAO,KAAK,WAAW;AACvB,UAAO,KAAK,GAAG,qBAAqB,YAAY,QAAQ,CAAC;;AAG3D,SAAO;;CAGT,SAAS,QAAQ,UAAwB;EACvC,MAAM,eAAe,KAAK,QAAQ,SAAS;AAC3C,MAAI,CAAC,MAAM,IAAI,aAAa,CAC1B,OAAM,IAAI,cAAc;GACtB,UAAU;GACV,8BAAc,IAAI,KAAK;GACvB,4BAAY,IAAI,KAAK;GACtB,CAAC;;CAIN,SAAS,WAAW,UAAwB;EAC1C,MAAM,eAAe,KAAK,QAAQ,SAAS;EAC3C,MAAM,OAAO,MAAM,IAAI,aAAa;AACpC,MAAI,CAAC,KAAM;AAEX,OAAK,MAAM,OAAO,KAAK,cAAc;GACnC,MAAM,UAAU,MAAM,IAAI,IAAI;AAC9B,OAAI,QACF,SAAQ,WAAW,OAAO,aAAa;;AAI3C,OAAK,MAAM,aAAa,KAAK,YAAY;GACvC,MAAM,gBAAgB,MAAM,IAAI,UAAU;AAC1C,OAAI,cACF,eAAc,aAAa,OAAO,aAAa;;AAInD,QAAM,OAAO,aAAa;;CAG5B,SAAS,2BAAuC;AAC9C,MAAI,CAAC,cAAe,QAAO,EAAE;AAC7B,MAAI;AACF,UAAO,cAAc,UAAU;WACxB,OAAO;AACd,UAAO,KAAK,2CAA2C,OAAO,MAAM,GAAG;AACvE,UAAO,EAAE;;;AAIb,QAAO;EACL,MAAM,WAAW,WAAoC;AACnD,OAAI;AACF,QAAI,UAAU,WAAW,EAAG;AAI5B,oBAAgB,MAFF,gBAAgB,CAEF,WAAW;KACrC,gBAAgB,CAAC,MAAM,KAAK;KAC5B,eAAe,CAAC,eAAe;KAC/B,SAAS;KACT,GAAG;KACJ,CAAC;IAEF,MAAM,gBAAgB,cAAc,KAAK;AACzC,UAAM,OAAO;AAEb,SAAK,MAAM,YAAY,UACrB,SAAQ,SAAS;AAGnB,SAAK,MAAM,CAAC,UAAU,iBAAiB,OAAO,QAAQ,cAAc,EAAE;KACpE,MAAM,mBAAmB,KAAK,QAAQ,KAAK,SAAS;KACpD,MAAM,OAAO,MAAM,IAAI,iBAAiB;AACxC,SAAI,CAAC,KAAM;AAEX,UAAK,MAAM,OAAO,cAAc;MAC9B,MAAM,kBAAkB,KAAK,QAAQ,KAAK,IAAI;AAC9C,WAAK,aAAa,IAAI,gBAAgB;MAEtC,MAAM,UAAU,MAAM,IAAI,gBAAgB;AAC1C,UAAI,QACF,SAAQ,WAAW,IAAI,iBAAiB;;;YAIvC,OAAO;AACd,QAAI,iBAAiB,aACnB,OAAM;AAER,UAAM,IAAI,aACR,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAC3F,iBAAiB,4BACjB,QACA,iBAAiB,QAAQ,QAAQ,OAClC;;;EAIL,cAAc,UAA4B;GACxC,MAAM,0BAAU,IAAI,KAAa;AACjC,UAAO,mBAAmB,KAAK,QAAQ,SAAS,EAAE,QAAQ;;EAG5D,gBAAgB,UAA4B;GAC1C,MAAM,0BAAU,IAAI,KAAa;AACjC,UAAO,qBAAqB,KAAK,QAAQ,SAAS,EAAE,QAAQ;;EAG9D;EACA;EACA;EAEA,gBAA4B;GAC1B,IAAI,YAAY;AAChB,QAAK,MAAM,QAAQ,MAAM,QAAQ,CAC/B,cAAa,KAAK,aAAa;AAGjC,UAAO;IACL,WAAW,MAAM;IACjB;IACA,yBAAyB,0BAA0B,CAAC;IACrD;;EAEJ;;;;;;;AAyBH,SAAgB,wBAAwB,UAA0B,EAAE,EAAqB;CACvF,IAAI,kBAAmD;CACvD,MAAM,8BAAuC,IAAI,KAAK;CACtD,MAAM,yBAAyB,6BAA6B,QAAQ,aAAa;CACjF,IAAI,gBAAsC;CAC1C,
MAAM,iCAA8C,IAAI,KAAK;CAC7D,IAAI,gBAAgB;CACpB,MAAM,kCAAyC,IAAI,KAAK;CACxD,MAAM,eAAe;CACrB,IAAI,2BAA2B;CAC/B,IAAI,kBAAuC;CAE3C,SAAS,mBAAmB,SAAiB,UAA0B;AACrE,MAAI,CAAC,WAAW,OAAO,YAAY,SACjC,OAAM,IAAI,aACR,uCACA,iBAAiB,oBAClB;AAGH,MAAI,CAAC,MAAM,QAAQ,SAAS,IAAI,SAAS,WAAW,EAClD,OAAM,IAAI,aACR,sCACA,iBAAiB,oBAClB;AAGH,MAAI,YAAY,IAAI,QAAQ,CAC1B,OAAM,IAAI,aACR,wBAAwB,QAAQ,mBAChC,iBAAiB,oBAClB;;CAIL,SAAS,YAAY,OAA2B;AAC9C,SAAO,MACL,uBAAuB,MAAM,QAAQ,UAAU,MAAM,KAAK,cAAc,MAAM,SAAS,GACxF;AAED,MAAI,cACF,eAAc,MAAM;;CAIxB,SAAS,cAAc,KAAa,OAAuB;AACzD,MAAI,gBAAgB,QAAQ,cAAc;GACxC,MAAM,WAAW,gBAAgB,MAAM,CAAC,MAAM,CAAC;AAC/C,OAAI,SACF,iBAAgB,OAAO,SAAS;;AAGpC,kBAAgB,IAAI,KAAK,MAAM;;CAGjC,SAAS,kBAAkB,aAA+B;AACxD,SAAO,uBAAuB,cAAc,YAAY;;CAG1D,SAAS,mBAAmB,eAAmC;AAC7D,SAAO,MAAM,sCAAsC,cAAc,KAAK,KAAK,GAAG;EAC9E,MAAM,oCAAoB,IAAI,KAAa;AAE3C,OAAK,MAAM,CAAC,SAAS,UAAU,YAC7B,MAAK,MAAM,gBAAgB,cACzB,KAAI,MAAM,MAAM,IAAI,aAAa,EAAE;AACjC,UAAO,MAAM,SAAS,QAAQ,wBAAwB,eAAe;AACrE,qBAAkB,IAAI,QAAQ;AAC9B;;AAKN,SAAO,MAAM,KAAK,kBAAkB;;CAGtC,SAAS,gBAAgB,UAAwC;EAC/D,MAAM,WAAW,UAAU;EAC3B,IAAI,gBAAgB,gBAAgB,IAAI,SAAS;AAEjD,MAAI,CAAC,eAAe;AAClB,mBAAgB,kBAAkB,SAAS;AAC3C,iBAAc,UAAU,cAAc;;EAIxC,MAAM,mBAAmB,CAAC,UAAU,GAAG,cAAc;AAGrD,SAAO;GACL,aAAa;GACb,eAAe;GACf,gBALqB,mBAAmB,iBAAiB;GAM1D;;CAGH,eAAe,wBAAuC;EACpD,MAAM,WAAqB,EAAE;AAC7B,OAAK,MAAM,SAAS,YAAY,QAAQ,CACtC,UAAS,KAAK,GAAG,MAAM,KAAK,MAAM,MAAM,CAAC;AAG3C,QAAM,uBAAuB,WAAW,SAAS;AACjD,kBAAgB,OAAO;AAEvB,MAAI,QAAQ,4BAA4B;GACtC,MAAM,eAAe,uBAAuB,0BAA0B;AACtE,OAAI,aAAa,SAAS,EACxB,QAAO,KAAK,mCAAmC,KAAK,UAAU,aAAa,GAAG;;;CAKpF,eAAe,iBAAiB,OAAwB,UAAiC;AACvF,MAAI;GACF,MAAM,eAAe,KAAK,QAAQ,SAAS;AAE3C,OAAI,UAAU,SACZ,wBAAuB,WAAW,aAAa;QAC1C;AACL,2BAAuB,QAAQ,aAAa;AAC5C,QAAI,UAAU,SACZ,OAAM,uBAAuB;;AAIjC,mBAAgB,OAAO;GAEvB,MAAM,eAAe,gBAAgB,aAAa;AAGlD,OAAI,aAAa,eAAe,SAAS,GAAG;AAC1C,WAAO,KAAK,qDAAqD,EAC/D,MAAM,UACP,CAAC;AACF,WAAO,KAAK,iBAAiB,gBAAgB,EAAE,MAAM,UAAU,CAAC;AAChE,WAAO,KAAK,oBAAoB,aAAa,eAAe,KAAK,KAAK,IAAI,EACxE,MAAM,UACP,CAAC;AAEF,QAAI,gBACF,kBAAiB;SAGnB,QAAO,MAAM,sCAAsC,eAAe;WAE7D,OA
AO;AACd,eACE,IAAI,aACF,iCAAiC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IACvF,iBAAiB,4BACjB,UACA,iBAAiB,QAAQ,QAAQ,OAClC,CACF;;;CAIL,SAAS,mBAAmB,OAAwB,UAAwB;EAC1E,MAAM,MAAM,GAAG,MAAM,GAAG;AAExB,MAAI,eAAe,IAAI,IAAI,CACzB,cAAa,eAAe,IAAI,IAAI,CAAC;EAGvC,MAAM,QAAQ,iBAAiB;AAC7B,oBAAiB,OAAO,SAAS;AACjC,kBAAe,OAAO,IAAI;KACzB,QAAQ,gBAAgB,IAAI;AAE/B,iBAAe,IAAI,KAAK,MAAM;;CAGhC,eAAe,OAAsB;AACnC,MAAI,iBAAiB;AACnB,SAAM,gBAAgB,OAAO;AAC7B,qBAAkB;;AAGpB,OAAK,MAAM,SAAS,eAAe,QAAQ,CACzC,cAAa,MAAM;AAErB,iBAAe,OAAO;AAEtB,wBAAsB;AACtB,kBAAgB;;CAGlB,SAAS,sBAA4B;AACnC,MAAI,yBAA0B;EAE9B,MAAM,eAAe,YAAY;AAC/B,OAAI;AACF,UAAM,MAAM;AACZ,WAAO,KAAK,+BAA+B;AAC3C,YAAQ,KAAK,EAAE;YACR,OAAO;AACd,WAAO,MAAM,0BAA0B,OAAO,MAAM,GAAG;AACvD,YAAQ,KAAK,EAAE;;;AAInB,UAAQ,GAAG,gBAAgB,cAAc,CAAC;AAC1C,UAAQ,GAAG,iBAAiB,cAAc,CAAC;AAC3C,6BAA2B;;CAG7B,SAAS,uBAA6B;AACpC,MAAI,CAAC,yBAA0B;AAE/B,UAAQ,mBAAmB,SAAS;AACpC,UAAQ,mBAAmB,UAAU;AACrC,6BAA2B;;CAG7B,eAAe,aAA4B;AACzC,MAAI,cAAe;AAEnB,MAAI;AACF,qBAAkB,MAAM,EAAE,EAAE;IAC1B,SAAS;IACT,YAAY;IACZ,eAAe;IACf,YAAY;IACZ,kBAAkB;KAChB,oBAAoB;KACpB,cAAc;KACf;IACD,GAAG,QAAQ;IACZ,CAAC;AAEF,mBAAgB,GAAG,QAAQ,aAAqB;AAC9C,WAAO,MAAM,eAAe,WAAW;AACvC,uBAAmB,OAAO,SAAS;KACnC;AAEF,mBAAgB,GAAG,WAAW,aAAqB;AACjD,WAAO,MAAM,iBAAiB,WAAW;AACzC,uBAAmB,UAAU,SAAS;KACtC;AAEF,mBAAgB,GAAG,WAAW,aAAqB;AACjD,WAAO,MAAM,iBAAiB,WAAW;AACzC,uBAAmB,UAAU,SAAS;KACtC;AAEF,mBAAgB,GAAG,UAAU,UAAmB;AAC9C,WAAO,MAAM,kBAAkB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAAI,EACvF,MAAM,UACP,CAAC;AACF,gBACE,IAAI,aACF,uBAAuB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAC7E,iBAAiB,mBACjB,QACA,iBAAiB,QAAQ,QAAQ,OAClC,CACF;KACD;AAEF,wBAAqB;AACrB,mBAAgB;WACT,OAAO;AACd,SAAM,IAAI,aACR,iCAAiC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IACvF,iBAAiB,mBACjB,QACA,iBAAiB,QAAQ,QAAQ,OAClC;;;AAIL,QAAO;EACL;EAEA,MAAM,cAAc,SAAiB,UAAmC;AACtE,sBAAmB,SAAS,SAAS;AAErC,OAAI,CAAC,cACH,OAAM,YAAY;GAGpB,MAAM,wBAAQ,IAAI,KAAa;AAC/B,QAAK,MAAM,WAAW,UAAU;AAC9B,WAAO,IACL,GAAG,OAAO,IAAI,oBAAoB,CAAC,GAAG,OAAO,IAAI,UAAU,IAAI,CAAC,GAAG,KAAK,SAAS,QAAQ,KAAK,EAAE,QAAQ,GACzG;AACD,eAAW,MAAM,QAAQ,KAAK,QAAQ,CACpC,OAAM,I
AAI,KAAK,QAAQ,KAAK,CAAC;;GAIjC,MAAM,aAAyB;IAC7B,IAAI;IACJ;IACA;IACD;AAED,eAAY,IAAI,SAAS,WAAW;AAEpC,OAAI,iBAAiB;IACnB,MAAM,YAAY,MAAM,KAAK,MAAM;AACnC,oBAAgB,IAAI,UAAU;;AAGhC,SAAM,uBAAuB;;EAG/B,MAAM,iBAAiB,SAAgC;GACrD,MAAM,aAAa,YAAY,IAAI,QAAQ;AAC3C,OAAI,CAAC,WAAY;AAEjB,OAAI,gBACF,iBAAgB,QAAQ,WAAW,SAAS;AAG9C,QAAK,MAAM,YAAY,WAAW,MAChC,wBAAuB,WAAW,SAAS;AAG7C,eAAY,OAAO,QAAQ;AAC3B,mBAAgB,OAAO;;EAGzB,MAAM,QAAuB;AAC3B,OAAI,CAAC,cACH,OAAM,YAAY;AAEpB,SAAM,uBAAuB;;EAG/B;EAEA,QAAQ,UAA+B;AACrC,mBAAgB;;EAGlB;EACA;EAEA,6BAAyC;AACvC,UAAO,uBAAuB,0BAA0B;;EAG1D,iBAA8B;GAC5B,IAAI,YAAY;AAChB,QAAK,MAAM,SAAS,YAAY,QAAQ,CACtC,cAAa,MAAM,MAAM;GAG3B,MAAM,QAAQ,uBAAuB,eAAe;AAEpD,UAAO;IACL,YAAY,iBAAiB,oBAAoB;IACjD,YAAY,YAAY;IACxB;IACA,qBAAqB,MAAM;IAC5B;;EAGH,mBAAmB,UAA4B;AAC7C,qBAAkB;;EAErB;;;;;;;;;;;;;;ACzmBH,SAAgB,wBAAwB,QAKlB;CACpB,MAAM,EAAE,aAAa,QAAQ,aAAa,EAAE,EAAE,kBAAkB;CAChE,MAAM,UAAU,KAAK,KAAK,YAAY,EAAE,YAAY;AACpD,MAAG,UAAU,SAAS,EAAE,WAAW,MAAM,CAAC;CAE1C,MAAM,WAIF;EAAE,UAAU,EAAE;EAAE,UAAU,EAAE;EAAE,UAAU,EAAE;EAAE;CAEhD,IAAI,UAAoC;CACxC,MAAM,mBAAqC,EAAE;CAG7C,MAAM,oBAAoB,eAAe,+BAA+B,IAAI,EAAE;CAG9E,SAAS,mBAAmB,KAAkC;AAC5D,SAAO,WAAW,QAAQ,MAAO,EAAuB,aAAa,SAAS,IAAI,CAAC;;CAGrF,SAAS,eAA+C;EACtD,MAAM,cAAc,YAAY;AAChC,MAAI,CAAC,YAAa,QAAO;EAEzB,MAAM,aAAa,YAAY;EAC/B,MAAM,cAAc,YAAY;AAChC,SAAO;GACL,MAAM,WAAW;GACjB,aAAa,cACT;IACE,UAAU,YAAY,KAAK;IAC3B,WAAW,YAAY;IACvB,eAAe,YAAY;IAC5B,GACD;GACJ,cAAc,WAAW;GACzB,eAAe,WAAW;GAC1B,YAAY,WAAW;GACxB;;CAOH,eAAe,yBACb,KACA,WACA,UACe;EACf,MAAM,UAAU,iBAAiB,IAAI;AACrC,UAAQ,gBAAgB,aAAa,EAAE;AAGvC,MAAI,CAAC,IAAI,YACP;EAGF,MAAM,cAAc,IAAI;AACxB,QAAM,QAAQ,WACZ,OAAO,QAAQ,SAAS,MAAM,CAAC,IAAI,OAAO,CAAC,UAAU,UAAU;AAC7D,OAAI;AACF,YAAQ,gBAAgB,WAAW,YAAY,MAAM,YAAY;KAC/D;KACA;KACA,QAAQ,SAAS,WAAW;KAC5B,SAAS,SAAS,kBAAkB,IAAI,SAAS,IAAI,EAAE;KACxD,CAAC;YACK,OAAO;AACd,WAAO,MACL,yBAAyB,OAAO,KAAK,SAAS,CAAC,MAAM,UAAU,kBAAkB,IAAI,KACtF;AACD,WAAO,MAAM,OAAO,MAAM,CAAC;;IAE7B,CACH;AAGD,MAAI,8BAA8B,OAAO,OAAO,IAAI,6BAA6B,WAC/E,KAAI;AACF,WAAQ,yBAAyB,aAAa,MAAM,IAAI,yBAAyB;IAC/E;IACA,OAAO,QAAQ,gBAAgB
;IAChC,CAAC;WACK,OAAO;AACd,UAAO,MACL,uCAAuC,OAAO,KAAK,UAAU,CAAC,kBAAkB,IAAI,KACrF;AACD,UAAO,MAAM,OAAO,MAAM,CAAC;;MAG7B,SAAQ,yBAAyB,aAAa,QAAQ,gBAAgB;;CAI1E,eAAe,yBACb,KACA,WACA,WACe;EACf,MAAM,UAAU,iBAAiB,IAAI;AACrC,UAAQ,gBAAgB,aAAa,EAAE;AAGvC,MAAI,CAAC,IAAI,gBACP;EAGF,MAAMC,oBAAkB,IAAI;AAE5B,QAAM,QAAQ,WACZ,OAAO,QAAQ,UAAU,CAAC,IAAI,OAAO,CAAC,cAAc,cAAc;AAChE,OAAI;AACF,YAAQ,gBAAgB,WAAW,gBAAgB,MAAMA,kBAAgB;KACvE;KACA;KACD,CAAC;YACK,OAAO;AACd,WAAO,MACL,6BAA6B,OAAO,KAAK,aAAa,CAAC,MAAM,UAAU,kBAAkB,IAAI,KAC9F;AACD,WAAO,MAAM,OAAO,MAAM,CAAC;;IAE7B,CACH;AAGD,MAAI,8BAA8B,OAAO,OAAO,IAAI,6BAA6B,WAC/E,KAAI;AACF,WAAQ,yBAAyB,aAAa,MAAM,IAAI,yBAAyB;IAC/E;IACA,WAAW,QAAQ,gBAAgB;IACpC,CAAC;WACK,OAAO;AACd,UAAO,MACL,uCAAuC,OAAO,KAAK,UAAU,CAAC,kBAAkB,IAAI,KACrF;AACD,UAAO,MAAM,OAAO,MAAM,CAAC;;MAG7B,SAAQ,yBAAyB,aAAa,QAAQ,gBAAgB;;CAI1E,eAAe,iBAAiB,KAAsC;EACpE,MAAM,UAAU,iBAAiB,IAAI;AAGrC,MAAI,CAAC,IAAI,gBACP;EAGF,MAAM,kBAAkB,IAAI;AAE5B,QAAM,QAAQ,WACZ,OAAO,QAAQ,SAAS,SAAS,CAAC,IAAI,OAAO,CAAC,YAAY,cAAc;AACtE,OAAI;AACF,YAAQ,gBAAgB,cAAc,MAAM,gBAAgB,SAAS;YAC9D,OAAO;AACd,WAAO,MACL,6BAA6B,OAAO,KAAK,SAAS,KAAK,CAAC,kBAAkB,IAAI,KAC/E;AACD,WAAO,MAAM,OAAO,MAAM,CAAC;;IAE7B,CACH;;CAGH,eAAe,UAAU,KAAsC;EAC7D,MAAM,UAAU,iBAAiB,IAAI;EAErC,MAAM,kBAAsD,EAAE;EAC9D,MAAM,kBAAsD,EAAE;AAG9D,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,QAAQ,yBAAyB,CAC/E,iBAAgB,KAAK;GACnB;GACA;GACD,CAAC;AAIJ,OAAK,MAAM,CAAC,WAAW,cAAc,OAAO,QAAQ,QAAQ,yBAAyB,CACnF,iBAAgB,KAAK;GACnB;GACA;GACD,CAAC;EAIJ,MAAM,QAAiC,EACrC,MAAM,cAAc,EACrB;AAED,MAAI,cAAc,KAAK,WAAW,CAChC,OAAM,WAAW;AAEnB,MAAI,cAAc,KAAK,WAAW,CAChC,OAAM,WAAW;AAEnB,MAAI,cAAc,KAAK,WAAW,CAChC,OAAM,WAAW,OAAO,OAAO,QAAQ,gBAAgB;EAIzD,MAAM,SAAS,MAAM,IAAI,UAAU;GAC1B;GACP,SAAS,KAAK,KAAK,SAAS,IAAI,GAAG;GACnC,YAAY,OAAO;GACpB,CAAC;AAGF,QAAM,oBAAoB,IAAI,IAAI,OAAO;;;;;;CAW3C,SAAS,oBAA6C;AACpD,SAAO,OAAO,QAAQ,SAAS,SAAS,CAAC,KAAK,CAAC,WAAW,WAAW;GACnE;GACA,OAAO,KAAK;GACZ,YAAY,IAAI,IAAI,OAAO,QAAQ,KAAK,WAAW,CAAC;GACpD,mBAAmB,KAAK;GACzB,EAAE;;;;;;CAOL,SAAS,oBAA6C;AACpD,SAAO,OAAO,QAAQ,SAAS,SAAS,CAAC,KAAK,CAAC,WAAW,gBAAg
B;GACxE;GACA;GACD,EAAE;;;;;;;;CASL,eAAe,mBACb,QACA,UACe;AAEf,MAAI,CADS,OAAO,UACT;EAEX,MAAM,gBAAgB,KAAK,KAAK,SAAS,OAAO,GAAG;EACnD,MAAM,OAAO,cAAc;EAC3B,MAAM,WAAW,mBAAmB;EAEpC,IAAI;AAEJ,UAAQ,UAAR;GACE,KAAK;AACH,aAAS,MAAM,OAAO,gBAAiB;KACrC;KACA;KACA,SAAS;KACT,YAAY,OAAO;KACnB,cAAc,OAAO;KACtB,CAAC;AACF;GACF,KAAK;AACH,aAAS,MAAM,OAAO,gBAAiB;KACrC;KACA,WAAW,mBAAmB;KAC9B;KACA,SAAS;KACT,YAAY,OAAO;KACnB,cAAc,OAAO;KACtB,CAAC;AACF;GACF,KAAK;AACH,aAAS,MAAM,OAAO,gBAAiB;KACrC;KACA,WAAW,mBAAmB;KAC9B,WAAW,EAAE,GAAG,SAAS,UAAU;KACnC;KACA,SAAS;KACT,YAAY,OAAO;KACnB,cAAc,OAAO;KACtB,CAAC;AACF;;AAGJ,QAAM,oBAAoB,OAAO,IAAI,OAAO;;;;;;;;;CAU9C,eAAe,cACb,UACA,SACe;EACf,MAAM,UAAU,kBAAkB,QAAQ,MAAM,EAAE,aAAa,KAAK;AACpE,MAAI,QAAQ,WAAW,EAAG;EAC1B,MAAM,UAAU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,WAAW;AAC5B,OAAI;AACF,UAAM,mBAAmB,QAAQ,SAAS;YACnC,OAAO;AACd,WAAO,MAAM,2BAA2B,OAAO,KAAK,OAAO,GAAG,CAAC,IAAI,SAAS,GAAG;AAC/E,WAAO,MAAM,OAAO,MAAM,CAAC;AAC3B,QAAI,CAACC,QACH,OAAM;;IAGV,CACH;AACD,MAAI,CAACA,SAAO;GACV,MAAM,WAAW,QAAQ,QAAQ,MAAkC,EAAE,WAAW,WAAW;AAC3F,OAAI,SAAS,SAAS,EACpB,OAAM,IAAI,eAAe,SAAS,KAAK,MAAM,EAAE,OAAO,CAAC;;;;;;;;CAc7D,eAAe,oBAAoB,UAAkB,QAAwC;AAC3F,QAAM,QAAQ,IACZ,OAAO,MAAM,IAAI,OAAO,SAAS;AAC/B,QAAG,UAAU,KAAK,QAAQ,KAAK,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AAC1D,UAAO,IAAI,SAAe,WAAS,WAAW;AAC5C,QAAI,KAAK,gBAAgBC,KAAG,WAAW,KAAK,KAAK,EAAE;KACjD,MAAM,eAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK,KAAK;AAC5D,YAAO,MAAM,GAAG,SAAS,oBAAoB,eAAe;AAC5D,YAAOC,WAAS;;AAGlB,SAAG,UAAU,KAAK,MAAM,KAAK,UAAU,QAAQ;AAC7C,SAAI,KAAK;MACP,MAAM,eAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK,KAAK;AAC5D,aAAO,MAAM,sBAAsB,OAAO,KAAK,aAAa,GAAG;AAC/D,aAAO,MAAM,OAAO,IAAI,CAAC;AACzB,aAAO,IAAI;YACN;MACL,MAAM,eAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK,KAAK;AAC5D,aAAO,IAAI,GAAG,SAAS,eAAe,OAAO,QAAQ,aAAa,GAAG;AAErE,UAAI,KAAK,WACP,MAAG,MAAM,KAAK,MAAM,MAAQ,aAAa;AACvC,WAAI,UAAU;QACZ,MAAMC,iBAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK,KAAK;AAC5D,eAAO,MACL,0CAA0C,OAAO,KAAKA,eAAa,GACpE;AACD,eAAO,MAAM,OAAO,SAAS,CAAC;AAC9B,eAAO,SAAS;aAEhB,YAAS;QAEX;UAEF,YAAS;;MAGb;KACF;IACF,CACH;;CAOH,eAAe,iBAAiB,KAAsC
;AACpE,mBAAiB,IAAI,MAAM;GACzB,iBAAiB,EAAE;GACnB,iBAAiB,EAAE;GACnB,0BAA0B,EAAE;GAC5B,0BAA0B,EAAE;GAC5B,iBAAiB,EAAE;GACpB;AAGD,MAAI,cAAc,KAAK,WAAW,CAChC,MAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,SAAS,SAAS,CAChE,OAAM,yBAAyB,KAAK,WAAW,MAAM;AAKzD,MAAI,cAAc,KAAK,WAAW,CAChC,MAAK,MAAM,CAAC,WAAW,cAAc,OAAO,QAAQ,SAAS,SAAS,CACpE,OAAM,yBAAyB,KAAK,WAAW,UAAU;AAK7D,MAAI,cAAc,KAAK,WAAW,CAChC,OAAM,iBAAiB,IAAI;AAI7B,QAAM,UAAU,IAAI;;CAGtB,eAAe,cAAc,MAAmB,SAA+B;EAC7E,MAAM,UAAU,MAAM,QAAQ,WAC5B,KAAK,IAAI,OAAO,QAAQ;AACtB,SAAM,SAAS,sBAAsB,IAAI,MAAM,YAAY;AACzD,QAAI;AACF,WAAM,iBAAiB,IAAwB;aACxC,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAO,KAAK,IAAI,GAAG,GAAG;AACjE,YAAO,MAAM,OAAO,MAAM,CAAC;AAC3B,SAAI,CAACH,QACH,OAAM;;KAGV;IACF,CACH;AACD,MAAI,CAACA,SAAO;GACV,MAAM,WAAW,QAAQ,QAAQ,MAAkC,EAAE,WAAW,WAAW;AAC3F,OAAI,SAAS,SAAS,EACpB,OAAM,IAAI,eAAe,SAAS,KAAK,MAAM,EAAE,OAAO,CAAC;;;CAK7D,eAAe,sBAAqC;AAClD,SAAO,SAAS;AAChB,SAAO,KAAK,qDAAqD,EAC/D,MAAM,UACP,CAAC;AACF,SAAO,SAAS;AAGhB,MAAI,QACF,OAAM,QAAQ,MAAM;EAItB,MAAM,OAAO,QAAQ,KAAK,MAAM,EAAE;EAClC,MAAM,MAAM;GACV,GAAG,QAAQ;GACX,0BACE,SAAS,QAAQ,IAAI,2BAA2B,KAAK,GAAG,GAAG,GAC3D,UAAU;GACb;EAED,MAAM,QAAQ,MAAM,QAAQ,KAAK,IAAI,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,EAAE;GAC/D,OAAO;GACP;GACA,UAAU;GACX,CAAC;EAGF,MAAM,iBAAiB,WAA2B;AAChD,SAAM,KAAK,OAAO;;AAGpB,UAAQ,GAAG,UAAU,cAAc;AACnC,UAAQ,GAAG,WAAW,cAAc;AAGpC,QAAM,GAAG,SAAS,SAAS;AACzB,WAAQ,KAAK,QAAQ,EAAE;IACvB;;AAKJ,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EAEA,MAAM,SAAS,SAA+B;AAC5C,UAAO,SAAS;AAChB,UAAO,IAAI,+BAA+B,OAAO,UAAU,YAAY,OAAO,KAAK,GAAG;GAEtF,MAAM,MAAM;AAGZ,SAAM,SAAS,8BAA8B,OAAO,SAAS;AAC3D,SAAK,aAAa,4BAA4B,IAAI,iBAAiB,OAAO;AAC1E,SAAK,MAAMI,QAAM,IAAI,kBAAkB;KACrC,MAAM,YAAYA,KAAG;AACrB,WAAM,SAAS,sBAAsB,aAAa,YAAY;AAC5D,UAAI;AACF,aAAMA,KAAG,WAAW;AAIpB,aAAMA,KAAG,yBAAyB;AAElC,gBAAS,SAAS,aAAa;QAC7B,OAAOA,KAAG;QACV,YAAYA,KAAG;QACf,mBAAmBA,KAAG;QACvB;eACM,OAAO;AACd,cAAO,MAAM,4CAA4C,OAAO,KAAK,UAAU,GAAG;AAClF,cAAO,MAAM,OAAO,MAAM,CAAC;AAC3B,WAAI,CAACJ,QACH,OAAM;;OAGV;;KAEJ;GAIF,MAAM,EAAE,qBAAqB,oBAAoB,MAAM,SACrD,wBACA,YAAY;IACV,MAAMK,wB
AAsB,4BAC1B,eACA,IAAI,kBACJ,OAAO,KACR;AAMD,WAAO;KAAE;KAAqB,iBAJ5B,IAAI,oBACHA,sBAAoB,SAAS,IAC1B,sBAAsB,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC,GAChD;KACyC;KAElD;AAGD,OAAI,IAAI,YACN,OAAM,SAAS,kCAAkC,YAC/C,IAAI,YAAa,mBAAmB,CACrC;AAIH,OAAI,IAAI,iBAAiB,SAAS,KAAK,oBAAoB,SAAS,EAClE,QAAO,SAAS;GAIlB,MAAM,qBAAqB,mBAAmB,WAAW;GACzD,MAAM,qBAAqB,kBAAkB,MAAM,MAAM,EAAE,mBAAmB,KAAK;AACnF,OAAI,mBAAmB,SAAS,KAAK,oBAAoB;AACvD,UAAM,SAAS,4BAA4B,YAAY;AACrD,WAAM,QAAQ,IAAI,CAChB,cAAc,oBAAoBL,QAAM,EACxC,cAAc,mBAAmBA,QAAM,CACxC,CAAC;MACF;AACF,WAAO,SAAS;;AAIlB,SAAM,SAAS,0BAA0B,YAAY;AACnD,SAAK,MAAM,mBAAmB,IAAI,kBAAkB;KAClD,MAAM,YAAY,gBAAgB;AAClC,WAAM,SAAS,0BAA0B,aAAa,YAAY;AAChE,UAAI;AACF,aAAM,gBAAgB,eAAe;AACrC,gBAAS,SAAS,aAAa,EAAE;AACjC,cAAO,QAAQ,gBAAgB,UAAU,CAAC,SAAS,CAAC,GAAG,cAAc;AACnE,iBAAS,SAAS,WAAW,SAAS,QAAQ;SAC9C;eACK,OAAO;AACd,cAAO,MACL,gDAAgD,OAAO,KAAK,UAAU,GACvE;AACD,cAAO,MAAM,OAAO,MAAM,CAAC;AAC3B,WAAI,CAACA,QACH,OAAM;;OAGV;;KAEJ;GAGF,MAAM,sBAAsB,mBAAmB,WAAW;GAC1D,MAAM,qBAAqB,kBAAkB,MAAM,MAAM,EAAE,mBAAmB,KAAK;AACnF,OAAI,oBAAoB,SAAS,KAAK,oBAAoB;AACxD,UAAM,SAAS,6BAA6B,YAAY;AACtD,WAAM,QAAQ,IAAI,CAChB,cAAc,qBAAqBA,QAAM,EACzC,cAAc,mBAAmBA,QAAM,CACxC,CAAC;MACF;AACF,WAAO,SAAS;;AAIlB,SAAM,SAAS,0BAA0B,YAAY;AACnD,QAAI,iBAAiB;AACnB,WAAM,gBAAgB,eAAe;AAErC,SAAI,oBAAoB,SAAS,EAC/B,OAAM,gBAAgB,wBAAwB,CAAC,GAAG,oBAAoB,CAAC;;IAI3E,MAAM,eAAe,iBAAiB,aAAa,EAAE;AACrD,WAAO,QAAQ,aAAa,CAAC,SAAS,CAAC,KAAK,cAAc;AACxD,cAAS,SAAS,OAAO;MACzB;KACF;GAGF,MAAM,sBAAsB,mBAAmB,WAAW;GAC1D,MAAM,qBAAqB,kBAAkB,MAAM,MAAM,EAAE,mBAAmB,KAAK;AACnF,OAAI,oBAAoB,SAAS,KAAK,oBAAoB;AACxD,UAAM,SAAS,6BAA6B,YAAY;AACtD,WAAM,QAAQ,IAAI,CAChB,cAAc,qBAAqBA,QAAM,EACzC,cAAc,mBAAmBA,QAAM,CACxC,CAAC;MACF;AACF,WAAO,SAAS;;;EAIpB,MAAM,QAAuB;AAC3B,aAAU,yBAAyB;AAGnC,WAAQ,yBAAyB;AAC/B,yBAAqB;KACrB;AAGF,SAAM,QAAQ,cAAc,UAAU,CAAC,OAAO,KAAK,CAAC;GAGpD,MAAM,MAAM;AAGZ,QAAK,MAAMI,QAAM,IAAI,kBAAkB;IACrC,MAAM,cAAcA,KAAG;AACvB,UAAM,SAAS,cAAc,YAAY,eAAeA,KAAG,OAAO,MAAM;;AAI1E,QAAK,MAAM,mBAAmB,IAAI,kBAAkB;IAClD,MAAM,oBAAoB,gBAAgB;AAC1C,UAAM,SAAS,cACb,YAAY,qBACZ,gBAAg
B,UAAU,MAC3B;;AAIH,SAAM,IAAI,cAAc,GAAG;;EAE9B;;;;;;;AAQH,eAAsBE,WAAS,SAA2B;AACxD,QAAO,SAAS,YAAY,OAAO,aAAa;EAE9C,MAAM,EAAE,QAAQ,YAAY,YAAY,MAAM,SAAS,uBAAuB,YAC5E,WAAW,SAAS,WAAW,CAChC;EACD,MAAMN,UAAQ,SAAS,SAAS;AAEhC,WAAS,aAAa,kBAAkBA,QAAM;AAC9C,WAAS,aAAa,6BAA6B,WAAW,OAAO;AAGrE,QAAM,SAAS,8BAA8B,YAC3C,kBAAkB;GAAE;GAAQ,YAAY,OAAO;GAAM,CAAC,CACvD;EAGD,IAAI;AACJ,MAAI,QAAQ,SAAS,EACnB,iBAAgB,IAAI,cAAc,QAAQ;EAI5C,MAAM,cAAc,kBAAkB;GAAE;GAAQ;GAAe,CAAC;AAEhE,WAAS,aAAa,YAAY,YAAY,OAAO,KAAK;EAE1D,MAAM,UAAU,wBAAwB;GAAE;GAAa;GAAQ;GAAY;GAAe,CAAC;AAC3F,QAAM,QAAQ,SAASA,QAAM;AAC7B,MAAIA,QACF,OAAM,QAAQ,OAAO;GAEvB;;;;;;;;;;ACjxBJ,SAAS,gBAAgB,MAAoC;AAC3D,QAAO;EACL,MAAM,KAAK;EACX,UAAU,KAAK;EACf,cAAc,KAAK;EACnB,WAAW,KAAK,YAAY,cAAc,KAAK,UAAU,GAAG;EAC5D,WAAW,KAAK,YAAY,cAAc,KAAK,UAAU,GAAG;EAC5D,YAAY,OAAO,YACjB,OAAO,QAAQ,KAAK,aAAa,CAAC,KAAK,CAAC,KAAK,WAAW,CAAC,KAAK,OAAO,aAAa,MAAM,CAAC,CAAC,CAC3F;EACF;;;;;;;AAQH,eAAsB,iBACpB,SAC4B;CAM5B,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;CAGF,MAAM,EAAE,WAAW,MAAM,WAAW,SAAS,WAAW;CACxD,MAAM,EAAE,gBAAgB,MAAM,OAAO,eAAe;EAClD;EACA,iBAAiB,OAAO;EACzB,CAAC;AACF,KAAI,CAAC,aAAa,cAChB,OAAM,IAAI,MAAM,eAAe,OAAO,KAAK,uCAAuC;AAcpF,SAVqB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EACpE,MAAM,EAAE,cAAc,kBAAkB,MAAM,OAAO,qBAAqB;GACxE;GACA;GACA,UAAU;GACV,eAAe,YAAY;GAC5B,CAAC;AACF,SAAO,CAAC,cAAc,cAAc;GACpC,EAEkB,IAAI,gBAAgB;;AAG1C,MAAaO,gBAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAElC,MAAM,eAAe,MAAM,iBAAiB;GAC1C,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GAClB,CAAC;AAGF,SAAO,IAAI,cAAc,EAAE,SAAS;GAAE,WAAW;GAAM,WAAW;GAAM,EAAE,CAAC;GAC3E;CACH,CAAC;;;;;;;;;AClFF,eAAsB,oBACpB,SAC+B;CAM/B,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;CAGF,MAAM,EAAE,WAAW,MAAM,WAAW,QAAQ,WAAW;CACvD,MAAM,EAAE,gBAAgB,MAAM,OAAO,eAAe;EAClD;EACA,iBAAiB,OA
AO;EACzB,CAAC;AACF,KAAI,CAAC,aAAa,cAChB,OAAM,IAAI,MAAM,eAAe,OAAO,KAAK,uCAAuC;CAIpF,MAAM,EAAE,gBAAgB,MAAM,OAAO,mBAAmB;EACtD;EACA,eAAe,YAAY;EAC3B,MAAM,QAAQ;EACf,CAAC;AACF,KAAI,CAAC,YACH,OAAM,IAAI,MAAM,gBAAgB,QAAQ,KAAK,aAAa;CAI5D,MAAM,OAAO,MAAM,sBACjB,YAAY,KACZ,YAAY,UACZ,YAAY,aACb;CACD,MAAM,4BAAY,IAAI,MAAM;AAC5B,WAAU,WAAW,UAAU,YAAY,GAAG,KAAK,WAAW;AAE9D,QAAO;EACL,aAAa,KAAK;EAClB,WAAW,KAAK;EAChB,WAAW,UAAU,aAAa;EACnC;;AAGH,MAAa,eAAe,cAAc;CACxC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,MAAM,IAAI,EAAE,QAAQ,EAAE;GACpB,YAAY;GACZ,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAElC,MAAM,QAAQ,MAAM,oBAAoB;GACtC,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GAClB,CAAC;EAIF,MAAM,YAAY;GAChB,cAAc,MAAM;GACpB,YAAY,MAAM;GAClB,YAAY,MAAM;GACnB;AACD,SAAO,IAAI,UAAU;GACrB;CACH,CAAC;;;;ACtGF,MAAM,qBAAqB,cAAkD;AAC3E,SAAQ,WAAR;EACE,KAAK,2BAA2B,mBAC9B,QAAO;EACT,KAAK,2BAA2B,cAC9B,QAAO;EACT,QACE,QAAO;;;;;;;;AA4Bb,SAAgB,mBAAmB,QAA4C;AAC7E,QAAO;EACL,MAAM,OAAO;EACb,aAAa,OAAO;EACpB,UAAU,OAAO;EACjB,YAAY,OAAO,WAAW,IAAI,kBAAkB;EACpD,cAAc,OAAO;EACrB,WAAW,OAAO,YAAY,cAAc,OAAO,UAAU,GAAG;EACjE;;;;;;;AAQH,SAAgB,0BAA0B,QAAmD;AAC3F,QAAO;EACL,MAAM,OAAO;EACb,aAAa,OAAO;EACpB,UAAU,OAAO;EACjB,cAAc,OAAO;EACrB,YAAY,OAAO,WAAW,IAAI,kBAAkB;EACpD,cAAc,OAAO;EACrB,WAAW,OAAO,YAAY,cAAc,OAAO,UAAU,GAAG;EACjE;;;;;;;;;;AC5CH,eAAsB,gBACpB,SACkC;CAKlC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;CAEF,MAAM,EAAE,WAAW,MAAM,WAAW,QAAQ,WAAW;CACvD,MAAM,EAAE,gBAAgB,MAAM,OAAO,eAAe;EAClD;EACA,iBAAiB,OAAO;EACzB,CAAC;AACF,KAAI,CAAC,aAAa,cAChB,OAAM,IAAI,MAAM,eAAe,OAAO,KAAK,uCAAuC;AAGpF,KAAI;EACF,MAAM,EAAE,iBAAiB,MAAM,OAAO,oBAAoB;GACxD;GACA,eAAe,YAAY;GAC3B,MAAM,QAAQ;GACf,CAAC;AAEF,SAAO,0BAA0B,aAAc;UACxC,OAAO;AACd,MAAI,iBAAiB,gBAAgB,MAAM,SAAS,KAAK,SACvD,OAAM,IAAI,MAAM,kBAAkB,QAAQ,KAAK,cAAc;AAE/D,QAAM;;;AAIV,MAAaC,eAAa,cAAc;CACtC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,MAA
M,IAAI,EAAE,QAAQ,EAAE;GACpB,YAAY;GACZ,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,cAAc,MAAM,gBAAgB;GACxC,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GAClB,CAAC;AAEF,SAAO,IAAI,YAAY;GACvB;CACH,CAAC;;;;;;;;;AChEF,eAAsB,kBACpB,SAC6B;CAK7B,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;CAEF,MAAM,EAAE,WAAW,MAAM,WAAW,SAAS,WAAW;CACxD,MAAM,EAAE,gBAAgB,MAAM,OAAO,eAAe;EAClD;EACA,iBAAiB,OAAO;EACzB,CAAC;AACF,KAAI,CAAC,aAAa,cAChB,OAAM,IAAI,MAAM,eAAe,OAAO,KAAK,uCAAuC;AAapF,SAVsB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EACrE,MAAM,EAAE,eAAe,kBAAkB,MAAM,OAAO,sBAAsB;GAC1E;GACA;GACA,UAAU;GACV,eAAe,YAAY;GAC5B,CAAC;AACF,SAAO,CAAC,eAAe,cAAc;GACrC,EAEmB,IAAI,mBAAmB;;AAG9C,MAAaC,gBAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,gBAAgB,MAAM,kBAAkB;GAC5C,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GAClB,CAAC;AAEF,SAAO,IAAI,cAAc;GACzB;CACH,CAAC;;;;AC5CF,eAAeC,eAAY,SAAyB;CAKlD,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;CACF,MAAM,EAAE,WAAW,MAAM,WAAW,SAAS,WAAW;AAExD,QAAO;EACL;EACA;EACA,aAJkB,kBAAkB,EAAE,QAAQ,CAAC;EAK/C;EACD;;AAGH,eAAe,WACb,QACA,aACA,aACA,QACA,SACA;CAEA,MAAM,MAAmB;EACvB;EACA;EACA;EACA,YAAY;EACZ;EACD;CACD,MAAM,WAAW,MAAM,aAAa,IAAI;CACxC,MAAM,gBAAgB,MAAM,kBAAkB,IAAI;CAClD,MAAM,MAAM,MAAM,QAAQ,IAAI;CAC9B,MAAM,OAAO,MAAM,SAAS,IAAI;CAChC,MAAM,WAAW,MAAM,aAAa,IAAI;CACxC,MAAM,MAAM,MAAM,gBAAgB,IAAI;CACtC,MAAM,WAAW,MAAM,aAAa,IAAI;CACxC,MAAM,WAAW,MAAM,aAAa,QAAQ,aAAa,YAAY,MAAM,EAAE,EAAE,EAAE,CAAC;CAClF,MAAM,mBAAmB,MAAM,qBAAqB,QAAQ,aAAa,YAAY,MAAM,EAAE,CAAC;CAC9F,MAAM,gBAAgB,MAAM,kBAAkB,IAAI;AAElD,KACE,SAAS,UAAU,QAAQ,QAAQ,WAAW,KAC9C,cAAc,UAAU,QAAQ,WAAW,KAC3C,IAAI,UAAU,QAAQ,QAAQ,WAAW,KACzC,KAAK,UAAU,QAAQ,QAAQ,WAAW,KAC1C,SAAS,UAAU,QAAQ,QAAQ,WAAW,KAC9C,IAAI,QAAQ,WAAW,KACvB,SAAS,UAAU,QAAQ,WAAW,KACtC,SA
AS,UAAU,QAAQ,WAAW,KACtC,iBAAiB,UAAU,QAAQ,WAAW,KAC9C,cAAc,eAAe,QAAQ,WAAW,KAChD,cAAc,gBAAgB,QAAQ,WAAW,EAEjD;AAIF,KAAI,QACF,OAAM,SAAS;AAIjB,OAAM,cAAc,QAAQ,UAAU,SAAS;AAC/C,OAAM,cAAc,QAAQ,UAAU,SAAS;AAC/C,OAAM,mBAAmB,QAAQ,eAAe,SAAS;AACzD,OAAM,iBAAiB,QAAQ,KAAK,SAAS;AAC7C,OAAM,cAAc,QAAQ,UAAU,mBAAmB;AACzD,OAAM,cAAc,QAAQ,UAAU,kBAAkB;AACxD,OAAM,UAAU,QAAQ,MAAM,mBAAmB;AACjD,OAAM,UAAU,QAAQ,MAAM,kBAAkB;AAChD,OAAM,SAAS,QAAQ,KAAK,mBAAmB;AAC/C,OAAM,SAAS,QAAQ,KAAK,kBAAkB;AAC9C,OAAM,cAAc,QAAQ,UAAU,mBAAmB;AACzD,OAAM,cAAc,QAAQ,UAAU,kBAAkB;AACxD,OAAM,sBAAsB,QAAQ,aAAa,kBAAkB,SAAS;AAC5E,OAAM,mBAAmB,QAAQ,eAAe,SAAS;;;;;;;AAQ3D,eAAsB,OAAO,SAAwC;CACnE,MAAM,EAAE,QAAQ,aAAa,aAAa,WAAW,MAAMA,eAAY,QAAQ;AAC/E,OAAM,WAAW,QAAQ,aAAa,aAAa,OAAO;;AAG5D,MAAaC,kBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,EAAE,QAAQ,aAAa,aAAa,WAAW,MAAMD,eAAY;GACrE,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GAClB,CAAC;AAEF,SAAO,KAAK,6CAA6C,YAAY,KAAK,MAAM;AAChF,SAAO,SAAS;AAEhB,QAAM,WAAW,QAAQ,aAAa,aAAa,QAAQ,YAAY;AACrE,OAAI,CAAC,KAAK,KAKR;QAAI,CAJc,MAAM,OAAO,OAAO,kDAAkD;KACtF,MAAM;KACN,SAAS;KACV,CAAC,CAEA,OAAM,IAAI,MAAM,EAAE;;;QAGpB;SAGA,QAAO,QAAQ,mDAAmD;IAEpE;AAEF,SAAO,QAAQ,kDAAkD,YAAY,KAAK,IAAI;GACtF;CACH,CAAC;;;;AC/HF,SAAS,gBAAgB,KAAmC;AAC1D,QAAO;EACL,MAAM,IAAI;EACV,QAAQ,IAAI;EACZ,KAAK,IAAI;EACT,MAAM,IAAI;EACV,MAAM,IAAI;EACV,oBAAoB,IAAI;EACxB,sBAAsB,IAAI;EAC1B,WAAW,IAAI,aAAa,cAAc,IAAI,WAAW,GAAG;EAC5D,WAAW,IAAI,aAAa,cAAc,IAAI,WAAW,GAAG;EAC7D;;;;;;;AAQH,eAAsB,KAAK,SAA0C;CAMnE,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;CAEF,MAAM,EAAE,WAAW,MAAM,WAAW,SAAS,WAAW;CACxD,MAAM,CAAC,eAAe,QAAQ,MAAM,QAAQ,IAAI,CAC9C,OAAO,aAAa,EAClB,aACD,CAAC,EACF,OAAO,eAAe;EACpB;EACA,iBAAiB,OAAO;EACzB,CAAC,CACH,CAAC;CACF,MAAM,EAAE,MAAM,GAAGE,cAAY,gBAAgB,KAAK,YAAa;AAE/D,QAAO;EACL;EACA;EACA,eAAe,cAAc,WAAW,QAAQ;EAChD,iBAAiB,cAAc,WAAW,UAAU;EACpD,GAAGA;EACJ;;AAGH,MAAa,cAAc,cAAc;
CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAElC,MAAMA,YAAU,MAAM,KAAK;GACzB,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GAClB,CAAC;AAEF,SAAO,IAAIA,UAAQ;GACnB;CACH,CAAC;;;;;;;;ACtGF,SAAgB,eAAe,aAA2B;AACxD,QAAO,KACL,QAAQ,YAAY,oFACrB;AACD,QAAO,SAAS;;;;;ACRlB,MAAM,iBAAiB;AAEvB,SAAS,uBAAuB,QAAgD;CAC9E,MAAM,aAAa,QAAQ,MAAM;AACjC,QAAO,cAAc,WAAW,SAAS,IAAI,aAAa;;;;;;AAO5D,SAAgB,6BAAiD;AAC/D,QAAO,uBAAuB,QAAQ,IAAI,OAAO,IAAI,uBAAuB,QAAQ,IAAI,OAAO;;;;;;AAOjG,SAAgB,mBAA2B;AACzC,QAAO,4BAA4B,IAAI;;AAGzC,SAAS,mBAAmB,QAG1B;CACA,MAAM,CAAC,SAAS,GAAG,QAAQ,OAAO,MAAM,CAAC,MAAM,MAAM;AAErD,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,2BAA2B;AAG7C,QAAO;EACL;EACA;EACD;;;;;;;;AASH,eAAsB,aACpB,UACA,SAAS,kBAAkB,EACT;CAClB,MAAM,EAAE,SAAS,SAAS,mBAAmB,OAAO;AAEpD,OAAM,IAAI,SAAe,WAAS,WAAW;EAC3C,MAAM,QAAQ,MAAM,SAAS,CAAC,GAAG,MAAM,SAAS,EAAE;GAChD,OAAO;GACP,UAAU;GACX,CAAC;AAEF,QAAM,KAAK,UAAU,UAAU,OAAO,MAAM,CAAC;AAC7C,QAAM,KAAK,UAAU,SAAS;AAC5B,OAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,eAAS;AACT;;AAEF,0BAAO,IAAI,MAAM,2BAA2B,KAAK,GAAG,CAAC;IACrD;GACF;AAEF,QAAO;;;;;;;AAQT,eAAsB,uBAAuB,UAAoC;CAC/E,MAAM,SAAS,4BAA4B;AAC3C,KAAI,CAAC,OACH,QAAO;AAGT,QAAO,MAAM,aAAa,UAAU,OAAO;;;;;;;;;;;;;;;;ACxC7C,SAAS,4BAA4B,MAA8C;CACjF,MAAM,qCAAqB,IAAI,KAA0B;CACzD,MAAM,sCAAsB,IAAI,KAA+C;CAC/E,MAAM,mCAAmB,IAAI,KAA2C;AAExE,MAAK,MAAM,UAAU,KAAK,QACxB,KAAI,OAAO,SAAS,oBAAoB,OAAO,WAAW;EACxD,MAAM,SAAS,OAAO;EACtB,MAAM,QAAQ,OAAO;AAGrB,MAAI,UAAU,SAAS,CAAC,OAAO,YAAY,MAAM,UAAU;AACzD,OAAI,CAAC,mBAAmB,IAAI,OAAO,SAAS,CAC1C,oBAAmB,IAAI,OAAO,0BAAU,IAAI,KAAK,CAAC;AAEpD,sBAAmB,IAAI,OAAO,SAAS,CAAE,IAAI,OAAO,UAAU;;AAIhE,MACE,UACA,SACA,OAAO,SAAS,UAChB,MAAM,SAAS,UACf,OAAO,iBACP,MAAM,eACN;GAEA,MAAM,eAAe,OAAO,cAAc,KAAK,MAAM,EAAE,MAAM;GAC7D,MAAM,cAAc,MAAM,cAAc,KAAK,MAAM,EAAE,MAAM;GAC3D,MAAM,YAAY,IAAI,IAAI,aAAa;GACvC,MAAM,WAAW,IAAI,IAAI,YAAY;AAIrC,OAFE,aAAa,MAAM,MAAM,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,YAAY,MAAM,MAAM,CAAC,UAAU,IAAI,EAAE,CAAC,EAE1E;AACd,QAAI,CAAC,iBAAiB,IAAI,OAAO,SAAS,CACxC,kBAAiB,IAAI,OAAO,0BAAU,IAAI,KAA
K,CAAC;AAElD,qBAAiB,IAAI,OAAO,SAAS,CAAE,IAAI,OAAO,WAAW;KAC3D;KACA;KACD,CAAC;;;YAGG,OAAO,SAAS,iBAAiB,OAAO,WAAW;EAC5D,MAAM,QAAQ,OAAO;AAIrB,MAAI,SAAS,MAAM,UAAU;AAC3B,OAAI,CAAC,oBAAoB,IAAI,OAAO,SAAS,CAC3C,qBAAoB,IAAI,OAAO,0BAAU,IAAI,KAAK,CAAC;AAErD,uBAAoB,IAAI,OAAO,SAAS,CAAE,IAAI,OAAO,WAAW,MAAM;;;AAK5E,QAAO;EAAE;EAAoB;EAAqB;EAAkB;;;;;;;;AAStE,SAAS,4BAA4B,UAA0B,MAA8B;CAC3F,MAAM,QAAQ,OAAO,OAAO,SAAS,MAAM;AAC3C,KAAI,MAAM,WAAW,EACnB,QAAO,qBAAqB,SAAS,UAAU;CAIjD,MAAM,uBAAuB,OACzB,4BAA4B,KAAK,GACjC;EACE,oCAAoB,IAAI,KAAK;EAC7B,qCAAqB,IAAI,KAAK;EAC9B,kCAAkB,IAAI,KAAK;EAC5B;CAGL,MAAM,mCAAmB,IAAI,KAA6B;CAG1D,MAAM,kBAA4B,EAAE;AACpC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,SAAS,kBAAkB,MAAM,qBAAqB;AAC5D,MAAI,OAAO,cAAe,kBAAiB,IAAI,YAAY;AAC3D,kBAAgB,KAAK,OAAO,QAAQ;;CAKtC,MAAM,UAAoB,CAAC,mBAAmB,wCAAwC;CAGtF,MAAM,0BAAoC,EAAE;AAC5C,KAAI,iBAAiB,IAAI,YAAY,CACnC,yBAAwB,KACtB,mEACD;AAEH,yBAAwB,KACtB,iJACD;AACD,KAAI,iBAAiB,IAAI,SAAS,CAChC,yBAAwB,KAAK,kEAAkE;AAuBjG,QAnBwB;EACtB;EACA;EACA;EACA;EACA;EACA;EACA;EACA,YAAY,QAAQ,KAAK,KAAK,CAAC;EAC/B;EACA,GAAG;EACH;EACA;EACA,GAAG;EACH;EACA;EACA;EACD,CAEY,KAAK,KAAK,GAAG;;;;;;;AAQ5B,SAAS,qBAAqB,WAA2B;AACvD,QACE;EACE;EACA;EACA,iBAAiB;EACjB;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC,KAAK,KAAK,GAAG;;;;;;;;AAUnB,SAAS,kBACP,MACA,sBAKA;CACA,MAAM,aAAuB,EAAE;CAC/B,IAAI,gBAAgB;CACpB,IAAI,iBAAiB;AAGrB,YAAW,KAAK,6BAA6B;CAG7C,MAAM,2BACJ,qBAAqB,mBAAmB,IAAI,KAAK,KAAK,oBAAI,IAAI,KAAK;CAGrE,MAAM,sBAAsB,qBAAqB,oBAAoB,IAAI,KAAK,KAAK,oBAAI,IAAI,KAAK;CAGhG,MAAM,0BAA0B,qBAAqB,iBAAiB,IAAI,KAAK,KAAK,oBAAI,IAAI,KAAK;AAEjG,MAAK,MAAM,CAAC,WAAW,gBAAgB,OAAO,QAAQ,KAAK,OAAO,EAAE;AAClE,MAAI,cAAc,KAAM;EAIxB,MAAM,SAAS,kBAAkB,aAFJ,yBAAyB,IAAI,UAAU,EAC5C,wBAAwB,IAAI,UAAU,CACsB;AACpF,aAAW,KAAK,OAAO,UAAU,IAAI,OAAO,KAAK,GAAG;AACpD,kBAAgB,iBAAiB,OAAO;AACxC,mBAAiB,kBAAkB,OAAO;;AAK5C,MAAK,MAAM,CAAC,WAAW,gBAAgB,qBAAqB;EAE1D,MAAM,SAAS,kBAAkB,aAAa,MAAM,OAAU;AAC9D,aAAW,KAAK,OAAO,UAAU,IAAI,OAAO,KAAK,GAAG;AACpD,kBAAgB,iBAAiB,OAAO;AACxC,mBAAiB,kBAAkB,OAAO;;AAK5C,QAAO;EAAE,SAFO,KAAK,KAAK,KAAK,OAAO,WAA
W,KAAK,KAAK,CAAC;EAE1C;EAAe;EAAgB;;AAGnD,SAAS,YAAY,WAGnB;AACA,SAAQ,WAAR;EACE,KAAK;EACL,KAAK;EACL,KAAK,UACH,QAAO;GAAE,MAAM;GAAU,eAAe;GAAO;EACjD,KAAK;EACL,KAAK;EACL,KAAK,SACH,QAAO;GAAE,MAAM;GAAU,eAAe;GAAO;EACjD,KAAK;EACL,KAAK,WACH,QAAO;GAAE,MAAM;GAAa,eAAe;GAAM;EACnD,KAAK;EACL,KAAK,UACH,QAAO;GAAE,MAAM;GAAW,eAAe;GAAO;EAClD,QACE,QAAO;GAAE,MAAM;GAAU,eAAe;GAAO;;;AAIrD,SAAS,gBAAgB,QAA0B;AACjD,QAAO,OAAO,KAAK,MAAM,IAAI,EAAE,GAAG,CAAC,KAAK,MAAM;;AAGhD,SAAS,6BACP,iBACA,QACQ;CAER,MAAM,aAAa,gBADD,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,gBAAgB,cAAc,GAAG,gBAAgB,YAAY,CAAC,CAAC,CACpD;CAC7C,MAAM,YAAY,gBAAgB,gBAAgB,YAAY;AAE9D,KAAI,OAAO,SAAS,CAAC,OAAO,SAC1B,QAAO,eAAe,WAAW,eAAe,UAAU,eAAe,UAAU;AAErF,KAAI,OAAO,MACT,QAAO,eAAe,WAAW,QAAQ,UAAU,QAAQ,UAAU;AAEvE,KAAI,CAAC,OAAO,SACV,QAAO,eAAe,WAAW,aAAa,UAAU,aAAa,UAAU;AAEjF,QAAO,cAAc,WAAW,IAAI,UAAU,IAAI,UAAU;;;;;;;;;AAU9D,SAAS,kBACP,QACA,sBACA,iBAKA;AAEA,KAAI,gBACF,QAAO;EACL,MAAM,6BAA6B,iBAAiB,OAAO;EAC3D,eAAe;EACf,gBAAgB;EACjB;CAIH,IAAI;CACJ,IAAI,gBAAgB;AAEpB,KAAI,OAAO,SAAS,QAAQ;EAC1B,MAAM,aAAa,OAAO,eAAe,KAAK,MAAM,EAAE,MAAM,IAAI,EAAE;AAClE,aAAW,WAAW,SAAS,IAAI,gBAAgB,WAAW,GAAG;QAC5D;EACL,MAAM,SAAS,YAAY,OAAO,KAAK;AACvC,aAAW,OAAO;AAClB,kBAAgB,OAAO;;CAIzB,IAAI,OAAO;AACX,KAAI,OAAO,MAGT,QADE,OAAO,SAAS,UAAU,OAAO,iBAAiB,OAAO,cAAc,SAAS,IAC7D,IAAI,SAAS,OAAO,GAAG,SAAS;AAIvD,KAAI,qBAIF,QAAO;EACL,MAAM,cAAc,KAAK,WAAW,KAAK,IAAI,KAAK;EAClD;EACA,gBAAgB;EACjB;AAGH,KAAI,CAAC,OAAO,SACV,QAAO,GAAG,KAAK;AAGjB,QAAO;EAAE;EAAM;EAAe,gBAAgB;EAAO;;;;;;;;;;AAWvD,eAAsB,iBACpB,UACA,eACA,iBACA,MACiB;CACjB,MAAM,UAAU,4BAA4B,UAAU,KAAK;CAC3D,MAAM,WAAW,qBAAqB,eAAe,iBAAiB,KAAK;AAC3E,OAAM,GAAG,UAAU,UAAU,QAAQ;AACrC,QAAO;;;;;;;;;;;;;;;;;;;AC7WT,eAAe,WAAW,UAAoC;AAC5D,KAAI;AACF,QAAM,GAAG,OAAO,SAAS;AACzB,SAAO;SACD;AACN,SAAO;;;;;;;;AASX,eAAe,oBAAoB,UAAiC;AAClE,KAAI,MAAM,WAAW,SAAS,CAC5B,OAAM,IAAI,MAAM,kCAAkC,WAAW;;;;;;;;;AAuBjE,eAAsB,mBACpB,UACA,eACA,iBAC+B;CAE/B,MAAM,eAAe,oBAAoB,eAAe,gBAAgB;AACxE,OAAM,GAAG,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;CAEjD,MAAM,WAAW,qBAAqB,eAAe,iBAAiB,SAAS;AAG/E,OAAM,oBAAoB,SAAS;AAEnC
,OAAM,GAAG,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,EAAE,CAAC;AAE/D,QAAO;EACL;EACA;EACD;;;;;;;;;;;AAYH,eAAsB,kBACpB,MACA,eACA,iBACA,kBACA,aAC6B;CAE7B,MAAM,eAAe,oBAAoB,eAAe,gBAAgB;AACxE,OAAM,GAAG,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;CAGjD,MAAM,eAAe,qBAAqB,eAAe,iBAAiB,OAAO;CACjF,MAAM,kBAAkB,qBAAqB,eAAe,iBAAiB,UAAU;CACvF,MAAM,kBAAkB,qBAAqB,eAAe,iBAAiB,KAAK;AAGlF,OAAM,oBAAoB,aAAa;AACvC,KAAI,KAAK,yBAAyB;AAChC,QAAM,oBAAoB,gBAAgB;AAC1C,QAAM,oBAAoB,gBAAgB;;AAI5C,KAAI,YACF,QAAO;EAAE,GAAG;EAAM;EAAa;AAIjC,OAAM,GAAG,UAAU,cAAc,KAAK,UAAU,MAAM,MAAM,EAAE,CAAC;CAE/D,MAAM,SAA6B;EACjC;EACA;EACD;AAGD,KAAI,KAAK,yBAAyB;EAChC,MAAM,gBAAgB,wBAAwB,KAAK;AACnD,QAAM,GAAG,UAAU,iBAAiB,cAAc;AAClD,SAAO,kBAAkB;AAKzB,QAAM,iBAAiB,kBAAkB,eAAe,iBAAiB,KAAK;AAC9E,SAAO,kBAAkB;;AAG3B,QAAO;;;;;;;AAQT,SAAS,wBAAwB,MAA6B;CAC5D,MAAM,UAAoB,EAAE;AAE5B,MAAK,MAAM,UAAU,KAAK,SAAS;EACjC,MAAM,SAAS,qBAAqB,OAAO;AAC3C,MAAI,OACF,SAAQ,KAAK,OAAO;;AAIxB,KAAI,QAAQ,WAAW,EACrB,SAAQ,KAAK;kDACiC;AAGhD,QAAO;0BACiB,KAAK,UAAU;;;;;;;;;;;;EAYvC,QAAQ,KAAK,OAAO,CAAC;;;;;;;;;AAUvB,SAAS,qBAAqB,QAAmC;AAC/D,KAAI,OAAO,SAAS,eAAe;AAEjC,MADc,OAAO,MACX,SACR,QAAO,iBAAiB,OAAO,UAAU,gBAAgB,OAAO,SAAS;;oBAE3D,OAAO,SAAS;;QAE5B,OAAO,UAAU;;;AAIrB,SAAO;;AAGT,KAAI,OAAO,SAAS,iBAElB,QAAO;CAGT,MAAM,SAAS,OAAO;CACtB,MAAM,QAAQ,OAAO;AAMrB,KAAI,CAAC,OAAO,YAAY,MAAM,SAC5B,QAAO,YAAY,OAAO,UAAU,OAAO,OAAO,SAAS;;oBAE3C,OAAO,SAAS;;QAE5B,OAAO,UAAU;;cAEX,OAAO,UAAU;;AAQ7B,KAAI,EAAE,OAAO,UAAU,WAAW,MAAM,UAAU,OAChD,QAAO,eAAe,OAAO,UAAU;;mBAExB,OAAO,SAAS;gBACnB,OAAO,UAAU;gBACjB,OAAO,UAAU;;;;;;qBAMZ,OAAO,SAAS;wBACb,OAAO,UAAU;gBACzB,OAAO,UAAU,cAAc,OAAO,UAAU;;;;wBAIxC,OAAO,SAAS;iBACvB,OAAO,UAAU,oBAAoB,OAAO,UAAU;;;;;AAQrE,KAAI,OAAO,SAAS,UAAU,MAAM,SAAS,QAAQ;EACnD,MAAM,eAAe,OAAO,iBAAiB,EAAE;EAC/C,MAAM,cAAc,MAAM,iBAAiB,EAAE;EAC7C,MAAM,gBAAgB,aAAa,QAAQ,MAAM,CAAC,YAAY,SAAS,EAAE,CAAC;AAC1E,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,eAAe,YAAY,MAAM;AACvC,UAAO,kDAAkD,cAAc,KAAK,KAAK,CAAC;;oBAEpE,OAAO,SAAS;aACvB,OAAO,UAAU,KAAK,aAAa;cAClC,OAAO,UAAU,YAAY,cAAc,KAAK,MAAM,IAAI,EAAE,GAAG,CAAC,KAAK,KAAK,CAAC;;;;AAMvF,KACE,OAAO,kBACP
,MAAM,kBACN,OAAO,mBAAmB,MAAM,eAEhC,QAAO,gBAAgB,OAAO,UAAU,mBAAmB,OAAO,eAAe,MAAM,MAAM,eAAe;;;mBAG7F,OAAO,SAAS;iBAClB,MAAM,eAAe,MAAM,OAAO,SAAS,GAAG,OAAO,UAAU,MAAM,MAAM,eAAe;gBAC3F,OAAO,SAAS,SAAS,OAAO,SAAS,GAAG,OAAO,UAAU;cAC/D,MAAM,eAAe;cACrB,OAAO,SAAS,GAAG,OAAO,UAAU;;;;sBAI5B,OAAO,SAAS;eACvB,OAAO,UAAU;;;;AAM9B,QAAO;;;;;;;;;;;;;;;;;;ACvPT,eAAe,iBACb,YACA,kBACe;CAEf,MAAM,eAAe,WAAW,QAAQ,EAAE,oBAAoBC,KAAG,WAAW,cAAc,CAAC;AAE3F,KAAI,aAAa,WAAW,GAAG;AAC7B,SAAO,KAAK,2CAA2C;AACvD;;AAIF,QAAO,SAAS;AAChB,QAAO,KAAK,iDAAiD;AAC7D,MAAK,MAAM,EAAE,WAAW,mBAAmB,aACzC,QAAO,IAAI,OAAO,UAAU,IAAI,gBAAgB;AAElD,QAAO,SAAS;AAGhB,KAAI,CAAC,kBAAkB;AASrB,MAAI,CARiB,MAAM,OAAO,OAChC,sEACA;GACE,MAAM;GACN,SAAS;GACV,CACF,EAEkB;AACjB,UAAO,KAAK,uBAAuB;AACnC,WAAQ,KAAK,EAAE;;AAEjB,SAAO,SAAS;;AAIlB,MAAK,MAAM,EAAE,WAAW,mBAAmB,aACzC,KAAI;AACF,QAAMC,GAAW,GAAG,eAAe;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AACpE,SAAO,QAAQ,mCAAmC,OAAO,KAAK,UAAU,GAAG;UACpE,OAAO;AACd,SAAO,MAAM,oBAAoB,cAAc,IAAI,QAAQ;AAC3D,QAAM;;AAIV,QAAO,SAAS;AAChB,QAAO,KAAK,gEAAgE;AAC5E,QAAO,SAAS;;;;;;;AAQlB,eAAsB,SAAS,SAAyC;AACtE,gBAAe,qBAAqB;CAGpC,MAAM,EAAE,QAAQ,YAAY,MAAM,WAAW,QAAQ,WAAW;CAIhE,MAAM,2BAAsD,4BAC1D,QAJgB,KAAK,QAAQ,OAAO,KAAK,CAM1C;AAED,KAAI,yBAAyB,WAAW,GAAG;AACzC,SAAO,KAAK,uDAAuD;AACnE,SAAO,KACL,iGACD;AACD;;AAIF,KAAI,QAAQ,KACV,OAAM,iBAAiB,0BAA0B,QAAQ,IAAI;CAI/D,IAAI;AACJ,KAAI,QAAQ,SAAS,EACnB,iBAAgB,IAAI,cAAc,QAAQ;CAI5C,MAAM,EAAE,2CAAsB,MAAM,OAAO;CAC3C,MAAM,cAAcC,oBAAkB;EAAE;EAAQ;EAAe,CAAC;AAGhE,MAAK,MAAM,EAAE,WAAW,mBAAmB,0BAA0B;AACnE,SAAO,KAAK,yBAAyB,OAAO,KAAK,UAAU,GAAG;AAG9D,4BAA0B,eAAe,UAAU;EAGnD,MAAM,kBAAkB,YAAY,iBAAiB,MAAM,MAAM,EAAE,cAAc,UAAU;AAC3F,MAAI,CAAC,iBAAiB;AACpB,UAAO,KAAK,4CAA4C,UAAU,GAAG;AACrE;;AAIF,QAAM,gBAAgB,WAAW;AACjC,QAAM,gBAAgB,yBAAyB;EAE/C,MAAM,gBAAgB,gBAAgB;EAGtC,MAAM,kBAAkB,6BAA6B,eAAe,UAAU;EAG9E,IAAI,mBAA0C;AAC9C,MAAI;AACF,sBAAmB,kCAAkC,cAAc;UAC7D;AAIR,MAAI,CAAC,iBAEH,OAAM,wBAAwB,iBAAiB,cAAc;MAG7D,OAAM,yBAAyB,kBAAkB,iBAAiB,eAAe,QAAQ;;;;;;;;;AAW/F,eAAe,wBACb,UACA,eACe;CACf,MAAM,SAAS,MAAM,mBAAmB,UAAU,eAAe,sBAAsB;AAEvF,QAAO,QAAQ
,oCAAoC;AACnD,QAAO,KAAK,WAAW,OAAO,WAAW;AACzC,QAAO,KAAK,YAAY,OAAO,KAAK,SAAS,MAAM,CAAC,SAAS;AAE7D,QAAO,IAAI,0EAA0E;;;;;;;;;;AAWvF,eAAe,yBACb,kBACA,iBACA,eACA,SACe;CAEf,MAAM,OAAO,iBAAiB,kBAAkB,gBAAgB;AAGhE,KAAI,CAAC,WAAW,KAAK,EAAE;AACrB,SAAO,KAAK,kCAAkC;AAC9C;;AAIF,QAAO,SAAS;AAChB,QAAO,IAAI,oBAAoB,KAAK,CAAC;AACrC,QAAO,SAAS;AAChB,QAAO,KAAK,YAAY,kBAAkB,KAAK,GAAG;CAGlD,MAAM,qBAAqB,KAAK,gBAAgB,QAAQ,WAAW,OAAO,YAAY;AACtF,KAAI,mBAAmB,SAAS,GAAG;AACjC,OAAK,MAAM,UAAU,oBAAoB;AACvC,UAAO,SAAS;AAChB,UAAO,MAAM,uBAAuB,OAAO,SAAS,GAAG,OAAO,YAAY;AAC1E,UAAO,MAAM,KAAK,OAAO,SAAS;;AAIpC,MAAI,mBAAmB,MAAM,WAAW,OAAO,kBAAkB,EAAE;AACjE,UAAO,SAAS;AAChB,UAAO,KAAK,2DAA2D;AACvE,UAAO,KAAK,4DAA4D;AACxE,UAAO,KAAK,8DAA8D;AAC1E,UAAO,KAAK,sCAAsC;AAClD,UAAO,KAAK,yEAAyE;AACrF,UAAO,KAAK,gFAAgF;;EAG9F,MAAM,UAAU,mBACb,KAAK,MAAM,OAAO,EAAE,SAAS,GAAG,EAAE,UAAU,IAAI,EAAE,SAAS,CAC3D,KAAK,KAAK;AACb,QAAM,IAAI,MAAM,yCAAyC,UAAU;;AAIrE,KAAI,KAAK,oBAAoB;AAC3B,SAAO,SAAS;AAChB,SAAO,KAAK,sBAAsB,KAAK,gBAAgB,CAAC;AAExD,MAAI,CAAC,QAAQ,KAAK;AAOhB,OANqB,MAAM,OAAO,OAAO,kCAAkC;IACzE,MAAM;IACN,SAAS;IACT,QAAQ;IACT,CAAC,KAEmB,MAAM;AACzB,WAAO,KAAK,kCAAkC;AAC9C;;AAEF,UAAO,SAAS;;;CAQpB,MAAM,SAAS,MAAM,kBACnB,MACA,eALsB,uBAAuB,cAAc,EAO3D,kBACA,QAAQ,KACT;AAED,QAAO,QACL,uBAAuB,OAAO,KAAK,OAAO,gBAAgB,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,GACvF;AACD,QAAO,KAAK,gBAAgB,OAAO,eAAe;AAElD,KAAI,OAAO,iBAAiB;AAC1B,SAAO,KAAK,uBAAuB,OAAO,kBAAkB;AAC5D,MAAI,OAAO,gBACT,QAAO,KAAK,eAAe,OAAO,kBAAkB;AAEtD,SAAO,SAAS;AAChB,SAAO,IAAI,yDAAyD;AACpE,SAAO,IAAI,uEAAuE;EAElF,MAAM,SAAS,4BAA4B;AAC3C,MAAI,CAAC,OACH;AAGF,MAAI;AACF,SAAMD,GAAW,OAAO,OAAO,gBAAgB;UACzC;AACN;;AAGF,SAAO,SAAS;AAChB,SAAO,KAAK,WAAW,KAAK,SAAS,OAAO,gBAAgB,CAAC,MAAM,OAAO,KAAK;AAE/E,MAAI;AACF,SAAM,uBAAuB,OAAO,gBAAgB;UAC9C;AACN;;;;;;;AAQN,MAAa,kBAAkB,cAAc;CAC3C,MAAM;CACN,aACE;CACF,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,MAAM,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAC/B,OAAO;GACP,aAAa;GACd,CAAC;EACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE,EACpC,aAAa,8CACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;AAClC,QAAM
,SAAS;GACb,YAAY,KAAK;GACjB,MAAM,KAAK;GACX,KAAK,KAAK;GACV,MAAM,KAAK;GACZ,CAAC;GACF;CACH,CAAC;;;;;;;;;;AC5VF,SAAgB,qBAAqB,QAAgC;CACnE,MAAM,6BAAa,IAAI,KAAa;AAGpC,KAAI,OAAO,GACT,MAAK,MAAM,CAAC,kBAAkB,OAAO,QAAQ,OAAO,GAAG,CACrD,YAAW,IAAI,cAAc;AAIjC,QAAO,MAAM,KAAK,WAAW;;;;;;;;;;ACA/B,eAAsB,sBACpB,MAC8B;CAC9B,MAAM,4CAA4B,IAAI,KAAuB;AAC7D,MAAK,MAAM,YAAY,KAAK,WAAW;EACrC,MAAM,MAAM,SAAS,aAAa;EAClC,MAAM,WAAW,0BAA0B,IAAI,IAAI;AACnD,MAAI,UAAU;AACZ,YAAS,KAAK,SAAS;AACvB;;AAEF,4BAA0B,IAAI,KAAK,CAAC,SAAS,CAAC;;CAGhD,MAAM,kBAAkB,IAAI,IAAI,KAAK,UAAU;CAC/C,MAAM,mCAAmB,IAAI,KAAqB;AAElD,MAAK,MAAM,aAAa,KAAK,YAAY;AACvC,MAAI,gBAAgB,SAAS,EAC3B;AAGF,MAAI;GACF,MAAM,EAAE,kBAAkB,MAAM,KAAK,OAAO,kBAAkB;IAC5D,aAAa,KAAK;IAClB,eAAe;IAChB,CAAC;AAEF,QAAK,MAAM,QAAQ,eAAe;IAChC,MAAM,wBAAwB,0BAA0B,IAAI,KAAK,KAAK,aAAa,CAAC;AACpF,QAAI,CAAC,sBACH;AAGF,SAAK,MAAM,qBAAqB,uBAAuB;AACrD,SAAI,iBAAiB,IAAI,kBAAkB,CACzC;AAEF,sBAAiB,IAAI,mBAAmB,UAAU;AAClD,qBAAgB,OAAO,kBAAkB;;;UAGvC;AACN;;;AAIJ,QAAO;;;;;ACpCT,eAAe,mBACb,SACA,QACe;AACf,OAAM,OAAO,qBAAqB;EAChC,aAAa,QAAQ;EACrB,eAAe,QAAQ;EACvB,kBAAkB,QAAQ;EAC3B,CAAC;AAEF,QAAO,QAAQ,mBAAmB,QAAQ,SAAS,kBAAkB,QAAQ,cAAc,GAAG;;AAGhG,eAAe,kBACb,aACA,eACA,QACe;AACf,OAAM,OAAO,sBAAsB;EACjC;EACA;EACD,CAAC;AAEF,QAAO,QAAQ,qCAAqC,cAAc,GAAG;;;;;;;AAQvE,eAAsB,SAAS,SAA0C;AACvE,QAAO,MAAM,UAAU;EAAE,GAAG;EAAS,KAAK;EAAM,CAAC;;AAGnD,eAAe,UAAU,SAAkD;CAMzE,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;CAGF,MAAM,WAAW,SAAS,SAAS,QAAQ,MAAM,SAAS;CAC1D,MAAM,eAAe,CAAC,CAAC,SAAS;CAChC,MAAM,SAAS,CAAC,CAAC,SAAS;CAG1B,MAAM,cAAc;EAAC;EAAQ;EAAc;EAAS,CAAC,OAAO,QAAQ,CAAC;AACrE,KAAI,gBAAgB,EAClB,OAAM,IAAI,MAAM,kEAAkE;AAEpF,KAAI,cAAc,EAChB,OAAM,IAAI,MACR,8FACD;CAIH,MAAM,EAAE,WAAW,MAAM,WAAW,SAAS,WAAW;CACxD,MAAM,aAAa,qBAAqB,OAAO;AAG/C,KAAI,QAAQ;AACV,MAAI,WAAW,WAAW,GAAG;AAC3B,UAAO,KAAK,sCAAsC;AAClD;;AAGF,MAAI,CAAC,SAAS,KAAK;GACjB,MAAM,gBAAgB,WAAW,KAAK,KAAK;AAQ3C,OAAI,CAPiB,MAAM,OAAO,OAChC,8DAA8D,cAAc,uBAC5E;IACE,MAAM;IACN,SAAS;
IACV,CACF,EACkB;AACjB,WAAO,KAAK,sBAAsB;AAClC;;;AAIJ,OAAK,MAAM,aAAa,WACtB,OAAM,kBAAkB,aAAa,WAAW,OAAO;AAEzD,SAAO,QAAQ,yCAAyC;AACxD;;AAIF,KAAI,gBAAgB,SAAS,WAAW;EACtC,MAAM,YAAY,QAAQ;AAG1B,MAAI,CAAC,WAAW,SAAS,UAAU,CACjC,OAAM,IAAI,MACR,cAAc,UAAU,+CAA+C,WAAW,KAAK,KAAK,GAC7F;AAGH,MAAI,CAAC,QAAQ,KAQX;OAAI,CAPiB,MAAM,OAAO,OAChC,+CAA+C,UAAU,wBACzD;IACE,MAAM;IACN,SAAS;IACV,CACF,EACkB;AACjB,WAAO,KAAK,sBAAsB;AAClC;;;AAIJ,QAAM,kBAAkB,aAAa,WAAW,OAAO;AACvD;;AAIF,KAAI,YAAY,SAAS,OAAO;EAC9B,MAAM,YAAY,QAAQ;EAG1B,MAAM,mBAAmB,MAAM,sBAAsB;GACnD;GACA;GACA;GACA;GACD,CAAC;EACF,MAAM,gBAAgB,UAAU,QAAQ,aAAa,CAAC,iBAAiB,IAAI,SAAS,CAAC;AAErF,MAAI,cAAc,SAAS,EACzB,OAAM,IAAI,MACR,wDAAwD,cAAc,KAAK,KAAK,GACjF;AAGH,MAAI,CAAC,QAAQ,KAAK;GAChB,MAAM,WAAW,UAAU,KAAK,KAAK;AAQrC,OAAI,CAPiB,MAAM,OAAO,OAChC,2CAA2C,SAAS,uBACpD;IACE,MAAM;IACN,SAAS;IACV,CACF,EACkB;AACjB,WAAO,KAAK,sBAAsB;AAClC;;;AAIJ,OAAK,MAAM,YAAY,WAAW;GAChC,MAAM,YAAY,iBAAiB,IAAI,SAAS;AAChD,OAAI,CAAC,UACH;AAGF,SAAM,mBACJ;IACE;IACA,eAAe;IACf;IACD,EACD,OACD;;;;AAKP,MAAa,kBAAkB,cAAc;CAC3C,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,OAAO,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE;GACxC,YAAY;GACZ,aAAa;GACd,CAAC;EACF,KAAK,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;GACnC,OAAO;GACP,aAAa;GACd,CAAC;EACF,WAAW,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GACpC,OAAO;GACP,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,QAAQ,KAAK,SAAS,KAAK,MAAM,SAAS,IAAI,KAAK,QAAQ;AACjE,QAAM,UAAU;GACd,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GACjB,KAAK,KAAK;GACV,WAAW,KAAK;GAChB;GACA,KAAK,KAAK;GACX,CAAC;GACF;CACH,CAAC;;;;;;;;;AC9NF,eAAsB,cAAc,SAA6D;CAK/F,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,SAAS;EACnB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,SAAS;EACtB,SAAS,SAAS;EACnB,CAAC;AAWF,SATkB,MAAM,SAAS,OAAO,WAAW,gBAAgB;EACjE,MAAM,EAAE,WAAW,kBAAkB,MAAM,OAAO,cAAc;GAC9D;GACA;GACA,UAAU;GACX,CAAC;AACF,SAAO,CAAC,WAAW,cAAc;GACjC,EAEe,IAAI,mBAAmB;;AAG1C,MAAaE,gBAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,Q
AAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,YAAY,MAAM,cAAc;GACpC,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;AAEF,MAAI,UAAU,WAAW,KAAK,CAAC,KAAK,MAAM;AACxC,UAAO,KAAK,sBAAsB;AAClC;;AAEF,SAAO,IAAI,UAAU;GACrB;CACH,CAAC;;;;;;;;;AC5BF,eAAsB,eACpB,SACuC;CAKvC,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,QAAQ;EAClB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,QAAQ;EACrB,SAAS,QAAQ;EAClB,CAAC;AAEF,KAAI;EACF,MAAM,EAAE,gBAAgB,MAAM,OAAO,mBAAmB;GACtD;GACA,aAAa,QAAQ;GACtB,CAAC;AAEF,SAAO;GACL;GACA,OAAO,gBACL,iBAAiB;IACf;IACA;IACA;IACA,UAAU,QAAQ,YAAY;IAC9B,cAAc,aAAa;IAC5B,CAAC;GACL;UACM,OAAO;AACd,MAAI,iBAAiB,cAAc;AACjC,OAAI,MAAM,SAAS,KAAK,SACtB,OAAM,IAAI,MAAM,cAAc,QAAQ,YAAY,cAAc;AAElE,OAAI,MAAM,SAAS,KAAK,mBACtB,OAAM,IAAI,MAAM,cAAc,QAAQ,YAAY,gCAAgC;;AAGtF,QAAM;;;AAIV,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,aAAa,IAAI,EAAE,QAAQ,EAAE;GAC3B,YAAY;GACZ,aAAa;GACd,CAAC;EACF,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,EAAE,aAAa,SAAS,MAAM,eAAe;GACjD,aAAa,KAAK;GAClB,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,UAAU,cAAc,KAAK,SAAS;GACvC,CAAC;AAEF,MAAI,CAAC,KAAK,KACR,QAAO,KAAK,iBAAiB,eAAe,EAAE,MAAM,UAAU,CAAC;AAGjE,MAAI,KAAK,MAAM;GACb,MAAM,SAAS,MAAM,KAAK,EAAE,cAAc,CAAC,KAAK,MAAM,CAAC;AACvD,OAAI,KAAK,QAAQ,CAAC,KAAK,MAAM;IAC3B,MAAM,EAAE,cAAc,MAAM,qBAAqB;KAC/C;KACA,aAAa,KAAK;KAClB,SAAS,KAAK;KACd,MAAM;KACP,CAAC;AACF,2BAAuB,UAAU;SAEjC,QAAO,IAAI,OAAO;QAGpB,QAAO,IAAI,EAAE,aAAa,CAAC;GAE7B;CACH,CAAC;;;;ACjGF,MAAM,kBACJ,WACW;AACX,SAAQ,QAAR;EACE,KAAK,iEAAiE,GACpE,QAAO;EACT,KAAK,iEAAiE,kBACpE,QAAO;EACT,QACE,QAAO;;;AAIb,MAAM,yBAAyB,WAAyD;AACtF,SAAQ,QAAR;EACE,KAAK,qCAAqC,UACxC,QAAO;EACT,KAAK,qCAAqC,OACxC,QAAO;EACT,QACE,QAAO;;;AAIb,MAAa,WAAW,QAA8B;AACpD,QAAO;EACL,MAAM,IAAI;EACV,QAAQ,IAAI;EACZ,eAAe,IAAI;EACnB,WAAW,gBAAgB,IAAI,WAAW;EAC1C,WAAW,gBAAgB,IAAI,WAAW;EAC3C;;AAGH,MAAa,iBACX,MACA,WACkB;CAClB,MAAM,UAAU,OAAO;AACvB,QAAO;EACL;EACA,QAAQ,eAAe,OAAO,OAAO;EACrC,+BAA+B,gBAAgB,OAAO,+BAA+B;EACrF,mBAAmB,UAAU,sBAAsB,QAAQ,OAAO,GAAG;EACrE,eAAe,gBAAgB,SAAS,YAAY;EACpD,kBAA
kB,SAAS,SAAS;EACrC;;;;;AC9DH,MAAM,sBAAsB,EAAE,OAAO;CACnC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,MAAM,EAAE,QAAQ,CAAC,IAAI,GAAG,EAAE,SAAS,oBAAoB,CAAC;CACzD,CAAC;AAIF,eAAeC,cAAY,SAAwB;CACjD,MAAM,SAAS,oBAAoB,UAAU,QAAQ;AACrD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKA,MAAM,OAAO,KAAK;EACnB;;;;;;;AAQH,eAAsB,aAAa,SAAgD;CACjF,MAAM,EAAE,QAAQ,aAAa,SAAS,MAAMA,cAAY,QAAQ;AAOhE,QAAO,cAAc,MALJ,MAAM,OAAO,2BAA2B;EACvD;EACA,iBAAiB;EAClB,CAAC,CAEkC;;AAGtC,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,MAAM,IAAI,EAAE,QAAQ,EAAE;GACpB,aAAa;GACb,OAAO;GACR,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,SAAS,MAAM,aAAa;GAChC,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,MAAM,KAAK;GACZ,CAAC;EAEF,MAAM,kBAAkB,KAAK,OACzB,SACA;GACE,GAAG;GACH,+BAA+B,qBAAqB,OAAO,8BAA8B;GACzF,eAAe,qBAAqB,OAAO,cAAc;GAC1D;AAEL,SAAO,IAAI,gBAAgB;GAC3B;CACH,CAAC;;;;ACrEF,MAAM,wBAAwB,EAAE,OAAO;CACrC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,OAAO,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,UAAU;CACrD,CAAC;AAIF,eAAeC,cAAY,SAA0B;CACnD,MAAM,SAAS,sBAAsB,UAAU,QAAQ;AACvD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKA,OAAO,OAAO,KAAK;EACpB;;;;;;;AAQH,eAAsB,SAAS,SAA8C;CAC3E,MAAM,EAAE,QAAQ,aAAa,UAAU,MAAMA,cAAY,QAAQ;CACjE,MAAM,WAAW,UAAU;CAE3B,MAAM,UAAqB,EAAE;CAC7B,IAAI,YAAY;AAEhB,QAAO,MAAM;AACX,MAAI,YAAY,QAAQ,UAAU,MAChC;EAGF,MAAM,YAAY,WAAW,QAAS,QAAQ,SAAS;EACvD,MAAM,WAAW,cAAc,UAAa,YAAY,IAAI,YAAY;EAExE,MAAM,EAAE,cAAc,kBAAkB,MAAM,OAAO,iBAAiB;GACpE;GACA;GACA,GAAI,aAAa,SAAY,EAAE,UAAU,GAAG,EAAE;GAC/C,CAAC;EAEF,MAAM,SAAS,aAAa,IAAI,QAAQ;AAExC,MAAI,cAAc,UAAa,OAAO,SAAS,UAC7C,SAAQ,KAAK,GAAG,OAAO,MAAM,GAAG,UAAU,CAAC;MAE3C,SAAQ,KAAK,GAAG,OAAO;AAG
zB,MAAI,CAAC,cACH;AAEF,cAAY;;AAGd,QAAO;;AAGT,MAAaC,gBAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,OAAO,IAAI,eAAe,UAAU,EAAE;GACpC,OAAO;GACP,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,OAAO,MAAM,SAAS;GAC1B,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,OAAO,KAAK;GACb,CAAC;EAEF,MAAM,gBAAgB,KAAK,OACvB,OACA,KAAK,KAAK,EAAE,WAAW,GAAG,WAAW,GAAG,YAAY;GAClD,GAAG;GACH,WAAW,qBAAqB,UAAU;GAC3C,EAAE;AAEP,SAAO,IAAI,cAAc;GACzB;CACH,CAAC;;;;ACnGF,MAAa,iBAAiB,cAAwC;AACpE,QAAO;EACL,IAAI,UAAU;EACd,MAAM,UAAU;EAChB,QAAQ,UAAU;EAClB,WAAW,gBAAgB,UAAU,WAAW;EAChD,WAAW,gBAAgB,UAAU,WAAW;EACjD;;AAGH,MAAa,oBAAoB,cAA2C;AAC1E,QAAO;EACL,GAAG,cAAc,UAAU;EAC3B,kBAAkB,UAAU;EAC5B,gBAAgB,UAAU;EAC1B,UAAU,UAAU;EACrB;;;;;;;;;;ACbH,MAAM,+BAA+B,EAAE,OAAO;CAC5C,MAAM,EACH,QAAQ,CACR,IAAI,GAAG,qCAAqC,CAC5C,IAAI,IAAI,qCAAqC,CAC7C,MAAM,gBAAgB,gEAAgE,CACtF,QACE,MAAM,CAAC,EAAE,WAAW,IAAI,IAAI,CAAC,EAAE,SAAS,IAAI,EAC7C,yCACD;CACH,QAAQ,EAAE,QAAQ;CAClB,kBAAkB,EAAE,SAAS,CAAC,UAAU;CACxC,gBAAgB,EAAE,MAAM,CAAC,UAAU;CACnC,UAAU,EAAE,MAAM,CAAC,UAAU;CAC9B,CAAC;AAIF,MAAM,iBAAiB,OAAO,QAAgB,WAA2B;CACvE,MAAM,mBAAmB,MAAM,OAAO,8BAA8B,EAAE,CAAC;AACvE,KAAI,CAAC,iBAAiB,QAAQ,SAAS,OAAO,CAC5C,OAAM,IAAI,MAAM,0BAA0B,iBAAiB,QAAQ,KAAK,KAAK,CAAC,GAAG;;;;;;;AASrF,eAAsB,gBAAgB,SAAyD;CAE7F,MAAM,SAAS,6BAA6B,UAAU,QAAQ;AAC9D,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;CAEjD,MAAM,YAAY,OAAO;CAIzB,MAAM,SAAS,MAAM,mBADD,MAAM,iBAAiB,CACS;AACpD,OAAM,eAAe,UAAU,QAAQ,OAAO;CAG9C,MAAM,iBAAiB,mBAAmB,UAAU,eAAe;CACnE,MAAM,WAAW,aAAa,UAAU,SAAS;AAWjD,QAAO,eARM,MAAM,OAAO,gBAAgB;EACxC,eAAe,UAAU;EACzB,iBAAiB,UAAU;EAC3B,kBAAkB,UAAU,oBAAoB;EAChD;EACA;EACD,CAAC,EAEwB,UAAW;;AAGvC,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,MAAM,IAAI,EAAE,QAAQ,EAAE;GACpB,OAAO;GACP,aAAa;GACd,CAAC;EACF,QAAQ,IAAI,EAAE,QAAQ,EAAE;GACtB,OAAO;GACP,aAAa;GACd,CAAC;EACF,qBAAqB,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE;GACnD,OAAO;GACP,aAAa;GACd,CAAC;EACF,mBAAmB,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAC5C,OAAO;GACP,aA
Aa;GACd,CAAC;EACF,aAAa,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GACtC,OAAO;GACP,aAAa;GACd,CAAC;EACF,gBAAgB,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GACzC,OAAO;GACP,aAAa;GACd,CAAC;EACF,gBAAgB,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE,EACzC,aAAa,yDACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAElC,MAAM,YAAY,MAAM,gBAAgB;GACtC,MAAM,KAAK;GACX,QAAQ,KAAK;GACb,kBAAkB,KAAK;GACvB,gBAAgB,KAAK;GACrB,UAAU,KAAK;GAChB,CAAC;EAEF,IAAI;EACJ,MAAM,cAAc,KAAK;AACzB,MAAI,aAAa;GACf,MAAM,SAAS,oBAAoB;AACnC,OAAI,OAAO,SAAS,aAClB,OAAM,IAAI,MAAM,YAAY,YAAY,mBAAmB;GAG7D,MAAM,cAAc,KAAK,mBAAmB,OAAO;AACnD,OAAI,CAAC,YACH,OAAM,IAAI,MACR,sFACD;AAGH,OAAI,CAAC,OAAO,MAAM,aAChB,OAAM,IAAI,MACR,SAAS,YAAY,wFACtB;AAEH,UAAO,SAAS,eAAe;IAC7B,MAAM;IACN,cAAc,UAAU;IACzB;AACD,uBAAoB,OAAO;AAC3B,iBAAc;IACZ,MAAM;IACN,MAAM;IACN,aAAa,UAAU;IACxB;AAED,OAAI,CAAC,KAAK,KACR,QAAO,QAAQ,YAAY,YAAY,yBAAyB;;AAIpE,MAAI,CAAC,KAAK,KACR,QAAO,QAAQ,cAAc,KAAK,KAAK,yBAAyB;AAGlE,MAAI,KAAK,QAAQ,aAAa;AAC5B,UAAO,IAAI;IAAE,GAAG;IAAW,SAAS;IAAa,CAAC;AAClD;;AAGF,SAAO,IAAI,UAAU;AACrB,MAAI,aAAa;AACf,UAAO,IAAI,WAAW;AACtB,UAAO,IAAI,YAAY;;GAEzB;CACH,CAAC;;;;ACzKF,MAAM,+BAA+B,EAAE,OAAO,EAC5C,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,EACtE,CAAC;AAIF,eAAeC,cAAY,SAAiC;CAE1D,MAAM,SAAS,6BAA6B,UAAU,QAAQ;AAC9D,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAMjD,QAAO;EACL,QAHa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAIlD,aAAa,OAAO,KAAK;EAC1B;;;;;;;AAQH,eAAsB,gBAAgB,SAAgD;CAEpF,MAAM,EAAE,QAAQ,gBAAgB,MAAMA,cAAY,QAAQ;AAG1D,OAAM,OAAO,gBAAgB,EAC3B,aACD,CAAC;;AAGJ,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,gBAAgB,IAAI,EAAE,QAAQ,EAAE;GAC9B,OAAO;GACP,aAAa;GACd,CAAC;EACF,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAElC,MAAM,EAAE,QAAQ,gBAAgB,MAAMA,cAAY,EAChD,aAAa,KAAK,iBACnB,CAAC;EAGF,IAAI;AACJ,MAAI;AACF,eAAY,MAAM,OAAO,aAAa,EACpC,aACD,CAAC;UACI;AACN,SAAM,IAAI,MAAM,cAAc,YAAY,cAAc;;AAI1D,MAAI,CAAC,KAAK,KAOR;OANqB,MAAM,OAAO,OAChC,iDAAiD,UAAU,WAAW,KAAK,KAC3E,EACE,MAAM,QACP,CACF,KACoB,UAAU,WAAW,MAAM;AAC9C,WAAO,KAAK,gCAAgC;AAC5C;;;AAKJ,QAAM,OAAO,gBAAgB,EAC3B,aACD,CAAC;
EAGF,MAAM,WAAW,oBAAoB;EACrC,MAAM,mBAAmB,OAAO,QAAQ,SAAS,SAAS,CAAC,QACxD,GAAG,aAAa,SAAS,iBAAiB,YAC5C;AACD,MAAI,iBAAiB,SAAS,GAAG;AAC/B,QAAK,MAAM,CAAC,gBAAgB,iBAC1B,QAAO,SAAS,SAAS;AAE3B,uBAAoB,SAAS;;AAI/B,MAAI,iBAAiB,SAAS,EAC5B,QAAO,QACL,cAAc,KAAK,gBAAgB,QAAQ,iBAAiB,OAAO,8CACpE;MAED,QAAO,QAAQ,cAAc,KAAK,gBAAgB,yBAAyB;GAE7E;CACH,CAAC;;;;ACxGF,MAAM,4BAA4B,EAAE,OAAO;CACzC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC/B,CAAC;AAIF,eAAeC,cAAY,SAA8B;CACvD,MAAM,SAAS,0BAA0B,UAAU,QAAQ;AAC3D,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKD;;;;;;;AAQH,eAAsB,aAAa,SAAyD;CAC1F,MAAM,EAAE,QAAQ,gBAAgB,MAAMA,cAAY,QAAQ;CAE1D,MAAM,WAAW,MAAM,OAAO,aAAa,EACzC,aACD,CAAC;AAEF,KAAI,CAAC,SAAS,UACZ,OAAM,IAAI,MAAM,cAAc,YAAY,cAAc;AAG1D,QAAO,iBAAiB,SAAS,UAAU;;AAG7C,MAAa,aAAa,cAAc;CACtC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,YAAY,MAAM,aAAa;GACnC,aAAa,KAAK;GAClB,SAAS,KAAK;GACf,CAAC;EAEF,MAAM,qBAAqB,KAAK,OAC5B,YACA;GACE,GAAG;GACH,WAAW,qBAAqB,UAAU,UAAU;GACpD,WAAW,qBAAqB,UAAU,UAAU;GACrD;AAEL,SAAO,IAAI,mBAAmB;GAC9B;CACH,CAAC;;;;;;;;;AC/DF,eAAsB,eAAe,SAA2D;CAC9F,MAAM,QAAQ,SAAS;CACvB,MAAM,WAAW,UAAU;CAI3B,MAAM,SAAS,MAAM,mBADD,MAAM,iBAAiB,CACS;CAEpD,MAAM,UAA2B,EAAE;CACnC,IAAI,YAAY;AAOhB,QAAO,MAAM;AACX,MAAI,YAAY,QAAQ,UAAU,MAChC;EAGF,MAAM,YAAY,WAAW,QAAS,QAAQ,SAAS;EACvD,MAAM,WAAW,cAAc,UAAa,YAAY,IAAI,YAAY;EAExE,MAAM,EAAE,YAAY,kBAAkB,MAAM,OAAO,eAAe;GAChE;GACA,GAAI,aAAa,SAAY,EAAE,UAAU,GAAG,EAAE;GAC/C,CAAC;EAEF,MAAM,SAAS,WAAW,IAAI,cAAc;AAE5C,MAAI,cAAc,UAAa,OAAO,SAAS,UAC7C,SAAQ,KAAK,GAAG,OAAO,MAAM,GAAG,UAAU,CAAC;MAE3C,SAAQ,KAAK,GAAG,OAAO;AAGzB,MAAI,CAAC,cACH;AAEF,cAAY;;AAGd,QAAO;;AAGT,MAAaC,gBAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,OAAO,IAAI,eAAe,UAAU,EAAE;GACpC,OAAO;GACP,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EACl
C,MAAM,aAAa,MAAM,eAAe,EAAE,OAAO,KAAK,OAAO,CAAC;AAC9D,SAAO,IAAI,YAAY,EAAE,SAAS,EAAE,WAAW,MAAM,EAAE,CAAC;GACxD;CACH,CAAC;;;;ACzEF,MAAM,gCAAgC,EAAE,OAAO,EAC7C,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,EACtE,CAAC;AAIF,eAAeC,cAAY,SAAkC;CAC3D,MAAM,SAAS,8BAA8B,UAAU,QAAQ;AAC/D,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAMjD,QAAO;EACL,QAHa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAIlD,aAAa,OAAO,KAAK;EAC1B;;;;;;;AAQH,eAAsB,iBAAiB,SAAiD;CACtF,MAAM,EAAE,QAAQ,gBAAgB,MAAMA,cAAY,QAAQ;AAE1D,OAAM,OAAO,iBAAiB,EAC5B,aACD,CAAC;;AAGJ,MAAa,iBAAiB,cAAc;CAC1C,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,gBAAgB,IAAI,EAAE,QAAQ,EAAE;GAC9B,OAAO;GACP,aAAa;GACd,CAAC;EACF,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,EAAE,QAAQ,gBAAgB,MAAMA,cAAY,EAChD,aAAa,KAAK,iBACnB,CAAC;AAEF,MAAI,CAAC,KAAK,KAOR;OANqB,MAAM,OAAO,OAChC,+CAA+C,YAAY,eAC3D,EACE,MAAM,QACP,CACF,KACoB,OAAO;AAC1B,WAAO,KAAK,mCAAmC;AAC/C;;;AAIJ,QAAM,OAAO,iBAAiB,EAC5B,aACD,CAAC;AAEF,SAAO,QAAQ,cAAc,YAAY,0BAA0B;GACnE;CACH,CAAC;;;;ACrEF,MAAM,gBAAgB,SAA4C;AAChE,SAAQ,MAAR;EACE,KAAK,0BAA0B,MAC7B,QAAO;EACT,KAAK,0BAA0B,OAC7B,QAAO;EACT,KAAK,0BAA0B,OAC7B,QAAO;EACT,QACE,QAAO;;;AAIb,MAAa,gBAAgB,SAA4C;AACvE,SAAQ,KAAK,aAAa,EAA1B;EACE,KAAK,QACH,QAAO,0BAA0B;EACnC,KAAK,SACH,QAAO,0BAA0B;EACnC,KAAK,SACH,QAAO,0BAA0B;EACnC,QACE,OAAM,IAAI,MAAM,iBAAiB,KAAK,sCAAsC;;;AAIlF,MAAa,YAAY,SAA0C;AACjE,QAAO;EACL,QAAQ,KAAK,cAAc,UAAU;EACrC,OAAO,KAAK,cAAc,SAAS;EACnC,MAAM,aAAa,KAAK,KAAK;EAC9B;;AAGH,MAAa,aAAa;CAAC;CAAS;CAAU;CAAS;;;;ACnCvD,MAAM,0BAA0B,EAAE,OAAO;CACvC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,OAAO,EAAE,MAAM,EAAE,SAAS,uCAAuC,CAAC;CAClE,MAAM,EAAE,KAAK,YAAY,EAAE,SAAS,wBAAwB,WAAW,KAAK,KAAK,IAAI,CAAC;CACvF,CAAC;AAIF,eAAeC,cAAY,SAA4B;CACrD,MAAM,SAAS,wBAAwB,UAAU,QAAQ;AACzD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKA,OAAO,OAAO,KAAK;EACnB,MAAM,aAAa,OAAO,KAAK,KAAK;EACrC;;
;;;;;AAQH,eAAsB,WAAW,SAA2C;CAC1E,MAAM,EAAE,QAAQ,aAAa,OAAO,SAAS,MAAMA,cAAY,QAAQ;AAEvE,OAAM,OAAO,4BAA4B;EACvC;EACA;EACA;EACD,CAAC;;AAGJ,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,OAAO,IAAI,EAAE,OAAO,EAAE,EACpB,aAAa,uCACd,CAAC;EACF,MAAM,IAAI,EAAE,KAAK,WAAW,EAAE;GAC5B,aAAa,mBAAmB,WAAW,KAAK,KAAK,CAAC;GACtD,OAAO;GACR,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;AAClC,QAAM,WAAW;GACf,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,OAAO,KAAK;GACZ,MAAM,KAAK;GACZ,CAAC;AAEF,SAAO,QAAQ,SAAS,KAAK,MAAM,oCAAoC,KAAK,KAAK,IAAI;GACrF;CACH,CAAC;;;;ACjEF,MAAM,yBAAyB,EAAE,OAAO;CACtC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,OAAO,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,UAAU;CACrD,CAAC;AAIF,eAAeC,cAAY,SAA2B;CACpD,MAAM,SAAS,uBAAuB,UAAU,QAAQ;AACxD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKA,OAAO,OAAO,KAAK;EACpB;;;;;;;AAQH,eAAsB,UAAU,SAAgD;CAC9E,MAAM,EAAE,QAAQ,aAAa,UAAU,MAAMA,cAAY,QAAQ;CACjE,MAAM,WAAW,UAAU;CAE3B,MAAM,UAAsB,EAAE;CAC9B,IAAI,YAAY;AAEhB,QAAO,MAAM;AACX,MAAI,YAAY,QAAQ,UAAU,MAChC;EAGF,MAAM,YAAY,WAAW,QAAS,QAAQ,SAAS;EACvD,MAAM,WAAW,cAAc,UAAa,YAAY,IAAI,YAAY;EAExE,MAAM,EAAE,wBAAwB,kBAAkB,MAAM,OAAO,2BAA2B;GACxF;GACA;GACA,GAAI,aAAa,SAAY,EAAE,UAAU,GAAG,EAAE;GAC/C,CAAC;EAEF,MAAM,SAAS,uBAAuB,IAAI,SAAS;AAEnD,MAAI,cAAc,UAAa,OAAO,SAAS,UAC7C,SAAQ,KAAK,GAAG,OAAO,MAAM,GAAG,UAAU,CAAC;MAE3C,SAAQ,KAAK,GAAG,OAAO;AAGzB,MAAI,CAAC,cACH;AAEF,cAAY;;AAGd,QAAO;;AAGT,MAAa,cAAc,cAAc;CACvC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,OAAO,IAAI,eAAe,UAAU,EAAE;GACpC,OAAO;GACP,aAAa;GACd,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,QAAQ,MAAM,UAAU;GAC5B,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,OAAO,KAAK;GACb,CAAC;AAEF,SAAO,IAAI,MAAM;GACjB;CACH,CAAC;;;;ACrGF,MAAM,0BAA0B,EAAE,OAAO;CACvC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,OAAO,EAAE,Q
AAQ,CAAC,MAAM,EAAE,SAAS,uCAAuC,CAAC;CAC5E,CAAC;AAIF,eAAeC,cAAY,SAA4B;CACrD,MAAM,SAAS,wBAAwB,UAAU,QAAQ;AACzD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKA,OAAO,OAAO,KAAK;EACpB;;;;;;;AAQH,eAAsB,WAAW,SAA2C;CAC1E,MAAM,EAAE,QAAQ,aAAa,UAAU,MAAMA,cAAY,QAAQ;AAEjE,OAAM,OAAO,4BAA4B;EACvC;EACA;EACD,CAAC;;AAGJ,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,OAAO,IAAI,EAAE,OAAO,EAAE,EACpB,aAAa,uCACd,CAAC;EACF,GAAG;EACJ,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;AAClC,MAAI,CAAC,KAAK,KAOR;OANqB,MAAM,OAAO,OAChC,yCAAyC,KAAK,MAAM,kCACpD,EACE,MAAM,QACP,CACF,KACoB,OAAO;AAC1B,WAAO,KAAK,0BAA0B;AACtC;;;AAIJ,QAAM,WAAW;GACf,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,OAAO,KAAK;GACb,CAAC;AAEF,SAAO,QAAQ,SAAS,KAAK,MAAM,2BAA2B;GAC9D;CACH,CAAC;;;;AC5EF,MAAM,0BAA0B,EAAE,OAAO;CACvC,aAAa,EAAE,KAAK,EAAE,SAAS,qCAAqC,CAAC,CAAC,UAAU;CAChF,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,OAAO,EAAE,QAAQ,CAAC,MAAM,EAAE,SAAS,uCAAuC,CAAC;CAC3E,MAAM,EAAE,KAAK,YAAY,EAAE,SAAS,wBAAwB,WAAW,KAAK,KAAK,IAAI,CAAC;CACvF,CAAC;AAIF,eAAeC,cAAY,SAA4B;CACrD,MAAM,SAAS,wBAAwB,UAAU,QAAQ;AACzD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAUjD,QAAO;EACL,QAPa,MAAM,mBADD,MAAM,iBAAiB,CACS;EAQlD,aAPkB,gBAAgB;GAClC,aAAa,OAAO,KAAK;GACzB,SAAS,OAAO,KAAK;GACtB,CAAC;EAKA,OAAO,OAAO,KAAK;EACnB,MAAM,aAAa,OAAO,KAAK,KAAK;EACrC;;;;;;;AAQH,eAAsB,WAAW,SAA2C;CAC1E,MAAM,EAAE,QAAQ,aAAa,OAAO,SAAS,MAAMA,cAAY,QAAQ;AAEvE,OAAM,OAAO,4BAA4B;EACvC;EACA;EACA;EACD,CAAC;;AAGJ,MAAa,gBAAgB,cAAc;CACzC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,OAAO,IAAI,EAAE,OAAO,EAAE,EACpB,aAAa,uCACd,CAAC;EACF,MAAM,IAAI,EAAE,KAAK,WAAW,EAAE;GAC5B,aAAa,uBAAuB,WAAW,KAAK,KAAK,CAAC;GAC1D,OAAO;GACR,CAAC;EACH,CAAC,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;AAClC,QAAM,WAAW;GACf,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,OAAO,KAAK;GACZ,MAAM,KAAK;GACZ,CAAC;AAEF,SAAO,QAAQ,SAAS,KAAK,MAAM,qBAAqB,KAAK,KAAK,IAAI;GACtE;CACH,CAAC;;;;ACvEF,SAA
S,iBAAyB;AAChC,QAAO,EAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAgCrB,SAAS,iBAAyB;AAChC,QAAO,EAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CrB,eAAsB,kBAAkB,QAAsC;CAC5E,MAAM,YAAY,KAAK,QAAQ,YAAY,EAAE,QAAQ;AACrD,MAAG,UAAU,WAAW,EAAE,WAAW,MAAM,CAAC;CAE5C,MAAM,YAAY,KAAK,KAAK,WAAW,SAAS,OAAO,WAAW;CAClE,MAAM,aAAa,KAAK,KAAK,WAAW,SAAS,OAAO,KAAK;CAC7D,MAAM,eAAe,WAAW,QAAQ,gBAAgB,GAAG,gBAAgB;AAC3E,MAAG,cAAc,WAAW,aAAa;CAEzC,IAAI;AACJ,KAAI;AACF,aAAW,MAAM,iBAAiB;SAC5B;AACN,aAAW;;AAGb,OAAM,SAAS,MACb,SAAS,aAAa;EACpB,OAAO;EACP,QAAQ;GACN,MAAM;GACN,QAAQ;GACR,WAAW;GACX,QAAQ;GACR,sBAAsB;GACtB,SAAS,EACP,UAAU,YACX;GACF;EACD,UAAU,WAAW,QAAQ,CAAC,WAAW,GAAG,EAAE;EAC9C,SAAS,EACP,gBAAgB,CAAC,QAAQ,SAAS,EACnC;EACD;EACA,WAAW;GACT,mBAAmB;GACnB,aAAa;GACb,0BAA0B;GAC3B;EACD,UAAU;EACX,CAAC,CACH;AAED,QAAOC,KAAG,aAAa,YAAY,QAAQ;;;;;AC5H7C,SAAS,eAAe,OAAwB;AAC9C,KAAI,iBAAiB,MACnB,QAAO,MAAM;AAEf,QAAO,OAAO,MAAM;;;;;;;AAetB,SAAgB,uBAAuB,MAAyC;CAC9E,MAAM,UAAU,eAAe,KAAK,MAAM;AAE1C,KAAI,QAAQ,SAAS,8BAA8B,CACjD,QAAOC,eAAS;EACd,MAAM;EACN,SAAS,iBAAiB,KAAK,eAAe,UAAU;EACxD,YAAY;EACb,CAAC;AAGJ,KACE,KAAK,WAAW,SAChB,QAAQ,SACN,uFACD,CAED,QAAOA,eAAS;EACd,MAAM;EACN,SAAS,iDAAiD,KAAK,UAAU;EACzE,YACE;EACH,CAAC;AAGJ,KAAI,KAAK,WAAW,SAAS,QAAQ,SAAS,oCAAoC,EAAE;EAClF,MAAM,eAAe,QAClB,MAAM,oCAAoC,CAC1C,GAAG,EAAE,EACJ,MAAM,KAAK,CACZ,GAAG,EAAE,EACJ,MAAM;AAEV,SAAOA,eAAS;GACd,MAAM;GACN,SAAS;GACT,YAAY,gBAAgB;GAC7B,CAAC;;AAGJ,QAAO,KAAK,iBAAiB,QAAQ,KAAK,QAAQ,IAAI,MAAM,QAAQ;;;;;;;;;;ACvDtE,SAAgB,uBAAuB,OAAwB;AAC7D,KAAI,MAAM,MAAM,CAAC,WAAW,EAC1B,QAAO;AAGT,KAAI;AACF,UAAM,MAAM;AACZ,SAAO;SACD;AACN,SAAO;;;;;;;;;;;ACXX,SAAgB,mBAAmB,OAAwB;CACzD,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;CACpB,IAAI,oBAAoB;CACxB,IAAI,iBAAgC;CACpC,IAAI,mCAAmC;AAEvC,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;EACxC,MAAM,OAAO,MAAM;EACnB,MAAM,OAAO,MAAM,IAAI;AAEvB,MAAI,eAAe;AACjB,OAAI,SAAS,KACX,iBAAgB;AAElB;;AAGF,MAAI,oBAAoB,GAAG;AACzB,OAAI,SAAS,OAAO,SAAS,KAAK;AAChC,yBAAqB;AACrB,SAAK;AACL;;AAEF,OAAI,SAAS,OAAO,SAAS,KAAK;AAChC,yBAAqB;AACrB,SAAK;;AAEP;;AAGF,MAAI,kBAAkB,MAAM;AAC1
B,OAAI,MAAM,WAAW,gBAAgB,EAAE,EAAE;AACvC,SAAK,eAAe,SAAS;AAC7B,qBAAiB;;AAEnB;;AAGF,MAAI,eAAe;AACjB,OAAI,SAAS,OAAO,SAAS,KAAK;AAChC,SAAK;AACL;;AAEF,OAAI,SAAS,IACX,iBAAgB;AAElB;;AAGF,MAAI,eAAe;AACjB,OAAI,SAAS,QAAO,SAAS,MAAK;AAChC,SAAK;AACL;;AAEF,OAAI,SAAS,KACX,iBAAgB;AAElB;;AAGF,MAAI,SAAS,OAAO,SAAS,KAAK;AAChC,mBAAgB;AAChB,QAAK;AACL;;AAGF,MAAI,SAAS,OAAO,SAAS,KAAK;AAChC,uBAAoB;AACpB,QAAK;AACL;;AAGF,MAAI,SAAS,KAAK;AAChB,sCAAmC;AACnC,mBAAgB;AAChB;;AAGF,MAAI,SAAS,MAAK;AAChB,sCAAmC;AACnC,mBAAgB;AAChB;;AAGF,MAAI,SAAS,KAAK;GAChB,MAAM,OAAO,MAAM,MAAM,EAAE;GAC3B,MAAM,QAAQ,KAAK,MAAM,8BAA8B,IAAI,KAAK,MAAM,QAAQ;AAC9E,OAAI,SAAS,MAAM;AACjB,uCAAmC;AACnC,qBAAiB,MAAM;AACvB,SAAK,MAAM,GAAG,SAAS;AACvB;;;AAIJ,MAAI,SAAS,KAAK;AAChB,sCAAmC;AACnC;;AAGF,MAAI,CAAC,KAAK,KAAK,KAAK,CAClB,oCAAmC;;AAIvC,QACE,oCACA,CAAC,iBACD,CAAC,iBACD,sBAAsB,KACtB,kBAAkB;;;;;;;;;;AC9GtB,SAAgB,wBAAwB,SAAyB;CAC/D,IAAI;AACJ,KAAI;AACF,eAAa,MAAMC,QAAM;UAClB,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,QAAM,IAAI,MACR,oBAAoB,QAAQ,wGAC7B;;CAEH,MAAM,4BAAY,IAAI,KAAa;CAEnC,MAAM,UAAU,YAAY,YAAY,EACtC,WAAW,aAAa;AACtB,YAAU,IAAI,SAAS,KAAK;AAE5B,SAAO,OAAO,CAAC,SAAS,SAAS;AACjC,SAAO;IAEV,EAAE;AAEH,MAAK,MAAM,aAAa,WACtB,SAAQ,UAAU,UAAU;AAG9B,QAAO,CAAC,GAAG,UAAU;;AAGvB,SAAS,gBAAgB,aAA0C;CACjE,MAAM,2BAAW,IAAI,KAAqB;AAE1C,MAAK,MAAM,QAAQ,YACjB,KAAI,KAAK,SAAS,SAAS;EACzB,MAAM,YAAY,KAAK,KAAK;EAC5B,MAAM,QAAQ,KAAK,KAAK,SAAS;AACjC,WAAS,IAAI,OAAO,UAAU;;AAIlC,QAAO;;;;;;;;;;;;;;AAmBT,SAAgB,sBAAsB,SAAoC;AACxE,KAAI;EACF,MAAM,aAAa,MAAMA,QAAM;AAE/B,OAAK,MAAM,aAAa,YAAY;AAClC,OAAI,UAAU,SAAS,YAAY,CAAC,UAAU,QAC5C;GAGF,MAAM,WAAW,gBAAgB,UAAU,QAAQ,EAAE,CAAC;GACtD,MAAM,QAAsB,EAAE;GAC9B,IAAI,cAAc;AAElB,QAAK,MAAM,UAAU,UAAU,QAC7B,KAAI,OAAO,KAAK,SAAS,SAAS,OAAO,KAAK,SAAS,KAAK;AAC1D,kBAAc;AACd,QAAI,OAAO,KAAK,OAAO;KACrB,MAAM,WAAW,SAAS,IAAI,OAAO,KAAK,MAAM,KAAK;AACrD,WAAM,KAAK;MAAE,MAAM;MAAY,WAAW,WAAW,CAAC,SAAS,GAAG,EAAE;MAAE,CAAC;UAEvE,OAAM,KAAK;KAAE,MAAM;KAAY,WAAW,CAAC,GAAG,IAAI,IAAI,SAAS,QAAQ,CAAC,CAAC;KAAE,CAAC;UAEzE;IACL,MAAM,OAAO,OAAO,OAAO,SAAS,OAAO,KAAK,S
AAS,QAAQ,OAAO,KAAK,OAAO;AACpF,QAAI,KACF,OAAM,KAAK;KAAE,MAAM;KAAY;KAAM,CAAC;;AAK5C,UAAO,cAAc,QAAQ;;AAG/B,SAAO;SACD;AACN,SAAO;;;;;;;;;;;;ACvFX,eAAsB,mBACpB,QACA,WAC4B;CAC5B,MAAM,6BAAgC,IAAI,KAAK;CAC/C,MAAM,WAAW,OAAO,KAAK;AAE7B,KAAI,CAAC,YAAY,EAAE,WAAW,aAAa,SAAS,MAAM,WAAW,EACnE,QAAO;CAGT,MAAM,YAAY,qBAAqB,SAAS;AAEhD,OAAM,QAAQ,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,MAAI;GACF,MAAM,SAAS,MAAM,OAAO,cAAc,SAAS,CAAC;AAEpD,QAAK,MAAM,iBAAiB,OAAO,OAAO,OAAO,EAAE;IACjD,MAAM,SAAS,mBAAmB,UAAU,cAAc;AAC1D,QAAI,CAAC,OAAO,QACV;AAGF,eAAW,IAAI,OAAO,KAAK,MAAM,OAAO,KAAK,OAAO,KAAK,OAAO,CAAC;;UAE7D;GAGR,CACH;AAED,QAAO;;;;;ACVT,MAAM,oBAAoB,EAAE,KAAK,CAAC,OAAO,MAAM,CAAC;AAChD,MAAM,yBAAyB,EAAE,OAAO;CACtC,aAAa,EAAE,QAAQ,CAAC,UAAU;CAClC,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,QAAQ;CACR,aAAa,EAAE,QAAQ;CACxB,CAAC;AACF,MAAM,qBAAqB,uBAAuB,OAAO,EACvD,OAAO,EAAE,QAAQ,EAClB,CAAC;AA4CF,eAAe,yBACb,aACA,SACA,QACA,YACiB;AACjB,KAAI,WAAW,WAAW,EACxB,OAAM,IAAI,MAAM,wCAAwC;AAG1D,KAAI,WAAW,WAAW,EACxB,QAAO,WAAW;CAGpB,MAAM,YAAY,wBAAwBC,QAAM;AAChD,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MACR,8DAA8D,WAAW,KAAK,KAAK,CAAC,GACrF;CAGH,MAAM,mBAAmB,MAAM,sBAAsB;EACnD;EACA;EACA;EACA;EACD,CAAC;CAEF,MAAM,gBAAgB,UAAU,QAAQ,aAAa,CAAC,iBAAiB,IAAI,SAAS,CAAC;AACrF,KAAI,cAAc,SAAS,EACzB,OAAM,IAAI,MAAM,gDAAgD,cAAc,KAAK,KAAK,CAAC,GAAG;CAG9F,MAAM,sBAAsB,IAAI,IAAI,iBAAiB,QAAQ,CAAC;AAC9D,KAAI,oBAAoB,SAAS,EAC/B,QAAO,CAAC,GAAG,oBAAoB,CAAC;AAGlC,OAAM,IAAI,MACR,oDAAoD,CAAC,GAAG,oBAAoB,CAAC,KAAK,KAAK,CAAC,GACzF;;AAGH,eAAe,YAAY,SAA2B;CACpD,MAAM,SAAS,uBAAuB,UAAU,QAAQ;AAExD,KAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;CAOjD,MAAM,SAAS,MAAM,mBAJD,MAAM,gBAAgB;EACxC,YAAY;EACZ,SAAS,OAAO,KAAK;EACtB,CAAC,CACkD;CACpD,MAAM,cAAc,gBAAgB;EAClC,aAAa,OAAO,KAAK;EACzB,SAAS,OAAO,KAAK;EACtB,CAAC;CACF,MAAM,EAAE,WAAW,MAAM,WAAW,QAAQ,WAAW;CACvD,MAAM,aAAa,qBAAqB,OAAO;CAC/C,MAAM,EAAE,gBAAgB,MAAM,OAAO,eAAe;EAClD;EACA,iBAAiB,OAAO;EACzB,CAAC;AAEF,KAAI,CAAC,aAAa,cAChB,OAAM,IAAI,MAAM,eAAe,OAAO,KAAK,uCAAuC;CAGpF,MAAM,EAAE,aAAa,wBAAwB,MAAM,OAAO,mBAAmB;EAC9D;EACb,
eAAe,YAAY;EAC3B,MAAM,OAAO,KAAK;EACnB,CAAC;AAEF,KAAI,CAAC,oBACH,OAAM,IAAI,MAAM,gBAAgB,OAAO,KAAK,YAAY,aAAa;AAGvE,QAAO;EACL,QAAQ,OAAO,KAAK;EACpB;EACA;EACA;EACA;EACA;EACA;EACD;;AAGH,eAAe,SACb,QACA,SACA,MAMiC;CACjC,MAAM,UAAU,mBAAmB,KAAK,MAAM;CAC9C,MAAM,WAAW,MAAM,cAAc;EACnC;EACA,aAAa,KAAK;EAClB,MAAM,aAAa,KAAK,UAAU;EAClC,MAAM,KAAK;EACX,KAAK,KAAK,UAAU;GAClB,WAAW,KAAK;GAChB;GACD,CAAC;EACF;EACD,CAAC;AAEF,KAAI,CAAC,SAAS,QACZ,OAAM,IAAI,MAAM,SAAS,MAAM;AAGjC,QAAO;EACL,QAAQ;EACR,WAAW,KAAK;EAChB,OAAO,KAAK;EACZ,QAAQ,qBAAqB,SAAS,OAAO;EAC9C;;AAGH,eAAe,SACb,QACA,SACA,aACA,aACA,MAKiC;CACjC,MAAM,EAAE,cAAc,gBAAgB,MAAM,sBAC1C,YAAY,KACZ,YAAY,UACZ,YAAY,aACb;CAED,MAAM,WAAW,MAAM,cAAc;EACnC;EACA,aAAa,KAAK;EAClB,MAAM;EACN,MAAM,KAAK;EACX,KAAK,KAAK,UAAU;GAClB,UAAU,GAAG,YAAY,IAAI;GAC7B;GACA,OAAO,KAAK;GACb,CAAC;EACF;EACD,CAAC;AAEF,KAAI,CAAC,SAAS,QACZ,OAAM,IAAI,MAAM,SAAS,MAAM;AAGjC,QAAO;EACL,QAAQ;EACR,OAAO,KAAK;EACZ,QAAQ,qBAAqB,SAAS,OAAO;EAC9C;;AAGH,SAAS,qBAAqB,QAAyB;AACrD,KAAI,CAAC,OACH,QAAO;AAGT,KAAI;AACF,SAAO,KAAK,MAAM,OAAO;SACnB;AACN,SAAO;;;;;;;;;;;;AAaX,eAAsB,yBAAyB,MAKhB;AAC7B,KAAI,KAAK,SAAS,KAChB,QAAO;EACL,MAAM;EACN,OAAO,KAAK;EACb;AAGH,KAAI,KAAK,QAAQ,KACf,QAAO;EACL,MAAM;EACN,OAAO,MAAM,GAAG,SAAS,KAAK,MAAM,QAAQ;EAC7C;AAGH,KAAI,KAAK,KACP,QAAO,MAAM,wBAAwB,KAAK,OAAO;AAGnD,QAAO,EACL,MAAM,QACP;;AAGH,eAAe,wBAAwB,QAAiD;AACtF,KAAI,CAAC,QAAQ,MAAM,SAAS,CAAC,QAAQ,OAAO,MAC1C,OAAM,IAAI,MACR,4FACD;CAGH,MAAM,SAAS,kBAAkB;CAEjC,MAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,KAAK,QAAQ,EAAE,gBAAgB,CAAC;CACtE,MAAM,gBAAgB,WAAW,QAAQ,QAAQ;CACjD,MAAM,WAAW,KAAK,KAAK,SAAS,SAAS,gBAAgB;CAC7D,MAAM,eAAe;AAErB,KAAI;AACF,QAAM,GAAG,UAAU,UAAU,cAAc,QAAQ;AACnD,MAAI;AACF,SAAM,aAAa,UAAU,OAAO;WAC7B,OAAO;AACd,SAAM,IAAI,MACR,gCAAgC,OAAO,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACnG;;EAGH,MAAM,cAAc,MAAM,GAAG,SAAS,UAAU,QAAQ;AACxD,MAAI,YAAY,MAAM,CAAC,WAAW,KAAK,gBAAgB,aACrD,QAAO,EACL,MAAM,SACP;AAGH,SAAO;GACL,MAAM;GACN,OAAO;GACR;WACO;AACR,QAAM,GAAG,GAAG,SAAS;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;;;;;;;;AAS1D,eAAsB,MAAM,SAAqD;CAC/E,MAAM,SAAS,mBAAmB,UAAU,QAAQ;AACpD,KAAI
,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,OAAO,MAAM,OAAO,GAAG,QAAQ;AAIjD,QAAO,OADU,MAAM,qBAAqB,OAAO,KAAK,EAClC,OAAO,KAAK,MAAM;;AAG1C,eAAe,qBACb,SAC0D;CAC1D,MAAM,EAAE,QAAQ,aAAa,QAAQ,aAAa,qBAAqB,QAAQ,eAC7E,MAAM,YAAY,QAAQ;CAC5B,MAAM,cAAc,MAAM,kBAAkB,OAAO;CACnD,MAAM,UAAU,OAAO,mBAAmB;EACxC,WAAW,YAAY;EACvB,iBAAiB,oBAAoB;EACtC,CAAC;AAEF,QAAO,OAAO,gBAAwB;EACpC,IAAI;AAEJ,MAAI;AACF,WAAQ,QAAR;IACE,KAAK;AACH,iBAAY,MAAM,yBAAyB,aAAa,aAAa,QAAQ,WAAW;AAOxF,YAAO,kBANQ,MAAM,SAAS,QAAQ,SAAS;MAC7C;MACA;MACA;MACA,OAAO;MACR,CAAC,EAC+B,QAAQ,WAAW,YAAY;IAElE,KAAK,MACH,QAAO,MAAM,SAAS,QAAQ,SAAS,aAAa,qBAAqB;KACvE;KACA;KACA,OAAO;KACR,CAAC;IACJ,QACE,OAAM,IAAI,MAAM,6BAA6B,SAAyB;;WAEnE,OAAO;AACd,SAAM,uBAAuB;IAC3B;IACA;IACA;IACA,aAAa,QAAQ;IACtB,CAAC;;;;AAKR,SAAS,2BAA2B,OAAyB;AAC3D,KAAI,EAAE,iBAAiB,UAAU,EAAE,UAAU,OAC3C,QAAO;AAET,QAAO,MAAM,SAAS,eAAe,MAAM,SAAS;;;;;;;AAQtD,SAAgB,mBAAmB,OAAmC;CACpE,MAAM,UAAU,MAAM,MAAM;AAC5B,KAAI,CAAC,QAAQ,WAAW,KAAK,CAC3B,QAAO;AAGT,KAAI,YAAY,SAAS,YAAY,SACnC,QAAO;AAGT,KAAI,YAAY,YAAY,YAAY,SAAS,YAAY,MAC3D,QAAO;AAGT,KAAI,YAAY,aAAa,YAAY,MACvC,QAAO;AAGT,QAAO;;;;;;;;AAST,SAAgB,2BACd,eACA,aACqB;AACrB,KAAI,cAAc,WAAW,KAAK,YAAY,WAAW,EACvD,QAAO;AAGT,QAAO;;;;;AAMT,SAAS,kBAAwB;AAC/B,SAAQ,OAAO,MAAM,QAAU;;AAGjC,eAAe,QACb,SAGe;AACf,KAAI,CAAC,QAAQ,MAAM,SAAS,CAAC,QAAQ,OAAO,MAC1C,OAAM,IAAI,MACR,4FACD;CAGH,MAAM,UAAU,MAAM,qBAAqB,QAAQ;CACnD,MAAM,KAAK,gBAAgB;EACzB,OAAO,QAAQ;EACf,QAAQ,QAAQ;EACjB,CAAC;AAEF,QAAO,KAAK,YAAY,QAAQ,OAAO,aAAa,CAAC,aAAa;AAClE,QAAO,KAAK,sCAAsC;CAElD,MAAM,QAAkB,EAAE;AAE1B,KAAI;AACF,SAAO,MAAM;GACX,MAAM,SAAS,MAAM,WAAW,IAAI,GAAG,QAAQ,OAAO,MAAM;GAC5D,IAAI;GACJ,IAAI,kBAA8C;GAClD,MAAM,aAAa,IAAI,iBAAiB;GACxC,MAAM,qBAAqB;AACzB,sBAAkB,2BAA2B,OAAO,GAAG,KAAK;AAC5D,QAAI,oBAAoB,SAAS;AAC/B,WAAM,SAAS;AACf,QAAG,MAAM,MAAM;MACb,MAAM;MACN,MAAM;MACP,CAAC;AACF,aAAQ,OAAO,MAAM,KAAK;UAE1B,IAAG,OAAO;AAEZ,eAAW,OAAO;;AAGpB,MAAG,KAAK,UAAU,aAAa;AAE/B,OAAI;AACF,WAAO,MAAM,GAAG,SAAS,QAAQ,EAC/B,QAAQ,WAAW,QACpB,CAAC;YACK,OAAO;AACd,OAAG,IAAI,UAAU,aAAa;AAC9B,QAAI,WAAW,OAAO,SAAS;AAC7B,SAAI,oBAAoB,OACtB;AAEF;;AAEF,QAAI,2BA
A2B,MAAM,CACnC;AAEF,UAAM;aACE;AACR,OAAG,IAAI,UAAU,aAAa;;GAEhC,MAAM,UAAU,KAAK,MAAM;AAE3B,OAAI,MAAM,WAAW,KAAK,YAAY,GACpC;AAGF,OAAI,MAAM,WAAW,GAAG;IACtB,MAAM,UAAU,mBAAmB,QAAQ;AAC3C,QAAI,YAAY,OACd;AAEF,QAAI,YAAY,QAAQ;AACtB,mBAAc,QAAQ,OAAO;AAC7B;;AAEF,QAAI,YAAY,SAAS;AACvB,sBAAiB;AACjB;;AAEF,QAAI,YAAY,WAAW;AACzB,YAAO,KAAK,oBAAoB,UAAU;AAC1C;;;AAIJ,SAAM,KAAK,KAAK;AAEhB,OAAI,QAAQ,WAAW,OACrB;QAAI,CAAC,mBAAmB,MAAM,KAAK,KAAK,CAAC,CACvC;cAEO,CAAC,uBAAuB,MAAM,KAAK,KAAK,CAAC,CAClD;GAGF,MAAM,YAAY,iBAAiB,OAAO,QAAQ,OAAO;AACzD,SAAM,SAAS;AAEf,OAAI,UAAU,WAAW,EACvB;AAGF,OAAI;AACF,QAAI,QAAQ,WAAW,OAAO;KAC5B,MAAMC,WAAS,MAAM,QAAQ,UAAU;AACvC,SAAIA,SAAO,WAAW,MACpB,OAAM,IAAI,MAAM,uCAAuCA,SAAO,SAAS;AAEzE,oBAAeA,UAAQ,EAAE,MAAM,QAAQ,MAAM,CAAC;AAC9C;;IAGF,MAAM,SAAS,MAAM,QAAQ,UAAU;AACvC,QAAI,OAAO,WAAW,MACpB,OAAM,IAAI,MAAM,uCAAuC,OAAO,SAAS;AAEzE,mBAAe,QAAQ,EAAE,MAAM,QAAQ,MAAM,CAAC;YACvC,OAAO;AACd,QAAI,WAAW,MAAM,EAAE;AACrB,YAAO,IAAI,MAAM,QAAQ,CAAC;AAC1B;;AAEF,QAAI,iBAAiB,OAAO;AAC1B,YAAO,MAAM,MAAM,QAAQ;AAC3B;;AAEF,WAAO,MAAM,OAAO,MAAM,CAAC;;;WAGvB;AACR,KAAG,OAAO;;;AAId,SAAS,iBAAiB,OAAiB,QAA6B;AACtE,KAAI,WAAW,MACb,QAAO,MAAM,KAAK,KAAK,CAAC,MAAM;CAGhC,IAAI,MAAM,MAAM;AAChB,QAAO,MAAM,KAAK,MAAM,MAAM,GAAG,MAAM,KAAK,GAC1C,QAAO;AAET,QAAO,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,KAAK,CAAC,MAAM;;AAG9C,SAAS,cAAc,QAA2B;AAChD,QAAO,IAAI,iBAAiB;AAC5B,QAAO,IAAI,4CAA4C;AACvD,QAAO,IAAI,8CAA8C;AACzD,QAAO,IAAI,sCAAsC;AACjD,QAAO,IAAI,6CAA6C;AACxD,KAAI,WAAW,OAAO;AACpB,SAAO,IAAI,6DAA6D;AACxE;;AAEF,QAAO,IAAI,mEAAmE;;;;;;;AAQhF,eAAe,SAAS,SAA8D;CACpF,MAAM,SAAS,MAAM,MAAM;EACzB,GAAG;EACH,QAAQ;EACT,CAAC;AAEF,KAAI,OAAO,WAAW,MACpB,OAAM,IAAI,MAAM,uCAAuC,OAAO,SAAS;AAGzE,QAAO;;;;;;;AAQT,eAAe,SAAS,SAA8D;CACpF,MAAM,SAAS,MAAM,MAAM;EACzB,GAAG;EACH,QAAQ;EACT,CAAC;AAEF,KAAI,OAAO,WAAW,MACpB,OAAM,IAAI,MAAM,uCAAuC,OAAO,SAAS;AAGzE,QAAO;;AAGT,eAAe,kBACb,QACA,QACA,WACA,YACiC;AACjC,KAAI,CAAC,qBAAqB,OAAO,OAAO,IAAI,OAAO,OAAO,KAAK,WAAW,EACxE,QAAO;CAGT,MAAM,WAAW,sBAAsBC,WAAS;AAChD,KAAI,CAAC,SACH,QAAO;AAGT,KAAI;EAEF,MAAM,gBAAgB,yBAAyB,UAD5B,MAAM,mBAAmB,QAAQ,UAAU,CAC
M;AACpE,MAAI,cAAc,WAAW,EAC3B,QAAO;EAGT,MAAM,cAAc,OAAO,OAAO,KAAK,KAAK,QAAQ,qBAAqB,KAAK,cAAc,CAAC;AAE7F,SAAO;GACL,GAAG;GACH,QAAQ;IACN,GAAG,OAAO;IACV,MAAM;IACP;GACF;SACK;AACN,SAAO;;;AAIX,MAAM,qBAAqB,CAAC,KAAK;AAEjC,SAAS,yBACP,UACA,YACU;CACV,MAAM,QAAkB,EAAE;AAE1B,MAAK,MAAM,QAAQ,SACjB,KAAI,KAAK,SAAS,WAChB,OAAM,KAAK,KAAK,KAAK;KAErB,MAAK,MAAM,YAAY,KAAK,WAAW;AACrC,QAAM,KAAK,GAAG,mBAAmB;AACjC,QAAM,KAAK,GAAI,WAAW,IAAI,SAAS,IAAI,EAAE,CAAE;;AAKrD,QAAO;;AAGT,SAAS,qBAAqB,KAAmB,eAAuC;CACtF,MAAM,UAAwB,EAAE;CAChC,MAAM,UAAU,IAAI,IAAI,OAAO,KAAK,IAAI,CAAC;CAKzC,MAAM,kCAAkB,IAAI,KAAqB;AACjD,MAAK,MAAM,OAAO,QAChB,iBAAgB,IAAI,IAAI,aAAa,EAAE,IAAI;AAG7C,MAAK,MAAM,OAAO,eAAe;EAC/B,MAAM,WAAW,gBAAgB,IAAI,IAAI,aAAa,CAAC;AACvD,MAAI,YAAY,QAAQ,QAAQ,IAAI,SAAS,EAAE;AAC7C,WAAQ,YAAY,IAAI;AACxB,WAAQ,OAAO,SAAS;AACxB,mBAAgB,OAAO,IAAI,aAAa,CAAC;;;AAI7C,MAAK,MAAM,OAAO,QAChB,SAAQ,OAAO,IAAI;AAGrB,QAAO;;AAGT,MAAa,eAAe,cAAc;CACxC,MAAM;CACN,aAAa;CACb,MAAM,EACH,OAAO;EACN,GAAG;EACH,GAAG;EACH,GAAG;EACH,QAAQ,IAAI,mBAAmB,EAC7B,aAAa,6BACd,CAAC;EACF,OAAO,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAChC,OAAO;GACP,aAAa;GACd,CAAC;EACF,MAAM,IAAI,EAAE,QAAQ,CAAC,UAAU,EAAE;GAC/B,OAAO;GACP,aAAa;GACd,CAAC;EACF,MAAM,IAAI,EAAE,SAAS,CAAC,QAAQ,MAAM,EAAE,EACpC,aAAa,iEACd,CAAC;EACF,aAAa,IAAI,EAAE,QAAQ,EAAE;GAC3B,OAAO;GACP,aAAa;GACd,CAAC;EACH,CAAC,CACD,aAAa,MAAM,QAAQ;AAC1B,MAAI,KAAK,SAAS,QAAQ,KAAK,QAAQ,KACrC,KAAI,SAAS;GACX,MAAM;GACN,MAAM,CAAC,OAAO;GACd,SAAS;GACV,CAAC;AAGJ,MAAI,KAAK,QAAQ,KAAK,SAAS,KAC7B,KAAI,SAAS;GACX,MAAM;GACN,MAAM,CAAC,OAAO;GACd,SAAS;GACV,CAAC;AAGJ,MAAI,KAAK,QAAQ,KAAK,QAAQ,KAC5B,KAAI,SAAS;GACX,MAAM;GACN,MAAM,CAAC,OAAO;GACd,SAAS;GACV,CAAC;GAEJ,CACD,QAAQ;CACX,KAAK,eAAe,OAAO,SAAS;EAClC,MAAM,OAAO,MAAM,yBAAyB;GAC1C,OAAO,KAAK;GACZ,MAAM,KAAK;GACX,MAAM,KAAK;GACX,QAAQ,KAAK;GACd,CAAC;EAEF,MAAM,gBAAkC;GACtC,aAAa,KAAK;GAClB,SAAS,KAAK;GACd,YAAY,KAAK;GACjB,QAAQ,KAAK;GACb,aAAa,KAAK;GACnB;AAED,MAAI,KAAK,SAAS,SAAS;AACzB,UAAO,KAAK,uDAAuD;AACnE;;AAGF,MAAI,KAAK,SAAS,QAAQ;AACxB,SAAM,QAAQ;IACZ,GAAG;IACH,MAAM,KAAK;IACZ,CAAC;AACF;;EAGF,MAAM,cAAc,KAAK;AAE
zB,MAAI,KAAK,WAAW,OAAO;AAKzB,kBAJe,MAAM,SAAS;IAC5B,GAAG;IACH,OAAO;IACR,CAAC,EACqB,EAAE,MAAM,KAAK,MAAM,CAAC;AAC3C;;AAOF,iBAJe,MAAM,SAAS;GAC5B,GAAG;GACH,OAAO;GACR,CAAC,EACqB,EAAE,MAAM,KAAK,MAAM,CAAC;GAC3C;CACH,CAAC;AAEF,SAAS,qBAAqB,OAA6C;AACzE,KAAI,CAAC,SAAS,OAAO,UAAU,SAC7B,QAAO;CAGT,MAAM,YAAY;AAClB,QAAO,MAAM,QAAQ,UAAU,KAAK,IAAI,OAAO,UAAU,aAAa;;AAGxE,SAAS,qBACP,YACA,UAA8B,EAAE,EAC1B;AACN,KAAI,WAAW,KAAK,WAAW,GAAG;AAChC,MAAI,QAAQ,MAAM;AAChB,UAAO,IAAI;IAAE,SAAS,EAAE;IAAE,UAAU;IAAG,CAAC;AACxC;;AAEF,SAAO,KAAK,oBAAoB;AAChC;;AAGF,KAAI,QAAQ,MAAM;AAChB,SAAO,IAAI;GAAE,SAAS,WAAW;GAAM,UAAU,WAAW;GAAU,CAAC;AACvE;;AAGF,QAAO,IAAI,WAAW,MAAM,EAAE,UAAU,MAAM,CAAC;AAC/C,QAAO,IAAI,SAAS,WAAW,WAAW;;AAG5C,SAAS,mBAAmB,SAAyB;AACnD,KAAI;EACF,MAAM,aAAaC,MAASH,QAAM;AAClC,MAAI,WAAW,WAAW,EAAG,QAAO,EAAE;AACtC,SAAO,WAAW,KAAK,MAAM,MAAM,UAAU,EAAE,CAAC;SAC1C;EACN,MAAM,UAAUA,QAAM,MAAM;AAC5B,SAAO,QAAQ,SAAS,IAAI,CAAC,QAAQ,GAAG,EAAE;;;AAI9C,SAAS,0BAA0B,OAA+C;AAChF,QAAO,MAAM,QAAQ,MAAM,IAAI,MAAM,SAAS,KAAK,MAAM,MAAM,qBAAqB;;AAGtF,SAAS,eAAe,QAAgC,UAA8B,EAAE,EAAQ;AAC9F,KAAI,0BAA0B,OAAO,OAAO,EAAE;AAC5C,MAAI,QAAQ,MAAM;AAChB,UAAO,IAAI,OAAO,OAAO,KAAK,OAAO;IAAE,SAAS,EAAE;IAAM,UAAU,EAAE;IAAU,EAAE,CAAC;AACjF;;EAEF,MAAM,UAAU,mBAAmB,OAAO,MAAM;AAChD,OAAK,IAAI,IAAI,GAAG,IAAI,OAAO,OAAO,QAAQ,KAAK;AAC7C,OAAI,IAAI,EAAG,QAAO,IAAI,GAAG;AACzB,UAAO,KAAK,QAAQ,MAAM,aAAa,IAAI,IAAI;AAC/C,wBAAqB,OAAO,OAAO,IAAI,QAAQ;;AAEjD;;AAGF,KAAI,qBAAqB,OAAO,OAAO,EAAE;AACvC,uBAAqB,OAAO,QAAQ,QAAQ;AAC5C;;AAGF,QAAO,IAAI;EACT,QAAQ,OAAO;EACf,OAAO,OAAO;EACd,QAAQ,OAAO;EAChB,CAAC;;AAGJ,SAAS,eAAe,QAAgC,UAA8B,EAAE,EAAQ;AAC9F,KAAI,QAAQ,MAAM;AAChB,SAAO,IAAI,EACT,QAAQ,OAAO,QAChB,CAAC;AACF;;AAGF,QAAO,IAAI,KAAK,UAAU,OAAO,QAAQ,MAAM,EAAE,CAAC"}