sanity 3.77.3-server-side-schemas.16 → 3.77.3-server-side-schemas.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"_internal.js","sources":["../../src/_internal/cli/util/timing.ts","../../src/_internal/cli/actions/manifest/extractManifestAction.ts","../../src/_internal/cli/actions/schema/schemaListAction.ts","../../src/_internal/cli/actions/schema/storeSchemasAction.ts","../../src/_internal/cli/commands/app/appGroup.ts","../../src/_internal/cli/commands/app/buildCommand.ts","../../src/_internal/cli/commands/app/deployCommand.ts","../../src/_internal/cli/commands/app/devCommand.ts","../../src/_internal/cli/util/isInteractive.ts","../../src/_internal/cli/commands/dev/devCommand.ts","../../src/_internal/cli/commands/app/startCommand.ts","../../src/_internal/cli/commands/backup/backupGroup.ts","../../src/_internal/cli/actions/backup/parseApiErr.ts","../../src/_internal/cli/debug.ts","../../src/_internal/cli/actions/dataset/validateDatasetName.ts","../../src/_internal/cli/actions/dataset/datasetNamePrompt.ts","../../src/_internal/cli/actions/dataset/chooseDatasetPrompt.ts","../../src/_internal/cli/actions/backup/resolveApiClient.ts","../../src/_internal/cli/commands/backup/disableBackupCommand.ts","../../src/_internal/cli/actions/backup/debug.ts","../../src/_internal/cli/actions/backup/archiveDir.ts","../../src/_internal/cli/actions/backup/chooseBackupIdPrompt.ts","../../src/_internal/cli/actions/backup/cleanupTmpDir.ts","../../src/_internal/cli/actions/backup/withRetry.ts","../../src/_internal/cli/actions/backup/downloadAsset.ts","../../src/_internal/cli/actions/backup/downloadDocument.ts","../../src/_internal/cli/actions/backup/fetchNextBackupPage.ts","../../src/_internal/cli/actions/backup/progressSpinner.ts","../../src/_internal/cli/util/humanFileSize.ts","../../src/_internal/cli/util/isPathDirName.ts","../../src/_internal/cli/commands/backup/downloadBackupCommand.ts","../../src/_internal/cli/commands/backup/enableBackupCommand.ts","../../src/_internal/cli/commands/backup/listBackupCommand.ts","../../src/_internal/cli/commands/build/buildCommand.ts","../../src/_internal/cli/actions/cors/addCorsOrigin.ts","../../src/_internal/cli/commands/cors/addCorsOriginCommand.ts","../../src/_internal/cli/commands/cors/corsGroup.ts","../../src/_internal/cli/commands/cors/deleteCorsOriginCommand.ts","../../src/_internal/cli/commands/cors/listCorsOriginsCommand.ts","../../src/_internal/cli/actions/dataset/alias/validateDatasetAliasName.ts","../../src/_internal/cli/actions/dataset/alias/promptForDatasetAliasName.ts","../../src/_internal/cli/commands/dataset/alias/datasetAliasesClient.ts","../../src/_internal/cli/commands/dataset/alias/createAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/deleteAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/linkAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/unlinkAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/aliasCommands.ts","../../src/_internal/cli/actions/dataset/listDatasetCopyJobs.ts","../../src/_internal/cli/util/getClientUrl.ts","../../src/_internal/cli/commands/dataset/copyDatasetCommand.ts","../../src/_internal/cli/commands/dataset/createDatasetCommand.ts","../../src/_internal/cli/commands/dataset/datasetGroup.ts","../../src/_internal/cli/commands/dataset/datasetVisibilityCommand.ts","../../src/_internal/cli/commands/dataset/deleteDatasetCommand.ts","../../src/_internal/cli/commands/dataset/exportDatasetCommand.ts","../../src/_internal/cli/commands/dataset/importDatasetCommand.ts","../../src/_internal/cli/commands/dataset/alias/listAliasesHandler.ts","../../src/_internal/cli/commands/da
taset/listDatasetsCommand.ts","../../src/_internal/cli/commands/deploy/deployCommand.ts","../../src/_internal/cli/commands/deploy/undeployCommand.ts","../../src/_internal/cli/commands/documents/createDocumentsCommand.ts","../../src/_internal/cli/commands/documents/deleteDocumentsCommand.ts","../../src/_internal/cli/commands/documents/documentsGroup.ts","../../src/_internal/cli/util/colorizeJson.ts","../../src/_internal/cli/commands/documents/getDocumentsCommand.ts","../../src/_internal/cli/commands/documents/queryDocumentsCommand.ts","../../src/_internal/cli/commands/documents/validateDocumentsCommand.ts","../../src/_internal/cli/commands/exec/execCommand.ts","../../src/_internal/cli/commands/graphql/deleteGraphQLAPICommand.ts","../../src/_internal/cli/commands/graphql/deployGraphQLAPICommand.ts","../../src/_internal/cli/commands/graphql/graphqlGroup.ts","../../src/_internal/cli/commands/graphql/listGraphQLAPIsCommand.ts","../../src/_internal/cli/commands/hook/createHookCommand.ts","../../src/_internal/cli/commands/hook/deleteHookCommand.ts","../../src/_internal/cli/commands/hook/hookGroup.ts","../../src/_internal/cli/commands/hook/printHookAttemptCommand.ts","../../src/_internal/cli/commands/hook/listHookLogsCommand.ts","../../src/_internal/cli/commands/hook/listHooksCommand.ts","../../src/_internal/cli/commands/manifest/extractManifestCommand.ts","../../src/_internal/cli/commands/manifest/manifestGroup.ts","../../src/_internal/cli/commands/migration/constants.ts","../../src/_internal/cli/commands/migration/templates/minimalAdvanced.ts","../../src/_internal/cli/commands/migration/templates/minimalSimple.ts","../../src/_internal/cli/commands/migration/templates/renameField.ts","../../src/_internal/cli/commands/migration/templates/renameType.ts","../../src/_internal/cli/commands/migration/templates/stringToPTE.ts","../../src/_internal/cli/commands/migration/createMigrationCommand.ts","../../src/_internal/cli/commands/migration/utils/resolveMigrationScript.ts","../../src/_internal/cli/commands/migration/listMigrationsCommand.ts","../../src/_internal/cli/commands/migration/migrationGroup.ts","../../src/_internal/cli/util/tree.ts","../../src/_internal/cli/commands/migration/prettyMutationFormatter.ts","../../src/_internal/cli/commands/migration/runMigrationCommand.ts","../../src/_internal/cli/commands/preview/previewCommand.ts","../../src/_internal/cli/commands/schema/deleteSchemaCommand.ts","../../src/_internal/cli/commands/schema/extractSchemaCommand.ts","../../src/_internal/cli/commands/schema/schemaGroup.ts","../../src/_internal/cli/commands/schema/schemaListCommand.ts","../../src/_internal/cli/commands/schema/storeSchemaCommand.ts","../../src/_internal/cli/commands/schema/validateSchemaCommand.ts","../../src/_internal/cli/commands/start/startCommand.ts","../../src/_internal/cli/util/prettifyQuotaError.ts","../../src/_internal/cli/commands/users/inviteUserCommand.ts","../../src/_internal/cli/commands/users/listUsersCommand.ts","../../src/_internal/cli/commands/users/usersGroup.ts","../../src/_internal/cli/commands/index.ts"],"sourcesContent":["import {performance} from 'node:perf_hooks'\n\nexport interface TimeMeasurer {\n start: (name: string) => void\n end: (name: string) => number\n getTimings: () => Record<string, number>\n}\n\nexport function getTimer(): TimeMeasurer {\n const timings: Record<string, number> = {}\n const startTimes: Record<string, number> = {}\n\n function start(name: string): void {\n if (typeof startTimes[name] !== 'undefined') {\n throw new Error(`Timer \"${name}\" 
already started, cannot overwrite`)\n }\n\n startTimes[name] = performance.now()\n }\n\n function end(name: string): number {\n if (typeof startTimes[name] === 'undefined') {\n throw new Error(`Timer \"${name}\" never started, cannot end`)\n }\n\n timings[name] = performance.now() - startTimes[name]\n return timings[name]\n }\n\n return {start, end, getTimings: () => timings}\n}\n","import {createHash} from 'node:crypto'\nimport {mkdir, writeFile} from 'node:fs/promises'\nimport {dirname, join, resolve} from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {type CliCommandArguments, type CliCommandContext} from '@sanity/cli'\nimport chalk from 'chalk'\nimport {minutesToMilliseconds} from 'date-fns'\nimport readPkgUp from 'read-pkg-up'\n\nimport {\n type CreateManifest,\n type CreateWorkspaceManifest,\n type ManifestWorkspaceFile,\n} from '../../../manifest/manifestTypes'\nimport {type ExtractManifestWorkerData} from '../../threads/extractManifest'\nimport {getTimer} from '../../util/timing'\n\nexport const MANIFEST_FILENAME = 'create-manifest.json'\nconst SCHEMA_FILENAME_SUFFIX = '.create-schema.json'\nconst TOOLS_FILENAME_SUFFIX = '.create-tools.json'\n\n/** Escape-hatch env flags to change action behavior */\nconst FEATURE_ENABLED_ENV_NAME = 'SANITY_CLI_EXTRACT_MANIFEST_ENABLED'\nconst EXTRACT_MANIFEST_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME] !== 'false'\nconst EXTRACT_MANIFEST_LOG_ERRORS = process.env.SANITY_CLI_EXTRACT_MANIFEST_LOG_ERRORS === 'true'\n\nconst CREATE_TIMER = 'create-manifest'\n\nconst EXTRACT_TASK_TIMEOUT_MS = minutesToMilliseconds(2)\n\nconst EXTRACT_FAILURE_MESSAGE =\n \"↳ Couldn't extract manifest file. Sanity Create will not be available for the studio.\\n\" +\n ` Disable this message with ${FEATURE_ENABLED_ENV_NAME}=false`\n\nexport interface ExtractManifestFlags {\n path?: string\n}\n\n/**\n * This function will never throw.\n * @returns `undefined` if extract succeeded - caught error if it failed\n */\nexport async function extractManifestSafe(\n args: CliCommandArguments<ExtractManifestFlags>,\n context: CliCommandContext,\n): Promise<Error | undefined> {\n if (!EXTRACT_MANIFEST_ENABLED) {\n return undefined\n }\n\n try {\n await extractManifest(args, context)\n return undefined\n } catch (err) {\n if (EXTRACT_MANIFEST_LOG_ERRORS) {\n context.output.error(err)\n }\n return err\n }\n}\n\nasync function extractManifest(\n args: CliCommandArguments<ExtractManifestFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const {output, workDir} = context\n\n const flags = args.extOptions\n const defaultOutputDir = resolve(join(workDir, 'dist'))\n\n const outputDir = resolve(defaultOutputDir)\n const defaultStaticPath = join(outputDir, 'static')\n\n const staticPath = flags.path ?? 
defaultStaticPath\n\n const path = join(staticPath, MANIFEST_FILENAME)\n\n const rootPkgPath = readPkgUp.sync({cwd: __dirname})?.path\n if (!rootPkgPath) {\n throw new Error('Could not find root directory for `sanity` package')\n }\n\n const timer = getTimer()\n timer.start(CREATE_TIMER)\n const spinner = output.spinner({}).start('Extracting manifest')\n\n try {\n const workspaceManifests = await getWorkspaceManifests({rootPkgPath, workDir})\n await mkdir(staticPath, {recursive: true})\n\n const workspaceFiles = await writeWorkspaceFiles(workspaceManifests, staticPath)\n\n const manifest: CreateManifest = {\n /**\n * Version history:\n * 1: Initial release.\n * 2: Added tools file.\n */\n version: 2,\n createdAt: new Date().toISOString(),\n workspaces: workspaceFiles,\n }\n\n await writeFile(path, JSON.stringify(manifest, null, 2))\n const manifestDuration = timer.end(CREATE_TIMER)\n\n spinner.succeed(`Extracted manifest (${manifestDuration.toFixed()}ms)`)\n } catch (err) {\n spinner.fail(err.message)\n output.print(chalk.gray(EXTRACT_FAILURE_MESSAGE))\n throw err\n }\n}\n\nasync function getWorkspaceManifests({\n rootPkgPath,\n workDir,\n}: {\n rootPkgPath: string\n workDir: string\n}): Promise<CreateWorkspaceManifest[]> {\n const workerPath = join(\n dirname(rootPkgPath),\n 'lib',\n '_internal',\n 'cli',\n 'threads',\n 'extractManifest.js',\n )\n\n const worker = new Worker(workerPath, {\n workerData: {workDir} satisfies ExtractManifestWorkerData,\n // eslint-disable-next-line no-process-env\n env: process.env,\n })\n\n let timeout = false\n const timeoutId = setTimeout(() => {\n timeout = true\n worker.terminate()\n }, EXTRACT_TASK_TIMEOUT_MS)\n\n try {\n return await new Promise<CreateWorkspaceManifest[]>((resolveWorkspaces, reject) => {\n const buffer: CreateWorkspaceManifest[] = []\n worker.addListener('message', (message) => buffer.push(message))\n worker.addListener('exit', (exitCode) => {\n if (exitCode === 0) {\n resolveWorkspaces(buffer)\n } else if (timeout) {\n reject(new Error(`Extract manifest was aborted after ${EXTRACT_TASK_TIMEOUT_MS}ms`))\n }\n })\n worker.addListener('error', reject)\n })\n } finally {\n clearTimeout(timeoutId)\n }\n}\n\nfunction writeWorkspaceFiles(\n manifestWorkspaces: CreateWorkspaceManifest[],\n staticPath: string,\n): Promise<ManifestWorkspaceFile[]> {\n const output = manifestWorkspaces.reduce<Promise<ManifestWorkspaceFile>[]>(\n (workspaces, workspace) => {\n return [...workspaces, writeWorkspaceFile(workspace, staticPath)]\n },\n [],\n )\n return Promise.all(output)\n}\n\nasync function writeWorkspaceFile(\n workspace: CreateWorkspaceManifest,\n staticPath: string,\n): Promise<ManifestWorkspaceFile> {\n const [schemaFilename, toolsFilename] = await Promise.all([\n createFile(staticPath, workspace.schema, SCHEMA_FILENAME_SUFFIX),\n createFile(staticPath, workspace.tools, TOOLS_FILENAME_SUFFIX),\n ])\n\n return {\n ...workspace,\n schema: schemaFilename,\n tools: toolsFilename,\n }\n}\n\nconst createFile = async (path: string, content: any, filenameSuffix: string) => {\n const stringifiedContent = JSON.stringify(content, null, 2)\n const hash = createHash('sha1').update(stringifiedContent).digest('hex')\n const filename = `${hash.slice(0, 8)}${filenameSuffix}`\n\n // workspaces with identical data will overwrite each others file. 
This is ok, since they are identical and can be shared\n await writeFile(join(path, filename), stringifiedContent)\n\n return filename\n}\n","import {type CliCommandArguments, type CliCommandContext, type CliOutputter} from '@sanity/cli'\nimport {type SanityDocument} from '@sanity/client'\nimport chalk from 'chalk'\nimport {size, sortBy, uniqBy} from 'lodash'\n\nimport {type ManifestWorkspaceFile} from '../../../manifest/manifestTypes'\nimport {\n getManifestPath,\n readManifest,\n SCHEMA_STORE_ENABLED,\n throwIfProjectIdMismatch,\n} from './storeSchemasAction'\n\nexport interface SchemaListFlags {\n 'json': boolean\n 'id': string\n 'manifest-dir': string\n}\n\ntype PrintSchemaListArgs = {\n schemas: SanityDocument[]\n output: CliOutputter\n dataset: string\n projectId: string\n path: string\n}\n\nexport const SANITY_WORKSPACE_SCHEMA_TYPE = 'sanity.workspace.schema'\n\nconst printSchemaList = ({\n schemas,\n output,\n}: Omit<PrintSchemaListArgs, 'path' | 'dataset' | 'projectId'>) => {\n const ordered = sortBy(\n schemas.map(({_createdAt: createdAt, _id: id, workspace}) => {\n return [id, workspace.name, workspace.dataset, workspace.projectId, createdAt].map(String)\n }),\n ['createdAt'],\n )\n const headings = ['Id', 'Workspace', 'Dataset', 'ProjectId', 'CreatedAt']\n const rows = ordered.reverse()\n\n const maxWidths = rows.reduce(\n (max, row) => row.map((current, index) => Math.max(size(current), max[index])),\n headings.map((str) => size(str)),\n )\n\n const printRow = (row: string[]) => row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join(' ')\n\n output.print(chalk.cyan(printRow(headings)))\n rows.forEach((row) => output.print(printRow(row)))\n}\n\nexport default async function schemaListAction(\n args: CliCommandArguments<SchemaListFlags>,\n context: CliCommandContext,\n): Promise<void> {\n if (!SCHEMA_STORE_ENABLED) {\n return\n }\n\n const flags = args.extOptions\n if (typeof flags.id === 'boolean') throw new Error('Schema ID is empty')\n if (typeof flags['manifest-dir'] === 'boolean') throw new Error('Manifest directory is empty')\n\n const {apiClient, output} = context\n const client = apiClient({\n requireUser: true,\n requireProject: true,\n }).withConfig({apiVersion: 'v2024-08-01'})\n\n const projectId = client.config().projectId\n const dataset = client.config().dataset\n\n if (!projectId || !dataset) {\n output.error('Project ID and dataset must be defined.')\n return\n }\n\n const manifestDir = flags['manifest-dir']\n const manifestPath = getManifestPath(context, manifestDir)\n const manifest = await readManifest(manifestPath, context)\n\n // Gather all schemas\n const results = await Promise.allSettled(\n uniqBy<ManifestWorkspaceFile>(manifest.workspaces, 'dataset').map(async (workspace) => {\n throwIfProjectIdMismatch(workspace, projectId)\n if (flags.id) {\n // Fetch a specific schema by id\n return await client\n .withConfig({\n dataset: workspace.dataset,\n projectId: workspace.projectId,\n })\n .getDocument(flags.id)\n }\n // Fetch all schemas\n return await client\n .withConfig({\n dataset: workspace.dataset,\n projectId: workspace.projectId,\n useCdn: false,\n })\n .fetch<SanityDocument[]>(`*[_type == $type]`, {\n type: SANITY_WORKSPACE_SCHEMA_TYPE,\n })\n }),\n )\n\n // Log errors and collect successful results\n const schemas = results\n .map((result, index) => {\n if (result.status === 'rejected') {\n const workspace = manifest.workspaces[index]\n output.error(\n chalk.red(\n `Failed to fetch schemas for workspace '${workspace.name}': 
${result.reason.message}`,\n ),\n )\n return []\n }\n return result.value\n })\n .flat()\n\n if (schemas.length === 0) {\n output.error(`No schemas found`)\n return\n }\n\n if (flags.json) {\n output.print(`${JSON.stringify(flags.id ? schemas[0] : schemas, null, 2)}`)\n } else {\n printSchemaList({schemas: schemas as SanityDocument[], output})\n }\n}\n","import {readFileSync, statSync} from 'node:fs'\nimport path, {join, resolve} from 'node:path'\n\nimport {type CliCommandArguments, type CliCommandContext} from '@sanity/cli'\nimport chalk from 'chalk'\nimport {type Ora} from 'ora'\n\nimport {type ManifestSchemaType, type ManifestWorkspaceFile} from '../../../manifest/manifestTypes'\nimport {\n type ExtractManifestFlags,\n extractManifestSafe,\n MANIFEST_FILENAME,\n} from '../manifest/extractManifestAction'\nimport {SANITY_WORKSPACE_SCHEMA_TYPE} from './schemaListAction'\n\nconst FEATURE_ENABLED_ENV_NAME = 'SANITY_CLI_SCHEMA_STORE_ENABLED'\nexport const SCHEMA_STORE_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME] === 'true'\n\nexport interface StoreManifestSchemasFlags {\n 'manifest-dir'?: string\n 'workspace'?: string\n 'id-prefix'?: string\n 'schema-required'?: boolean\n 'verbose'?: boolean\n}\n\nexport const getManifestPath = (context: CliCommandContext, customPath?: string) => {\n const defaultOutputDir = resolve(join(context.workDir, 'dist'))\n\n const outputDir = resolve(defaultOutputDir)\n const defaultStaticPath = join(outputDir, 'static')\n\n const staticPath = customPath ?? defaultStaticPath\n const manifestPath = path.resolve(process.cwd(), staticPath)\n return manifestPath\n}\n\n/**\n * Helper function to read and parse a manifest file with logging\n */\nconst readAndParseManifest = (manifestPath: string, context: CliCommandContext) => {\n const content = readFileSync(manifestPath, 'utf-8')\n const stats = statSync(manifestPath)\n const lastModified = stats.mtime.toISOString()\n context.output.print(\n chalk.gray(`\\n↳ Read manifest from ${manifestPath} (last modified: ${lastModified})`),\n )\n return JSON.parse(content)\n}\n\nexport const readManifest = async (readPath: string, context: CliCommandContext, spinner?: Ora) => {\n const manifestPath = `${readPath}/${MANIFEST_FILENAME}`\n\n try {\n return readAndParseManifest(manifestPath, context)\n } catch (error) {\n // Still log that we're attempting extraction\n spinner!.text = 'Manifest not found, attempting to extract it...'\n\n await extractManifestSafe(\n {\n extOptions: {path: readPath},\n groupOrCommand: 'extract',\n argv: [],\n argsWithoutOptions: [],\n extraArguments: [],\n } as CliCommandArguments<ExtractManifestFlags>,\n context,\n )\n\n // Try reading the manifest again after extraction\n try {\n return readAndParseManifest(manifestPath, context)\n } catch (retryError) {\n const errorMessage = `Failed to read manifest at ${manifestPath}`\n spinner?.fail(errorMessage)\n // We should log the error too for consistency\n context.output.error(errorMessage)\n throw retryError\n }\n }\n}\n\n// At the moment schema store deos not support studios where workspaces have multiple projects\nexport const throwIfProjectIdMismatch = (\n workspace: ManifestWorkspaceFile,\n projectId: string,\n): void => {\n if (workspace.projectId !== projectId) {\n throw new Error(\n `↳ No permissions to store schema for workspace ${workspace.name} with projectId: ${workspace.projectId}`,\n )\n }\n}\n\nexport default async function storeSchemasAction(\n args: CliCommandArguments<StoreManifestSchemasFlags>,\n context: CliCommandContext,\n): 
Promise<Error | undefined> {\n if (!SCHEMA_STORE_ENABLED) {\n return undefined\n }\n\n const flags = args.extOptions\n\n const schemaRequired = flags['schema-required']\n const workspaceName = flags.workspace\n const idPrefix = flags['id-prefix']\n const verbose = flags.verbose\n const manifestDir = flags['manifest-dir']\n\n if (typeof manifestDir === 'boolean') throw new Error('Manifest directory is empty')\n if (typeof idPrefix === 'boolean') throw new Error('Id prefix is empty')\n if (typeof workspaceName === 'boolean') throw new Error('Workspace is empty')\n\n const {output, apiClient} = context\n\n const spinner = output.spinner({}).start('Storing schemas')\n\n const manifestPath = getManifestPath(context, manifestDir)\n\n try {\n const client = apiClient({\n requireUser: true,\n requireProject: true,\n }).withConfig({apiVersion: 'v2024-08-01'})\n\n const projectId = client.config().projectId\n if (!projectId) throw new Error('Project ID is not defined')\n\n const manifest = await readManifest(manifestPath, context, spinner)\n\n let storedCount = 0\n\n let error: Error | undefined\n\n const saveSchema = async (workspace: ManifestWorkspaceFile) => {\n const id = `${idPrefix ? `${idPrefix}.` : ''}${SANITY_WORKSPACE_SCHEMA_TYPE}.${workspace.name}`\n try {\n throwIfProjectIdMismatch(workspace, projectId)\n const schema = JSON.parse(\n readFileSync(`${manifestPath}/${workspace.schema}`, 'utf-8'),\n ) as ManifestSchemaType\n await client\n .withConfig({\n dataset: workspace.dataset,\n projectId: workspace.projectId,\n })\n .transaction()\n .createOrReplace({_type: SANITY_WORKSPACE_SCHEMA_TYPE, _id: id, workspace, schema})\n .commit()\n storedCount++\n spinner.text = `Stored ${storedCount} schemas so far...`\n if (verbose) spinner.succeed(`Schema stored for workspace '${workspace.name}'`)\n } catch (err) {\n error = err\n spinner.fail(\n `Error storing schema for workspace '${workspace.name}':\\n${chalk.red(`${err.message}`)}`,\n )\n if (schemaRequired) throw err\n } finally {\n if (verbose) {\n output.print(\n chalk.gray(`↳ schemaId: ${id}, projectId: ${projectId}, dataset: ${workspace.dataset}`),\n )\n }\n }\n }\n\n // If a workspace name is provided, only save the schema for that workspace\n if (workspaceName) {\n const workspaceToSave = manifest.workspaces.find(\n (workspace: ManifestWorkspaceFile) => workspace.name === workspaceName,\n )\n if (!workspaceToSave) {\n spinner.fail(`Workspace ${workspaceName} not found in manifest`)\n throw new Error(`Workspace ${workspaceName} not found in manifest: projectID: ${projectId}`)\n }\n await saveSchema(workspaceToSave as ManifestWorkspaceFile)\n spinner.succeed(`Stored 1 schemas`)\n } else {\n await Promise.all(\n manifest.workspaces.map(async (workspace: ManifestWorkspaceFile): Promise<void> => {\n await saveSchema(workspace)\n }),\n )\n spinner.succeed(`Stored ${storedCount}/${manifest.workspaces.length} schemas`)\n }\n\n if (error) throw error\n return undefined\n } catch (err) {\n // if this flag is set, throw the error and exit without deploying otherwise just log the error\n if (schemaRequired) throw err\n return err\n } finally {\n output.print(`${chalk.gray('↳ List stored schemas with:')} ${chalk.cyan('sanity schema list')}`)\n }\n}\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst appGroup: CliCommandGroupDefinition = {\n name: 'app',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages non-studio applications',\n}\n\nexport default appGroup\n","import {\n type CliCommandArguments,\n type 
CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type BuildSanityStudioCommandFlags} from '../../actions/build/buildAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n\nExamples\n sanity app build\n sanity app build --no-minify --source-maps\n`\n\nconst appBuildCommand: CliCommandDefinition = {\n name: 'build',\n group: 'app',\n signature: '[OUTPUT_DIR]',\n description: 'Builds the Sanity application configuration into a static bundle',\n action: async (\n args: CliCommandArguments<BuildSanityStudioCommandFlags>,\n context: CliCommandContext,\n overrides?: {basePath?: string},\n ) => {\n const buildAction = await getBuildAction()\n\n return buildAction(args, context, overrides)\n },\n helpText,\n}\n\nasync function getBuildAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/build/buildAction') = require('../../actions/build/buildAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/build/buildAction')\n\n return mod.default\n}\n\nexport default appBuildCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type DeployStudioActionFlags} from '../../actions/deploy/deployAction'\nimport {SCHEMA_STORE_ENABLED} from '../../actions/schema/storeSchemasAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n --no-build Don't build the application prior to deploy, instead deploying the version currently in \\`dist/\\`\n ${SCHEMA_STORE_ENABLED ? '--verbose Enable verbose logging for the schema store' : ''}\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n\nExamples\n sanity deploy\n sanity deploy --no-minify --source-maps\n`\n\nconst appDeployCommand: CliCommandDefinition = {\n name: 'deploy',\n group: 'app',\n signature: '[SOURCE_DIR] [--no-build] [--source-maps] [--no-minify]',\n description: 'Builds and deploys Sanity application to Sanity hosting',\n action: async (\n args: CliCommandArguments<DeployStudioActionFlags>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/deploy/deployAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default appDeployCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartDevServerCommandFlags} from '../../actions/dev/devAction'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new entry to the CORS-origins allow list.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. 
[default: \"127.0.0.1\"]\n\nExamples\n sanity app dev --host=0.0.0.0\n sanity app dev --port=1942\n`\n\nconst appDevCommand: CliCommandDefinition = {\n name: 'dev',\n group: 'app',\n signature: '[--port <port>] [--host <host>]',\n description: 'Starts a local dev server for your Sanity application with live reloading',\n action: async (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const devAction = await getDevAction()\n\n return devAction(args, context)\n },\n helpText,\n}\n\nexport async function getDevAction(): Promise<\n (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => Promise<void>\n> {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/dev/devAction') = require('../../actions/dev/devAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/dev/devAction')\n\n return mod.default\n}\n\nexport default appDevCommand\n","/* eslint-disable no-process-env */\nexport const isInteractive =\n process.stdout.isTTY && process.env.TERM !== 'dumb' && !('CI' in process.env)\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartDevServerCommandFlags} from '../../actions/dev/devAction'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new entry to the CORS-origins allow list.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. [default: \"127.0.0.1\"]\n\nExamples\n sanity dev --host=0.0.0.0\n sanity dev --port=1942\n`\n\nconst devCommand: CliCommandDefinition = {\n name: 'dev',\n signature: '[--port <port>] [--host <host>]',\n description: 'Starts a local dev server for Sanity Studio with live reloading',\n action: async (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const devAction = await getDevAction()\n\n return devAction(args, context)\n },\n helpText,\n}\n\nexport async function getDevAction(): Promise<\n (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => Promise<void>\n> {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/dev/devAction') = require('../../actions/dev/devAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/dev/devAction')\n\n return mod.default\n}\n\nexport default devCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartPreviewServerCommandFlags} from '../../actions/preview/previewAction'\nimport {isInteractive} from '../../util/isInteractive'\nimport {getDevAction} from '../dev/devCommand'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new CORS-entry to be added.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. 
[default: \"127.0.0.1\"]\n\nExamples\n sanity app start --host=0.0.0.0\n sanity app start --port=1942\n sanity app start some/build-output-dir\n`\n\nconst appStartCommand: CliCommandDefinition = {\n name: 'start',\n group: 'app',\n signature: '[BUILD_OUTPUT_DIR] [--port <port>] [--host <host>]',\n description: 'Previews a built Sanity application',\n action: async (\n args: CliCommandArguments<StartPreviewServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const {output, chalk, prompt} = context\n const previewAction = await getPreviewAction()\n\n const error = (msg: string) => output.warn(chalk.red.bgBlack(msg))\n\n try {\n await previewAction(args, context)\n } catch (err) {\n if (err.name !== 'BUILD_NOT_FOUND') {\n throw err\n }\n\n error(err.message)\n error('\\n')\n\n const shouldRunDevServer =\n isInteractive &&\n (await prompt.single({\n message: 'Do you want to start a development server instead?',\n type: 'confirm',\n }))\n\n if (shouldRunDevServer) {\n const devAction = await getDevAction()\n await devAction(args, context)\n } else {\n // Indicate that this isn't an expected exit\n // eslint-disable-next-line no-process-exit\n process.exit(1)\n }\n }\n },\n helpText,\n}\n\nasync function getPreviewAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/preview/previewAction') = require('../../actions/preview/previewAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/preview/previewAction')\n\n return mod.default\n}\n\nexport default appStartCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\n// defaultApiVersion is the backend API version used for dataset backup.\nexport const defaultApiVersion = 'v2024-02-21'\n\nconst datasetBackupGroup: CliCommandGroupDefinition = {\n name: 'backup',\n signature: '[COMMAND]',\n description: 'Manage backups.',\n isGroupRoot: true,\n}\n\nexport default datasetBackupGroup\n","// apiErr is a type that represents an error returned by the API\ninterface ApiErr {\n statusCode: number\n message: string\n}\n\n// parseApiErr is a function that attempts with the best effort to parse\n// an error returned by the API since different API endpoint may end up\n// returning different error structures.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any,@typescript-eslint/explicit-module-boundary-types\nfunction parseApiErr(err: any): ApiErr {\n const apiErr = {} as ApiErr\n if (err.code) {\n apiErr.statusCode = err.code\n } else if (err.statusCode) {\n apiErr.statusCode = err.statusCode\n }\n\n if (err.message) {\n apiErr.message = err.message\n } else if (err.statusMessage) {\n apiErr.message = err.statusMessage\n } else if (err?.response?.body?.message) {\n apiErr.message = err.response.body.message\n } else if (err?.response?.data?.message) {\n apiErr.message = err.response.data.message\n } else {\n // If no message can be extracted, print the whole error.\n apiErr.message = JSON.stringify(err)\n }\n\n return apiErr\n}\n\nexport default parseApiErr\n","import debugIt from 'debug'\n\nexport const debug = debugIt('sanity:core')\n","const MAX_DATASET_NAME_LENGTH = 64\n\nexport function validateDatasetName(datasetName: string): false | string {\n if (!datasetName) {\n return 'Dataset name is missing'\n }\n\n const name = 
`${datasetName}`\n\n if (name.toLowerCase() !== name) {\n return 'Dataset name must be all lowercase characters'\n }\n\n if (name.length < 2) {\n return 'Dataset name must be at least two characters long'\n }\n\n if (name.length > MAX_DATASET_NAME_LENGTH) {\n return `Dataset name must be at most ${MAX_DATASET_NAME_LENGTH} characters`\n }\n\n if (!/^[a-z0-9]/.test(name)) {\n return 'Dataset name must start with a letter or a number'\n }\n\n if (!/^[a-z0-9][-_a-z0-9]+$/.test(name)) {\n return 'Dataset name must only contain letters, numbers, dashes and underscores'\n }\n\n if (/[-_]$/.test(name)) {\n return 'Dataset name must not end with a dash or an underscore'\n }\n\n return false\n}\n","import {type CliPrompter} from '@sanity/cli'\n\nimport {validateDatasetName} from './validateDatasetName'\n\nexport function promptForDatasetName(\n prompt: CliPrompter,\n options: {message?: string; default?: string} = {},\n): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Dataset name:',\n validate: (name) => {\n const err = validateDatasetName(name)\n if (err) {\n return err\n }\n\n return true\n },\n ...options,\n })\n}\n","import {type CliCommandContext} from '@sanity/cli'\n\nimport {debug} from '../../debug'\nimport {promptForDatasetName} from './datasetNamePrompt'\n\nexport async function chooseDatasetPrompt(\n context: CliCommandContext,\n options: {message?: string; allowCreation?: boolean} = {},\n): Promise<string> {\n const {apiClient, prompt} = context\n const {message, allowCreation} = options\n const client = apiClient()\n\n const datasets = await client.datasets.list()\n const hasProduction = datasets.find((dataset) => dataset.name === 'production')\n const datasetChoices = datasets.map((dataset) => ({value: dataset.name}))\n const selected = await prompt.single({\n message: message || 'Select dataset to use',\n type: 'list',\n choices: allowCreation\n ? [{value: 'new', name: 'Create new dataset'}, new prompt.Separator(), ...datasetChoices]\n : datasetChoices,\n })\n\n if (selected === 'new') {\n debug('User wants to create a new dataset, prompting for name')\n const newDatasetName = await promptForDatasetName(prompt, {\n message: 'Name your dataset:',\n default: hasProduction ? undefined : 'production',\n })\n await client.datasets.create(newDatasetName)\n return newDatasetName\n }\n\n return selected\n}\n","import {type CliCommandContext} from '@sanity/cli'\nimport {type SanityClient} from '@sanity/client'\n\nimport {chooseDatasetPrompt} from '../dataset/chooseDatasetPrompt'\n\ntype ResolvedApiClient = {\n projectId: string\n datasetName: string\n token?: string\n client: SanityClient\n}\n\nasync function resolveApiClient(\n context: CliCommandContext,\n datasetName: string,\n apiVersion: string,\n): Promise<ResolvedApiClient> {\n const {apiClient} = context\n\n let client = apiClient()\n const {projectId, token} = client.config()\n\n if (!projectId) {\n throw new Error('Project ID not defined')\n }\n\n // If no dataset provided, explicitly ask for dataset instead of using dataset\n // configured in Sanity config. 
Aligns with `sanity dataset export` behavior.\n let selectedDataset: string = datasetName\n if (!selectedDataset) {\n selectedDataset = await chooseDatasetPrompt(context, {\n message: 'Select the dataset name:',\n })\n }\n\n client = client.withConfig({dataset: datasetName, apiVersion})\n\n return {\n projectId,\n datasetName: selectedDataset,\n token,\n client,\n }\n}\n\nexport default resolveApiClient\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport {defaultApiVersion} from './backupGroup'\n\nconst helpText = `\nExamples\n sanity backup disable DATASET_NAME\n`\n\nconst disableDatasetBackupCommand: CliCommandDefinition = {\n name: 'disable',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'Disable backup for a dataset.',\n helpText,\n action: async (args, context) => {\n const {output, chalk} = context\n const [dataset] = args.argsWithoutOptions\n const {projectId, datasetName, token, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n try {\n await client.request({\n method: 'PUT',\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`,\n body: {\n enabled: false,\n },\n })\n output.print(`${chalk.green(`Disabled daily backups for dataset ${datasetName}\\n`)}`)\n } catch (error) {\n const {message} = parseApiErr(error)\n output.print(`${chalk.red(`Disabling dataset backup failed: ${message}`)}\\n`)\n }\n },\n}\n\nexport default disableDatasetBackupCommand\n","export default require('debug')('sanity:backup')\n","import {createWriteStream} from 'node:fs'\nimport zlib from 'node:zlib'\n\nimport {type ProgressData} from 'archiver'\n\nimport debug from './debug'\n\nconst archiver = require('archiver')\n\n// ProgressCb is a callback that is called with the number of bytes processed so far.\ntype ProgressCb = (processedBytes: number) => void\n\n// archiveDir creates a tarball of the given directory and writes it to the given file path.\nfunction archiveDir(tmpOutDir: string, outFilePath: string, progressCb: ProgressCb): Promise<void> {\n return new Promise((resolve, reject) => {\n const archiveDestination = createWriteStream(outFilePath)\n archiveDestination.on('error', (err: Error) => {\n reject(err)\n })\n\n archiveDestination.on('close', () => {\n resolve()\n })\n\n const archive = archiver('tar', {\n gzip: true,\n gzipOptions: {level: zlib.constants.Z_DEFAULT_COMPRESSION},\n })\n\n archive.on('error', (err: Error) => {\n debug('Archiving errored!\\n%s', err.stack)\n reject(err)\n })\n\n // Catch warnings for non-blocking errors (stat failures and others)\n archive.on('warning', (err: Error) => {\n debug('Archive warning: %s', err.message)\n })\n\n archive.on('progress', (progress: ProgressData) => {\n progressCb(progress.fs.processedBytes)\n })\n\n // Pipe archive data to the file\n archive.pipe(archiveDestination)\n archive.directory(tmpOutDir, false)\n archive.finalize()\n })\n}\n\nexport default archiveDir\n","import {type CliCommandContext} from '@sanity/cli'\n\nimport {defaultApiVersion} from '../../commands/backup/backupGroup'\nimport resolveApiClient from './resolveApiClient'\n\n// maxBackupIdsShown is the maximum number of backup IDs to show in the prompt.\n// Higher numbers will cause the prompt to be slow.\nconst maxBackupIdsShown = 100\n\nasync function chooseBackupIdPrompt(\n context: CliCommandContext,\n datasetName: 
string,\n): Promise<string> {\n const {prompt} = context\n\n const {projectId, token, client} = await resolveApiClient(context, datasetName, defaultApiVersion)\n\n try {\n // Fetch last $maxBackupIdsShown backups for this dataset.\n // We expect here that API returns backups sorted by creation date in descending order.\n const response = await client.request({\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/backups`,\n query: {limit: maxBackupIdsShown.toString()},\n })\n\n if (response?.backups?.length > 0) {\n const backupIdChoices = response.backups.map((backup: {id: string}) => ({\n value: backup.id,\n }))\n const selected = await prompt.single({\n message: `Select backup ID to use (only last ${maxBackupIdsShown} shown)`,\n type: 'list',\n choices: backupIdChoices,\n })\n\n return selected\n }\n } catch (err) {\n throw new Error(`Failed to fetch backups for dataset ${datasetName}: ${err.message}`)\n }\n\n throw new Error('No backups found')\n}\n\nexport default chooseBackupIdPrompt\n","import {rimraf} from 'rimraf'\n\nimport debug from './debug'\n\nasync function cleanupTmpDir(tmpDir: string): Promise<void> {\n try {\n await rimraf(tmpDir)\n } catch (err) {\n debug(`Error cleaning up temporary files: ${err.message}`)\n }\n}\n\nexport default cleanupTmpDir\n","import debug from './debug'\n\nconst MAX_RETRIES = 5\nconst BACKOFF_DELAY_BASE = 200\n\nconst exponentialBackoff = (retryCount: number) => Math.pow(2, retryCount) * BACKOFF_DELAY_BASE\n\nasync function withRetry<T>(\n operation: () => Promise<T>,\n maxRetries: number = MAX_RETRIES,\n): Promise<T> {\n for (let retryCount = 0; retryCount < maxRetries; retryCount++) {\n try {\n return await operation()\n } catch (err) {\n // Immediately rethrow if the error is not server-related.\n if (err.response && err.response.statusCode && err.response.statusCode < 500) {\n throw err\n }\n\n const retryDelay = exponentialBackoff(retryCount)\n debug(`Error encountered, retrying after ${retryDelay}ms: %s`, err.message)\n await new Promise((resolve) => setTimeout(resolve, retryDelay))\n }\n }\n\n throw new Error('Operation failed after all retries')\n}\n\nexport default withRetry\n","import {createWriteStream} from 'node:fs'\nimport path from 'node:path'\nimport {pipeline} from 'node:stream/promises'\n\nimport {getIt} from 'get-it'\n// eslint-disable-next-line import/extensions\nimport {keepAlive, promise} from 'get-it/middleware'\n\nimport debug from './debug'\nimport withRetry from './withRetry'\n\nconst CONNECTION_TIMEOUT = 15 * 1000 // 15 seconds\nconst READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes\n\nconst request = getIt([keepAlive(), promise()])\n\nasync function downloadAsset(\n url: string,\n fileName: string,\n fileType: string,\n outDir: string,\n): Promise<void> {\n // File names that contain a path to file (e.g. sanity-storage/assets/file-name.tar.gz) fail when archive is\n // created due to missing parent dir (e.g. 
sanity-storage/assets), so we want to handle them by taking\n // the base name as file name.\n const normalizedFileName = path.basename(fileName)\n\n const assetFilePath = getAssetFilePath(normalizedFileName, fileType, outDir)\n await withRetry(async () => {\n const response = await request({\n url: url,\n maxRedirects: 5,\n timeout: {connect: CONNECTION_TIMEOUT, socket: READ_TIMEOUT},\n stream: true,\n })\n\n debug('Received asset %s with status code %d', normalizedFileName, response?.statusCode)\n\n await pipeline(response.body, createWriteStream(assetFilePath))\n })\n}\n\nfunction getAssetFilePath(fileName: string, fileType: string, outDir: string): string {\n // Set assetFilePath if we are downloading an asset file.\n // If it's a JSON document, assetFilePath will be an empty string.\n let assetFilePath = ''\n if (fileType === 'image') {\n assetFilePath = path.join(outDir, 'images', fileName)\n } else if (fileType === 'file') {\n assetFilePath = path.join(outDir, 'files', fileName)\n }\n\n return assetFilePath\n}\n\nexport default downloadAsset\n","import {getIt, type MiddlewareResponse} from 'get-it'\n// eslint-disable-next-line import/extensions\nimport {keepAlive, promise} from 'get-it/middleware'\n\nimport debug from './debug'\nimport withRetry from './withRetry'\n\nconst CONNECTION_TIMEOUT = 15 * 1000 // 15 seconds\nconst READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes\n\nconst request = getIt([keepAlive(), promise()])\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nasync function downloadDocument(url: string): Promise<any> {\n const response = await withRetry<MiddlewareResponse>(() =>\n request({\n url,\n maxRedirects: 5,\n timeout: {connect: CONNECTION_TIMEOUT, socket: READ_TIMEOUT},\n }),\n )\n\n debug('Received document from %s with status code %d', url, response?.statusCode)\n\n return response.body\n}\n\nexport default downloadDocument\n","import {Readable} from 'node:stream'\n\nimport {type QueryParams, type SanityClient} from '@sanity/client'\n\ntype File = {\n name: string\n url: string\n type: string\n}\n\ntype GetBackupResponse = {\n createdAt: string\n totalFiles: number\n files: File[]\n nextCursor?: string\n}\n\nclass PaginatedGetBackupStream extends Readable {\n private cursor = ''\n private readonly client: SanityClient\n private readonly projectId: string\n private readonly datasetName: string\n private readonly backupId: string\n private readonly token: string\n public totalFiles = 0\n\n constructor(\n client: SanityClient,\n projectId: string,\n datasetName: string,\n backupId: string,\n token: string,\n ) {\n super({objectMode: true})\n this.client = client\n this.projectId = projectId\n this.datasetName = datasetName\n this.backupId = backupId\n this.token = token\n }\n\n async _read(): Promise<void> {\n try {\n const data = await this.fetchNextBackupPage()\n\n // Set totalFiles when it's fetched for the first time\n if (this.totalFiles === 0) {\n this.totalFiles = data.totalFiles\n }\n\n data.files.forEach((file: File) => this.push(file))\n\n if (typeof data.nextCursor === 'string' && data.nextCursor !== '') {\n this.cursor = data.nextCursor\n } else {\n // No more pages left to fetch.\n this.push(null)\n }\n } catch (err) {\n this.destroy(err as Error)\n }\n }\n\n // fetchNextBackupPage fetches the next page of backed up files from the backup API.\n async fetchNextBackupPage(): Promise<GetBackupResponse> {\n const query: QueryParams = this.cursor === '' ? 
{} : {nextCursor: this.cursor}\n\n try {\n return await this.client.request({\n headers: {Authorization: `Bearer ${this.token}`},\n uri: `/projects/${this.projectId}/datasets/${this.datasetName}/backups/${this.backupId}`,\n query,\n })\n } catch (error) {\n // It can be clearer to pull this logic out in a common error handling function for re-usability.\n let msg = error.statusCode ? error.response.body.message : error.message\n\n // If no message can be extracted, print the whole error.\n if (msg === undefined) {\n msg = String(error)\n }\n throw new Error(`Downloading dataset backup failed: ${msg}`)\n }\n }\n}\n\nexport {PaginatedGetBackupStream}\nexport type {File, GetBackupResponse}\n","import {type CliOutputter} from '@sanity/cli'\nimport prettyMs from 'pretty-ms'\n\ntype ProgressEvent = {\n step: string\n update?: boolean\n current?: number\n total?: number\n}\n\ninterface ProgressSpinner {\n set: (progress: ProgressEvent) => void\n update: (progress: ProgressEvent) => void\n succeed: () => void\n fail: () => void\n}\n\nconst newProgress = (output: CliOutputter, startStep: string): ProgressSpinner => {\n let spinner = output.spinner(startStep).start()\n let lastProgress: ProgressEvent = {step: startStep}\n let start = Date.now()\n\n const print = (progress: ProgressEvent) => {\n const elapsed = prettyMs(Date.now() - start)\n if (progress.current && progress.current > 0 && progress.total && progress.total > 0) {\n spinner.text = `${progress.step} (${progress.current}/${progress.total}) [${elapsed}]`\n } else {\n spinner.text = `${progress.step} [${elapsed}]`\n }\n }\n\n return {\n set: (progress: ProgressEvent) => {\n if (progress.step !== lastProgress.step) {\n print(lastProgress) // Print the last progress before moving on\n spinner.succeed()\n spinner = output.spinner(progress.step).start()\n start = Date.now()\n } else if (progress.step === lastProgress.step && progress.update) {\n print(progress)\n }\n lastProgress = progress\n },\n update: (progress: ProgressEvent) => {\n print(progress)\n lastProgress = progress\n },\n succeed: () => {\n spinner.succeed()\n start = Date.now()\n },\n fail: () => {\n spinner.fail()\n start = Date.now()\n },\n }\n}\n\nexport default newProgress\n","function humanFileSize(size: number): string {\n const i = size == 0 ? 
0 : Math.floor(Math.log(size) / Math.log(1024))\n return `${(size / Math.pow(1024, i)).toFixed(2)} ${['B', 'kB', 'MB', 'GB', 'TB'][i]}`\n}\n\nexport default humanFileSize\n","function isPathDirName(filepath: string): boolean {\n // Check if the path has an extension, commonly indicating a file\n return !/\\.\\w+$/.test(filepath)\n}\n\nexport default isPathDirName\n","import {createWriteStream, existsSync, mkdirSync} from 'node:fs'\nimport {mkdtemp} from 'node:fs/promises'\nimport {tmpdir} from 'node:os'\nimport path from 'node:path'\nimport {finished} from 'node:stream/promises'\n\nimport {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n type SanityClient,\n} from '@sanity/cli'\nimport {absolutify} from '@sanity/util/fs'\nimport {Mutex} from 'async-mutex'\nimport createDebug from 'debug'\nimport {isString} from 'lodash'\nimport prettyMs from 'pretty-ms'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport archiveDir from '../../actions/backup/archiveDir'\nimport chooseBackupIdPrompt from '../../actions/backup/chooseBackupIdPrompt'\nimport cleanupTmpDir from '../../actions/backup/cleanupTmpDir'\nimport downloadAsset from '../../actions/backup/downloadAsset'\nimport downloadDocument from '../../actions/backup/downloadDocument'\nimport {type File, PaginatedGetBackupStream} from '../../actions/backup/fetchNextBackupPage'\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport newProgress from '../../actions/backup/progressSpinner'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport humanFileSize from '../../util/humanFileSize'\nimport isPathDirName from '../../util/isPathDirName'\nimport {defaultApiVersion} from './backupGroup'\n\nconst debug = createDebug('sanity:backup')\n\nconst DEFAULT_DOWNLOAD_CONCURRENCY = 10\nconst MAX_DOWNLOAD_CONCURRENCY = 24\n\ninterface DownloadBackupOptions {\n projectId: string\n datasetName: string\n token: string\n backupId: string\n outDir: string\n outFileName: string\n overwrite: boolean\n concurrency: number\n}\n\nconst helpText = `\nOptions\n --backup-id <string> The backup ID to download. (required)\n --out <string> The file or directory path the backup should download to.\n --overwrite Allows overwriting of existing backup file.\n --concurrency <num> Concurrent number of backup item downloads. 
(max: 24)\n\nExamples\n sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-1\n sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-2 --out /path/to/file\n sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-3 --out /path/to/file --overwrite\n`\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .options('backup-id', {type: 'string'})\n .options('out', {type: 'string'})\n .options('concurrency', {type: 'number', default: DEFAULT_DOWNLOAD_CONCURRENCY})\n .options('overwrite', {type: 'boolean', default: false}).argv\n}\n\nconst downloadBackupCommand: CliCommandDefinition = {\n name: 'download',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'Download a dataset backup to a local file.',\n helpText,\n // eslint-disable-next-line max-statements\n action: async (args, context) => {\n const {output, chalk} = context\n const [client, opts] = await prepareBackupOptions(context, args)\n const {projectId, datasetName, backupId, outDir, outFileName} = opts\n\n // If any of the output path or file name is empty, cancel the operation.\n if (outDir === '' || outFileName === '') {\n output.print('Operation cancelled.')\n return\n }\n const outFilePath = path.join(outDir, outFileName)\n\n output.print('╭───────────────────────────────────────────────────────────╮')\n output.print('│ │')\n output.print('│ Downloading backup for: │')\n output.print(`│ ${chalk.bold('projectId')}: ${chalk.cyan(projectId).padEnd(56)} │`)\n output.print(`│ ${chalk.bold('dataset')}: ${chalk.cyan(datasetName).padEnd(58)} │`)\n output.print(`│ ${chalk.bold('backupId')}: ${chalk.cyan(backupId).padEnd(56)} │`)\n output.print('│ │')\n output.print('╰───────────────────────────────────────────────────────────╯')\n output.print('')\n output.print(`Downloading backup to \"${chalk.cyan(outFilePath)}\"`)\n\n const start = Date.now()\n const progressSpinner = newProgress(output, 'Setting up backup environment...')\n\n // Create a unique temporary directory to store files before bundling them into the archive at outputPath.\n // Temporary directories are normally deleted at the end of backup process, any unexpected exit may leave them\n // behind, hence it is important to create a unique directory for each attempt.\n const tmpOutDir = await mkdtemp(path.join(tmpdir(), `sanity-backup-`))\n\n // Create required directories if they don't exist.\n for (const dir of [outDir, path.join(tmpOutDir, 'images'), path.join(tmpOutDir, 'files')]) {\n mkdirSync(dir, {recursive: true})\n }\n\n debug('Writing to temporary directory %s', tmpOutDir)\n const tmpOutDocumentsFile = path.join(tmpOutDir, 'data.ndjson')\n\n // Handle concurrent writes to the same file using mutex.\n const docOutStream = createWriteStream(tmpOutDocumentsFile)\n const docWriteMutex = new Mutex()\n\n try {\n const backupFileStream = new PaginatedGetBackupStream(\n client,\n opts.projectId,\n opts.datasetName,\n opts.backupId,\n opts.token,\n )\n\n const files: File[] = []\n let i = 0\n for await (const file of backupFileStream) {\n files.push(file)\n i++\n progressSpinner.set({\n step: `Reading backup files...`,\n update: true,\n current: i,\n total: backupFileStream.totalFiles,\n })\n }\n\n let totalItemsDownloaded = 0\n // This is dynamically imported because this module is ESM only and this file gets compiled to CJS at this time.\n const {default: pMap} = await import('p-map')\n await pMap(\n files,\n async (file: File) => {\n if (file.type === 'file' || 
file.type === 'image') {\n await downloadAsset(file.url, file.name, file.type, tmpOutDir)\n } else {\n const doc = await downloadDocument(file.url)\n await docWriteMutex.runExclusive(() => {\n docOutStream.write(`${doc}\\n`)\n })\n }\n\n totalItemsDownloaded += 1\n progressSpinner.set({\n step: `Downloading documents and assets...`,\n update: true,\n current: totalItemsDownloaded,\n total: backupFileStream.totalFiles,\n })\n },\n {concurrency: opts.concurrency},\n )\n } catch (error) {\n progressSpinner.fail()\n const {message} = parseApiErr(error)\n throw new Error(`Downloading dataset backup failed: ${message}`)\n }\n\n docOutStream.end()\n await finished(docOutStream)\n\n progressSpinner.set({step: `Archiving files into a tarball...`, update: true})\n try {\n await archiveDir(tmpOutDir, outFilePath, (processedBytes: number) => {\n progressSpinner.update({\n step: `Archiving files into a tarball, ${humanFileSize(processedBytes)} bytes written...`,\n })\n })\n } catch (err) {\n progressSpinner.fail()\n throw new Error(`Archiving backup failed: ${err.message}`)\n }\n\n progressSpinner.set({\n step: `Cleaning up temporary files at ${chalk.cyan(`${tmpOutDir}`)}`,\n })\n await cleanupTmpDir(tmpOutDir)\n\n progressSpinner.set({\n step: `Backup download complete [${prettyMs(Date.now() - start)}]`,\n })\n progressSpinner.succeed()\n },\n}\n\n// prepareBackupOptions validates backup options from CLI and prepares Client and DownloadBackupOptions.\nasync function prepareBackupOptions(\n context: CliCommandContext,\n args: CliCommandArguments,\n): Promise<[SanityClient, DownloadBackupOptions]> {\n const flags = await parseCliFlags(args)\n const [dataset] = args.argsWithoutOptions\n const {prompt, workDir} = context\n const {projectId, datasetName, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n const {token} = client.config()\n if (!isString(token) || token.length < 1) {\n throw new Error(`token is missing`)\n }\n\n if (!isString(datasetName) || datasetName.length < 1) {\n throw new Error(`dataset ${datasetName} must be a valid dataset name`)\n }\n\n const backupId = String(flags['backup-id'] || (await chooseBackupIdPrompt(context, datasetName)))\n if (backupId.length < 1) {\n throw new Error(`backup-id ${flags['backup-id']} should be a valid string`)\n }\n\n if ('concurrency' in flags) {\n if (flags.concurrency < 1 || flags.concurrency > MAX_DOWNLOAD_CONCURRENCY) {\n throw new Error(`concurrency should be in 1 to ${MAX_DOWNLOAD_CONCURRENCY} range`)\n }\n }\n\n const defaultOutFileName = `${datasetName}-backup-${backupId}.tar.gz`\n let out = await (async (): Promise<string> => {\n if (flags.out !== undefined) {\n // Rewrite the output path to an absolute path, if it is not already.\n return absolutify(flags.out)\n }\n\n const input = await prompt.single({\n type: 'input',\n message: 'Output path:',\n default: path.join(workDir, defaultOutFileName),\n filter: absolutify,\n })\n return input\n })()\n\n // If path is a directory name, then add a default file name to the path.\n if (isPathDirName(out)) {\n out = path.join(out, defaultOutFileName)\n }\n\n // If the file already exists, ask for confirmation if it should be overwritten.\n if (!flags.overwrite && existsSync(out)) {\n const shouldOverwrite = await prompt.single({\n type: 'confirm',\n message: `File \"${out}\" already exists, would you like to overwrite it?`,\n default: false,\n })\n\n // If the user does not want to overwrite the file, set the output path to an empty string.\n // This should be handled by 
the caller of this function as cancel operation.\n if (!shouldOverwrite) {\n out = ''\n }\n }\n\n return [\n client,\n {\n projectId,\n datasetName,\n backupId,\n token,\n outDir: path.dirname(out),\n outFileName: path.basename(out),\n overwrite: flags.overwrite,\n concurrency: flags.concurrency || DEFAULT_DOWNLOAD_CONCURRENCY,\n },\n ]\n}\n\nexport default downloadBackupCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport {defaultApiVersion} from './backupGroup'\n\nconst helpText = `\nExamples\n sanity backup enable DATASET_NAME\n`\n\nconst enableDatasetBackupCommand: CliCommandDefinition = {\n name: 'enable',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'Enable backup for a dataset.',\n helpText,\n action: async (args, context) => {\n const {output, chalk} = context\n const [dataset] = args.argsWithoutOptions\n const {projectId, datasetName, token, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n try {\n await client.request({\n method: 'PUT',\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`,\n body: {\n enabled: true,\n },\n })\n\n output.print(\n `${chalk.green(\n `Enabled backups for dataset ${datasetName}.\\nPlease note that it may take up to 24 hours before the first backup is created.\\n`,\n )}`,\n )\n\n output.print(\n `${chalk.bold(`Retention policies may apply depending on your plan and agreement.\\n`)}`,\n )\n } catch (error) {\n const {message} = parseApiErr(error)\n output.print(`${chalk.red(`Enabling dataset backup failed: ${message}`)}\\n`)\n }\n },\n}\nexport default enableDatasetBackupCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport {Table} from 'console-table-printer'\nimport {isAfter, isValid, lightFormat, parse} from 'date-fns'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport {defaultApiVersion} from './backupGroup'\n\nconst DEFAULT_LIST_BACKUP_LIMIT = 30\n\ninterface ListDatasetBackupFlags {\n before?: string\n after?: string\n limit?: string\n}\n\ntype ListBackupRequestQueryParams = {\n before?: string\n after?: string\n limit: string\n}\n\ntype ListBackupResponse = {\n backups: ListBackupResponseItem[]\n}\n\ntype ListBackupResponseItem = {\n id: string\n createdAt: string\n}\n\nconst helpText = `\nOptions\n --limit <int> Maximum number of backups returned. Default 30.\n --after <string> Only return backups after this date (inclusive)\n --before <string> Only return backups before this date (exclusive). 
Cannot be younger than <after> if specified.\n\nExamples\n sanity backup list DATASET_NAME\n sanity backup list DATASET_NAME --limit 50\n sanity backup list DATASET_NAME --after 2024-01-31 --limit 10\n sanity backup list DATASET_NAME --after 2024-01-31 --before 2024-01-10\n`\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .options('after', {type: 'string'})\n .options('before', {type: 'string'})\n .options('limit', {type: 'number', default: DEFAULT_LIST_BACKUP_LIMIT, alias: 'l'}).argv\n}\n\nconst listDatasetBackupCommand: CliCommandDefinition<ListDatasetBackupFlags> = {\n name: 'list',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'List available backups for a dataset.',\n helpText,\n action: async (args, context) => {\n const {output, chalk} = context\n const flags = await parseCliFlags(args)\n const [dataset] = args.argsWithoutOptions\n\n const {projectId, datasetName, token, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n const query: ListBackupRequestQueryParams = {limit: DEFAULT_LIST_BACKUP_LIMIT.toString()}\n if (flags.limit) {\n // We allow limit up to Number.MAX_SAFE_INTEGER to leave it for server-side validation,\n // while still sending sensible value in limit string.\n if (flags.limit < 1 || flags.limit > Number.MAX_SAFE_INTEGER) {\n throw new Error(\n `Parsing --limit: must be an integer between 1 and ${Number.MAX_SAFE_INTEGER}`,\n )\n }\n query.limit = flags.limit.toString()\n }\n\n if (flags.before || flags.after) {\n try {\n const parsedBefore = processDateFlags(flags.before)\n const parsedAfter = processDateFlags(flags.after)\n\n if (parsedAfter && parsedBefore && isAfter(parsedAfter, parsedBefore)) {\n throw new Error('--after date must be before --before')\n }\n\n query.before = flags.before\n query.after = flags.after\n } catch (err) {\n throw new Error(`Parsing date flags: ${err}`)\n }\n }\n\n let response\n try {\n response = await client.request<ListBackupResponse>({\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/backups`,\n query: {...query},\n })\n } catch (error) {\n const {message} = parseApiErr(error)\n output.error(`${chalk.red(`List dataset backup failed: ${message}`)}\\n`)\n }\n\n if (response && response.backups) {\n if (response.backups.length === 0) {\n output.print('No backups found.')\n return\n }\n\n const table = new Table({\n columns: [\n {name: 'resource', title: 'RESOURCE', alignment: 'left'},\n {name: 'createdAt', title: 'CREATED AT', alignment: 'left'},\n {name: 'backupId', title: 'BACKUP ID', alignment: 'left'},\n ],\n })\n\n response.backups.forEach((backup: ListBackupResponseItem) => {\n const {id, createdAt} = backup\n table.addRow({\n resource: 'Dataset',\n createdAt: lightFormat(Date.parse(createdAt), 'yyyy-MM-dd HH:mm:ss'),\n backupId: id,\n })\n })\n\n table.printTable()\n }\n },\n}\n\nfunction processDateFlags(date: string | undefined): Date | undefined {\n if (!date) return undefined\n const parsedDate = parse(date, 'yyyy-MM-dd', new Date())\n if (isValid(parsedDate)) {\n return parsedDate\n }\n\n throw new Error(`Invalid ${date} date format. 
Use YYYY-MM-DD`)\n}\n\nexport default listDatasetBackupCommand\n","import type {CliCommandArguments, CliCommandContext, CliCommandDefinition} from '@sanity/cli'\nimport {BuildSanityStudioCommandFlags} from '../../actions/build/buildAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --auto-updates / --no-auto-updates Enable/disable auto updates of studio versions\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n --schema-path If you are storing your schemas in a different path than the default one, you need to specify it here.\n\nExamples\n sanity build\n sanity build --no-minify --source-maps\n`\n\nconst buildCommand: CliCommandDefinition = {\n name: 'build',\n signature: '[OUTPUT_DIR]',\n description: 'Builds the Sanity Studio configuration into a static bundle',\n action: async (\n args: CliCommandArguments<BuildSanityStudioCommandFlags>,\n context: CliCommandContext,\n overrides?: {basePath?: string},\n ) => {\n const buildAction = await getBuildAction()\n\n return buildAction(args, context, overrides)\n },\n helpText,\n}\n\nasync function getBuildAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions\n const mod: typeof import('../../actions/build/buildAction') = require('../../actions/build/buildAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/build/buildAction')\n\n return mod.default\n}\n\nexport default buildCommand\n","import url from 'node:url'\n\nimport {type CliCommandContext, type CliPrompter} from '@sanity/cli'\nimport logSymbols from 'log-symbols'\nimport oneline from 'oneline'\n\nconst wildcardReplacement = 'a-wild-card-r3pl4c3m3n7-a'\nconst portReplacement = ':7777777'\n\ninterface AddCorsOriginFlags {\n credentials?: boolean\n}\n\nexport async function addCorsOrigin(\n givenOrigin: string,\n flags: AddCorsOriginFlags,\n context: CliCommandContext,\n): Promise<boolean> {\n const {apiClient, prompt, output} = context\n const origin = await (givenOrigin\n ? filterAndValidateOrigin(givenOrigin)\n : promptForOrigin(prompt))\n\n const hasWildcard = origin.includes('*')\n if (hasWildcard && !(await promptForWildcardConfirmation(origin, context))) {\n return false\n }\n const allowCredentials =\n typeof flags.credentials === 'undefined'\n ? await promptForCredentials(hasWildcard, context)\n : Boolean(flags.credentials)\n\n if (givenOrigin !== origin) {\n output.print(`Normalized origin to ${origin}`)\n }\n\n const client = apiClient({\n requireUser: true,\n requireProject: true,\n })\n\n await client.request({\n method: 'POST',\n url: '/cors',\n body: {origin, allowCredentials},\n maxRedirects: 0,\n })\n\n return true\n}\n\nfunction promptForCredentials(hasWildcard: boolean, context: CliCommandContext): Promise<string> {\n const {prompt, output, chalk} = context\n\n output.print('')\n if (hasWildcard) {\n output.print(oneline`\n ${chalk.yellow(`${logSymbols.warning} Warning:`)}\n We ${chalk.red(chalk.underline('HIGHLY'))} recommend NOT allowing credentials\n on origins containing wildcards. If you are logged in to a studio, people will\n be able to send requests ${chalk.underline('on your behalf')} to read and modify\n data, from any matching origin. 
Please tread carefully!\n `)\n } else {\n output.print(oneline`\n ${chalk.yellow(`${logSymbols.warning} Warning:`)}\n Should this origin be allowed to send requests using authentication tokens or\n session cookies? Be aware that any script on this origin will be able to send\n requests ${chalk.underline('on your behalf')} to read and modify data if you\n are logged in to a Sanity studio. If this origin hosts a studio, you will need\n this, otherwise you should probably answer \"No\" (n).\n `)\n }\n\n output.print('')\n\n return prompt.single({\n type: 'confirm',\n message: oneline`\n Allow credentials to be sent from this origin? Please read the warning above.\n `,\n default: false,\n })\n}\n\nfunction promptForWildcardConfirmation(\n origin: string,\n context: CliCommandContext,\n): Promise<boolean> {\n const {prompt, output, chalk} = context\n\n output.print('')\n output.print(chalk.yellow(`${logSymbols.warning} Warning: Examples of allowed origins:`))\n\n if (origin === '*') {\n output.print('- http://www.some-malicious.site')\n output.print('- https://not.what-you-were-expecting.com')\n output.print('- https://high-traffic-site.com')\n output.print('- http://192.168.1.1:8080')\n } else {\n output.print(`- ${origin.replace(/:\\*/, ':1234').replace(/\\*/g, 'foo')}`)\n output.print(`- ${origin.replace(/:\\*/, ':3030').replace(/\\*/g, 'foo.bar')}`)\n }\n\n output.print('')\n\n return prompt.single({\n type: 'confirm',\n message: oneline`\n Using wildcards can be ${chalk.red('risky')}.\n Are you ${chalk.underline('absolutely sure')} you want to allow this origin?`,\n default: false,\n })\n}\n\nfunction promptForOrigin(prompt: CliPrompter): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Origin (including protocol):',\n filter: filterOrigin,\n validate: (origin) => validateOrigin(origin, origin),\n })\n}\n\nfunction filterOrigin(origin: string): string | null {\n if (origin === '*' || origin === 'file:///*' || origin === 'null') {\n return origin\n }\n\n try {\n const example = origin\n .replace(/([^:])\\*/g, `$1${wildcardReplacement}`)\n .replace(/:\\*/, portReplacement)\n\n const parsed = url.parse(example)\n let host = parsed.host || ''\n if (/^https?:$/.test(parsed.protocol || '')) {\n host = host.replace(/:(80|443)$/, '')\n }\n\n host = host.replace(portReplacement, ':*').replace(new RegExp(wildcardReplacement, 'g'), '*')\n\n return `${parsed.protocol}//${host}`\n } catch (err) {\n return null\n }\n}\n\nfunction validateOrigin(origin: string | null, givenOrigin: string): true | string {\n if (origin === '*' || origin === 'file:///*' || origin === 'null') {\n return true\n }\n\n try {\n url.parse(origin || (0 as any as string)) // Use 0 to trigger error for unset values\n return true\n } catch (err) {\n // Fall-through to error\n }\n\n if (/^file:\\/\\//.test(givenOrigin)) {\n return `Only a local file wildcard is currently allowed: file:///*`\n }\n\n return `Invalid origin \"${givenOrigin}\", must include protocol (https://some.host)`\n}\n\nfunction filterAndValidateOrigin(givenOrigin: string): string {\n const origin = filterOrigin(givenOrigin)\n const result = validateOrigin(origin, givenOrigin)\n if (result !== true) {\n throw new Error(result)\n }\n\n if (!origin) {\n throw new Error('Invalid origin')\n }\n\n return origin\n}\n","import fs from 'node:fs'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\n\nimport {addCorsOrigin} from '../../actions/cors/addCorsOrigin'\n\nconst helpText = `\nOptions\n --credentials Allow 
credentials (token/cookie) to be sent from this origin\n --no-credentials Disallow credentials (token/cookie) to be sent from this origin\n\nExamples\n sanity cors add\n sanity cors add http://localhost:3000 --no-credentials\n`\n\nconst addCorsOriginCommand: CliCommandDefinition = {\n name: 'add',\n group: 'cors',\n signature: '[ORIGIN]',\n helpText,\n description: 'Allow a new origin to use your project API through CORS',\n action: async (args, context) => {\n const {output} = context\n const [origin] = args.argsWithoutOptions\n\n if (!origin) {\n throw new Error('No origin specified, use `sanity cors add <origin-url>`')\n }\n\n const flags = args.extOptions\n\n // eslint-disable-next-line no-sync\n const isFile = fs.existsSync(path.join(process.cwd(), origin))\n if (isFile) {\n output.warn(`Origin \"${origin}?\" Remember to quote values (sanity cors add \"*\")`)\n }\n\n const success = await addCorsOrigin(origin, flags, context)\n if (success) {\n output.print('CORS origin added successfully')\n }\n },\n}\n\nexport default addCorsOriginCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst corsGroup: CliCommandGroupDefinition = {\n name: 'cors',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Configures CORS settings for Sanity projects',\n}\n\nexport default corsGroup\n","import {type CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type CorsOrigin} from './types'\n\nconst helpText = `\nExamples\n sanity cors delete\n sanity cors delete http://localhost:3000\n`\n\nconst deleteCorsOriginCommand: CliCommandDefinition = {\n name: 'delete',\n group: 'cors',\n signature: '[ORIGIN]',\n helpText,\n description: 'Delete an existing CORS-origin from your project',\n action: async (args, context) => {\n const {output, apiClient} = context\n const [origin] = args.argsWithoutOptions\n const client = apiClient({requireUser: true, requireProject: true})\n const originId = await promptForOrigin(origin, context)\n try {\n await client.request({method: 'DELETE', uri: `/cors/${originId}`})\n output.print('Origin deleted')\n } catch (err) {\n throw new Error(`Origin deletion failed:\\n${err.message}`)\n }\n },\n}\n\nexport default deleteCorsOriginCommand\n\nasync function promptForOrigin(specified: string | undefined, context: CliCommandContext) {\n const specifiedOrigin = specified && specified.toLowerCase()\n const {prompt, apiClient} = context\n const client = apiClient({requireUser: true, requireProject: true})\n\n const origins = await client.request<CorsOrigin[]>({url: '/cors'})\n if (specifiedOrigin) {\n const selected = origins.filter((origin) => origin.origin.toLowerCase() === specifiedOrigin)[0]\n if (!selected) {\n throw new Error(`Origin \"${specified} not found\"`)\n }\n\n return selected.id\n }\n\n const choices = origins.map((origin) => ({value: origin.id, name: origin.origin}))\n return prompt.single({\n message: 'Select origin to delete',\n type: 'list',\n choices,\n })\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {type CorsOrigin} from './types'\n\nconst helpText = `\nExamples\n sanity cors list\n`\n\nconst listCorsOriginsCommand: CliCommandDefinition = {\n name: 'list',\n group: 'cors',\n signature: '',\n helpText,\n description: 'List all origins allowed to access the API for this project',\n action: async (args, context) => {\n const {output} = context\n const {apiClient} = context\n const client = apiClient({requireUser: true, requireProject: true})\n const origins = await 
client.request<CorsOrigin[]>({url: '/cors'})\n output.print(origins.map((origin) => origin.origin).join('\\n'))\n },\n}\n\nexport default listCorsOriginsCommand\n","const MAX_DATASET_NAME_LENGTH = 64\n\nexport function validateDatasetAliasName(datasetName: string): false | string {\n if (!datasetName) {\n return 'Alias name is missing'\n }\n\n const name = `${datasetName}`\n\n if (name.toLowerCase() !== name) {\n return 'Alias name must be all lowercase characters'\n }\n\n if (name.length < 2) {\n return 'Alias name must be at least two characters long'\n }\n\n if (name.length > MAX_DATASET_NAME_LENGTH) {\n return `Alias name must be at most ${MAX_DATASET_NAME_LENGTH} characters`\n }\n\n if (!/^[a-z0-9~]/.test(name)) {\n return 'Alias name must start with a letter or a number'\n }\n\n if (!/^[a-z0-9~][-_a-z0-9]+$/.test(name)) {\n return 'Alias name must only contain letters, numbers, dashes and underscores'\n }\n\n if (/[-_]$/.test(name)) {\n return 'Alias name must not end with a dash or an underscore'\n }\n\n return false\n}\n","import {type CliPrompter} from '@sanity/cli'\n\nimport {validateDatasetAliasName} from './validateDatasetAliasName'\n\nexport function promptForDatasetAliasName(\n prompt: CliPrompter,\n options: {message?: string; default?: string} = {},\n): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Alias name:',\n validate: (name) => {\n const err = validateDatasetAliasName(name)\n if (err) {\n return err\n }\n\n return true\n },\n ...options,\n })\n}\n","import {type SanityClient} from '@sanity/client'\n\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport {type DatasetAliasDefinition, type DatasetModificationResponse} from './types'\n\nexport const ALIAS_PREFIX = '~'\n\nexport function listAliases(client: SanityClient): Promise<DatasetAliasDefinition[]> {\n return client.request<DatasetAliasDefinition[]>({uri: '/aliases'})\n}\n\nexport function createAlias(\n client: SanityClient,\n aliasName: string,\n datasetName: string | null,\n): Promise<DatasetModificationResponse> {\n return modify(client, 'PUT', aliasName, datasetName ? {datasetName} : undefined)\n}\n\nexport function updateAlias(\n client: SanityClient,\n aliasName: string,\n datasetName: string | null,\n): Promise<DatasetModificationResponse> {\n return modify(client, 'PATCH', aliasName, datasetName ? 
{datasetName} : undefined)\n}\n\nexport function unlinkAlias(\n client: SanityClient,\n aliasName: string,\n): Promise<DatasetModificationResponse> {\n validateDatasetAliasName(aliasName)\n return modify(client, 'PATCH', `${aliasName}/unlink`, {})\n}\n\nexport function removeAlias(client: SanityClient, aliasName: string): Promise<{deleted: boolean}> {\n return modify(client, 'DELETE', aliasName)\n}\n\nfunction modify(\n client: SanityClient,\n method: string,\n aliasName: string,\n body?: {datasetName?: string},\n) {\n return client.request({method, uri: `/aliases/${aliasName}`, body})\n}\n","import {type CliCommandAction} from '@sanity/cli'\n\nimport {promptForDatasetAliasName} from '../../../actions/dataset/alias/promptForDatasetAliasName'\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport {promptForDatasetName} from '../../../actions/dataset/datasetNamePrompt'\nimport {validateDatasetName} from '../../../actions/dataset/validateDatasetName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nexport const createAliasHandler: CliCommandAction = async (args, context) => {\n const {apiClient, output, prompt} = context\n const [, alias, targetDataset] = args.argsWithoutOptions\n const client = apiClient()\n\n const nameError = alias && validateDatasetAliasName(alias)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const [datasets, aliases, projectFeatures] = await Promise.all([\n client.datasets.list().then((sets) => sets.map((ds) => ds.name)),\n aliasClient.listAliases(client).then((sets) => sets.map((ds) => ds.name)),\n client.request({uri: '/features'}),\n ])\n\n let aliasName = await (alias || promptForDatasetAliasName(prompt))\n let aliasOutputName = aliasName\n\n if (aliasName.startsWith(ALIAS_PREFIX)) {\n aliasName = aliasName.slice(1)\n } else {\n aliasOutputName = `${ALIAS_PREFIX}${aliasName}`\n }\n\n if (aliases.includes(aliasName)) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" already exists`)\n }\n\n if (targetDataset) {\n const datasetErr = validateDatasetName(targetDataset)\n if (datasetErr) {\n throw new Error(datasetErr)\n }\n }\n\n const datasetName = await (targetDataset || promptForDatasetName(prompt))\n if (datasetName && !datasets.includes(datasetName)) {\n throw new Error(`Dataset \"${datasetName}\" does not exist `)\n }\n\n const canCreateAlias = projectFeatures.includes('advancedDatasetManagement')\n if (!canCreateAlias) {\n throw new Error(`This project cannot create a dataset alias`)\n }\n\n try {\n await aliasClient.createAlias(client, aliasName, datasetName)\n output.print(\n `Dataset alias ${aliasOutputName} created ${\n datasetName && `and linked to ${datasetName}`\n } successfully`,\n )\n } catch (err) {\n throw new Error(`Dataset alias creation failed:\\n${err.message}`)\n }\n}\n","import {type CliCommandAction} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2)).option('force', {type: 'boolean'}).argv\n}\n\ninterface DeleteAliasFlags {\n force?: boolean\n}\n\nexport const deleteAliasHandler: CliCommandAction<DeleteAliasFlags> = async (args, context) => {\n const {apiClient, 
prompt, output} = context\n const [, ds] = args.argsWithoutOptions\n const {force} = await parseCliFlags(args)\n const client = apiClient()\n if (!ds) {\n throw new Error('Dataset alias name must be provided')\n }\n\n let aliasName = `${ds}`\n const dsError = validateDatasetAliasName(aliasName)\n if (dsError) {\n throw dsError\n }\n aliasName = aliasName.startsWith(ALIAS_PREFIX) ? aliasName.slice(1) : aliasName\n\n const [fetchedAliases] = await Promise.all([aliasClient.listAliases(client)])\n const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName)\n const message =\n linkedAlias && linkedAlias.datasetName\n ? `This dataset alias is linked to ${linkedAlias.datasetName}. `\n : ''\n\n if (force) {\n output.warn(`'--force' used: skipping confirmation, deleting alias \"${aliasName}\"`)\n } else {\n await prompt.single({\n type: 'input',\n message: `${message}Are you ABSOLUTELY sure you want to delete this dataset alias?\\n Type the name of the dataset alias to confirm delete: `,\n filter: (input) => `${input}`.trim(),\n validate: (input) => {\n return input === aliasName || 'Incorrect dataset alias name. Ctrl + C to cancel delete.'\n },\n })\n }\n\n return aliasClient.removeAlias(client, aliasName).then(() => {\n output.print('Dataset alias deleted successfully')\n })\n}\n","import {type CliCommandAction} from '@sanity/cli'\n\nimport {promptForDatasetAliasName} from '../../../actions/dataset/alias/promptForDatasetAliasName'\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport {promptForDatasetName} from '../../../actions/dataset/datasetNamePrompt'\nimport {validateDatasetName} from '../../../actions/dataset/validateDatasetName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nexport const linkAliasHandler: CliCommandAction = async (args, context) => {\n const {apiClient, output, prompt} = context\n const [, alias, targetDataset] = args.argsWithoutOptions\n const flags = args.extOptions\n const client = apiClient()\n\n const nameError = alias && validateDatasetAliasName(alias)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const [datasets, fetchedAliases] = await Promise.all([\n client.datasets.list().then((sets) => sets.map((ds) => ds.name)),\n aliasClient.listAliases(client),\n ])\n const aliases = fetchedAliases.map((da) => da.name)\n\n let aliasName = await (alias || promptForDatasetAliasName(prompt))\n let aliasOutputName = aliasName\n\n if (aliasName.startsWith(ALIAS_PREFIX)) {\n aliasName = aliasName.slice(1)\n } else {\n aliasOutputName = `${ALIAS_PREFIX}${aliasName}`\n }\n\n if (!aliases.includes(aliasName)) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" does not exist `)\n }\n\n const datasetName = await (targetDataset || promptForDatasetName(prompt))\n const datasetErr = validateDatasetName(datasetName)\n if (datasetErr) {\n throw new Error(datasetErr)\n }\n\n if (!datasets.includes(datasetName)) {\n throw new Error(`Dataset \"${datasetName}\" does not exist `)\n }\n\n const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName)\n\n if (linkedAlias && linkedAlias.datasetName) {\n if (linkedAlias.datasetName === datasetName) {\n throw new Error(`Dataset alias ${aliasOutputName} already linked to ${datasetName}`)\n }\n\n if (!flags.force) {\n await prompt.single({\n type: 'input',\n message: `This alias is linked to dataset <${linkedAlias.datasetName}>. 
Are you ABSOLUTELY sure you want to link this dataset alias to this dataset?\n \\n Type YES/NO: `,\n filter: (input) => `${input}`.toLowerCase(),\n validate: (input) => {\n return input === 'yes' || 'Ctrl + C to cancel dataset alias link.'\n },\n })\n }\n }\n\n try {\n await aliasClient.updateAlias(client, aliasName, datasetName)\n output.print(`Dataset alias ${aliasOutputName} linked to ${datasetName} successfully`)\n } catch (err) {\n throw new Error(`Dataset alias link failed:\\n${err.message}`)\n }\n}\n","import {type CliCommandAction} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {promptForDatasetAliasName} from '../../../actions/dataset/alias/promptForDatasetAliasName'\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\ninterface UnlinkFlags {\n force?: boolean\n}\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2)).option('force', {type: 'boolean'}).argv\n}\n\nexport const unlinkAliasHandler: CliCommandAction<UnlinkFlags> = async (args, context) => {\n const {apiClient, output, prompt} = context\n const [, alias] = args.argsWithoutOptions\n const {force} = await parseCliFlags(args)\n const client = apiClient()\n\n const nameError = alias && validateDatasetAliasName(alias)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const fetchedAliases = await aliasClient.listAliases(client)\n\n let aliasName = await (alias || promptForDatasetAliasName(prompt))\n let aliasOutputName = aliasName\n\n if (aliasName.startsWith(ALIAS_PREFIX)) {\n aliasName = aliasName.slice(1)\n } else {\n aliasOutputName = `${ALIAS_PREFIX}${aliasName}`\n }\n\n // get the current alias from the remote alias list\n const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName)\n if (!linkedAlias) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" does not exist`)\n }\n\n if (!linkedAlias.datasetName) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" is not linked to a dataset`)\n }\n\n if (force) {\n output.warn(`'--force' used: skipping confirmation, unlinking alias \"${aliasOutputName}\"`)\n } else {\n await prompt.single({\n type: 'input',\n message: `Are you ABSOLUTELY sure you want to unlink this alias from the \"${linkedAlias.datasetName}\" dataset?\n \\n Type YES/NO: `,\n filter: (input) => `${input}`.toLowerCase(),\n validate: (input) => {\n return input === 'yes' || 'Ctrl + C to cancel dataset alias unlink.'\n },\n })\n }\n\n try {\n const result = await aliasClient.unlinkAlias(client, aliasName)\n output.print(\n `Dataset alias ${aliasOutputName} unlinked from ${result.datasetName} successfully`,\n )\n } catch (err) {\n throw new Error(`Dataset alias unlink failed:\\n${err.message}`)\n }\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport oneline from 'oneline'\n\nimport {createAliasHandler} from './createAliasHandler'\nimport {deleteAliasHandler} from './deleteAliasHandler'\nimport {linkAliasHandler} from './linkAliasHandler'\nimport {unlinkAliasHandler} from './unlinkAliasHandler'\n\nconst helpText = `\nBelow are examples of the alias subcommand\n\nCreate Alias\n sanity dataset alias create\n sanity dataset alias create <alias-name>\n sanity dataset alias create <alias-name> <target-dataset>\n\nDelete Alias\n Options\n --force Skips security prompt and forces link command\n\n Usage\n 
sanity dataset alias delete <alias-name>\n sanity dataset alias delete <alias-name> --force\n\nLink Alias\n Options\n --force Skips security prompt and forces link command\n\n Usage\n sanity dataset alias link\n sanity dataset alias link <alias-name>\n sanity dataset alias link <alias-name> <target-dataset>\n sanity dataset alias link <alias-name> <target-dataset> --force\n\nUn-link Alias\n Options\n --force Skips security prompt and forces link command\n\n Usage\n sanity dataset alias unlink\n sanity dataset alias unlink <alias-name>\n sanity dataset alias unlink <alias-name> --force\n`\n\nconst aliasCommand: CliCommandDefinition = {\n name: 'alias',\n group: 'dataset',\n signature: 'SUBCOMMAND [ALIAS_NAME, TARGET_DATASET]',\n helpText,\n description: 'You can manage your dataset alias using this command.',\n action: async (args, context) => {\n const [verb] = args.argsWithoutOptions\n switch (verb) {\n case 'create':\n await createAliasHandler(args, context)\n break\n case 'delete':\n await deleteAliasHandler(args, context)\n break\n case 'unlink':\n await unlinkAliasHandler(args, context)\n break\n case 'link':\n await linkAliasHandler(args, context)\n break\n default:\n throw new Error(oneline`\n Invalid command provided. Available commands are: create, delete, link and unlink.\n For more guide run the help command 'sanity dataset alias --help'\n `)\n }\n },\n}\n\nexport default aliasCommand\n","import {type CliCommandContext} from '@sanity/cli'\nimport {Table} from 'console-table-printer'\nimport {formatDistance, formatDistanceToNow, parseISO} from 'date-fns'\n\ninterface ListFlags {\n offset?: number\n limit?: number\n}\n\ntype CopyDatasetListResponse = {\n id: string\n state: string\n createdAt: string\n updatedAt: string\n sourceDataset: string\n targetDataset: string\n withHistory: boolean\n}[]\n\nexport async function listDatasetCopyJobs(\n flags: ListFlags,\n context: CliCommandContext,\n): Promise<void> {\n const {apiClient, output, chalk} = context\n const client = apiClient()\n const projectId = client.config().projectId\n const query: {offset?: string; limit?: string} = {}\n let response\n\n if (flags.offset && flags.offset >= 0) {\n query.offset = `${flags.offset}`\n }\n if (flags.limit && flags.limit > 0) {\n query.limit = `${flags.limit}`\n }\n\n try {\n response = await client.request<CopyDatasetListResponse>({\n method: 'GET',\n uri: `/projects/${projectId}/datasets/copy`,\n query,\n })\n } catch (error) {\n if (error.statusCode) {\n output.error(`${chalk.red(`Dataset copy list failed:\\n${error.response.body.message}`)}\\n`)\n } else {\n output.error(`${chalk.red(`Dataset copy list failed:\\n${error.message}`)}\\n`)\n }\n }\n\n if (response && response.length > 0) {\n const table = new Table({\n title: 'Dataset copy jobs for this project in descending order',\n columns: [\n {name: 'id', title: 'Job ID', alignment: 'left'},\n {name: 'sourceDataset', title: 'Source Dataset', alignment: 'left'},\n {name: 'targetDataset', title: 'Target Dataset', alignment: 'left'},\n {name: 'state', title: 'State', alignment: 'left'},\n {name: 'withHistory', title: 'With history', alignment: 'left'},\n {name: 'timeStarted', title: 'Time started', alignment: 'left'},\n {name: 'timeTaken', title: 'Time taken', alignment: 'left'},\n ],\n })\n\n response.forEach((job) => {\n const {id, state, createdAt, updatedAt, sourceDataset, targetDataset, withHistory} = job\n\n let timeStarted = ''\n if (createdAt !== '') {\n timeStarted = formatDistanceToNow(parseISO(createdAt))\n }\n\n let timeTaken = 
''\n if (updatedAt !== '') {\n timeTaken = formatDistance(parseISO(updatedAt), parseISO(createdAt))\n }\n\n let color\n switch (state) {\n case 'completed':\n color = 'green'\n break\n case 'failed':\n color = 'red'\n break\n case 'pending':\n color = 'yellow'\n break\n default:\n color = ''\n }\n\n table.addRow(\n {\n id,\n state,\n withHistory,\n timeStarted: `${timeStarted} ago`,\n timeTaken,\n sourceDataset,\n targetDataset,\n },\n {color},\n )\n })\n\n table.printTable()\n } else {\n output.print(\"This project doesn't have any dataset copy jobs\")\n }\n}\n","import {type SanityClient} from '@sanity/client'\n\nexport const getClientUrl = (client: SanityClient, uri: string, useCdn = false): string => {\n const config = client.config()\n const base = useCdn ? config.cdnUrl : config.url\n return `${base}/${uri.replace(/^\\//, '')}`\n}\n","import {type CliCommandDefinition, type CliOutputter} from '@sanity/cli'\nimport {type SanityClient} from '@sanity/client'\nimport EventSource from '@sanity/eventsource'\nimport {Observable} from 'rxjs'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {promptForDatasetName} from '../../actions/dataset/datasetNamePrompt'\nimport {listDatasetCopyJobs} from '../../actions/dataset/listDatasetCopyJobs'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\nimport {debug} from '../../debug'\nimport {getClientUrl} from '../../util/getClientUrl'\n\nconst helpText = `\nOptions\n --detach Start the copy without waiting for it to finish\n --attach <job-id> Attach to the running copy process to show progress\n --skip-history Don't preserve document history on copy\n --list Lists all dataset copy jobs corresponding to a certain criteria.\n --offset Start position in the list of jobs. Default 0. With --list.\n --limit Maximum number of jobs returned. Default 10. Maximum 1000. 
With --list.\n\nExamples\n sanity dataset copy\n sanity dataset copy <source-dataset>\n sanity dataset copy <source-dataset> <target-dataset>\n sanity dataset copy --skip-history <source-dataset> <target-dataset>\n sanity dataset copy --detach <source-dataset> <target-dataset>\n sanity dataset copy --attach <job-id>\n sanity dataset copy --list\n sanity dataset copy --list --offset=2\n sanity dataset copy --list --offset=2 --limit=10\n`\n\ninterface CopyProgressStreamEvent {\n type: 'reconnect' | string\n progress?: number\n}\n\ninterface CopyDatasetFlags {\n 'list'?: boolean\n 'attach'?: string\n 'detach'?: boolean\n 'offset'?: number\n 'limit'?: number\n 'skip-history'?: boolean\n}\n\ninterface CopyDatasetResponse {\n jobId: string\n}\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .option('attach', {type: 'string'})\n .option('list', {type: 'boolean'})\n .option('limit', {type: 'number'})\n .option('offset', {type: 'number'})\n .option('skip-history', {type: 'boolean'})\n .option('detach', {type: 'boolean'}).argv\n}\n\nconst progress = (url: string) => {\n return new Observable<CopyProgressStreamEvent>((observer) => {\n let progressSource = new EventSource(url)\n let stopped = false\n\n function onError(error: unknown) {\n if (progressSource) {\n progressSource.close()\n }\n\n debug(`Error received: ${error}`)\n if (stopped) {\n return\n }\n observer.next({type: 'reconnect'})\n progressSource = new EventSource(url)\n }\n\n function onChannelError(error: MessageEvent) {\n stopped = true\n progressSource.close()\n observer.error(error)\n }\n\n function onMessage(event: MessageEvent) {\n const data = JSON.parse(event.data)\n if (data.state === 'failed') {\n debug('Job failed. Data: %o', event)\n observer.error(event)\n } else if (data.state === 'completed') {\n debug('Job succeeded. Data: %o', event)\n onComplete()\n } else {\n debug(`Job progressed. 
Data: %o`, event)\n observer.next(data)\n }\n }\n\n function onComplete() {\n progressSource.removeEventListener('error', onError)\n progressSource.removeEventListener('channel_error', onChannelError)\n progressSource.removeEventListener('job', onMessage)\n progressSource.removeEventListener('done', onComplete)\n progressSource.close()\n observer.complete()\n }\n\n progressSource.addEventListener('error', onError)\n progressSource.addEventListener('channel_error', onChannelError)\n progressSource.addEventListener('job', onMessage)\n progressSource.addEventListener('done', onComplete)\n })\n}\n\nconst followProgress = (\n jobId: string,\n client: SanityClient,\n output: CliOutputter,\n): Promise<void> => {\n let currentProgress = 0\n\n const spinner = output.spinner({}).start()\n const listenUrl = getClientUrl(client, `jobs/${jobId}/listen`)\n\n debug(`Listening to ${listenUrl}`)\n\n return new Promise((resolve, reject) => {\n progress(listenUrl).subscribe({\n next: (event) => {\n if (typeof event.progress === 'number') {\n currentProgress = event.progress\n }\n\n spinner.text = `Copy in progress: ${currentProgress}%`\n },\n error: (err) => {\n spinner.fail()\n reject(new Error(`${err.data}`))\n },\n complete: () => {\n spinner.succeed('Copy finished.')\n resolve()\n },\n })\n })\n}\n\nconst copyDatasetCommand: CliCommandDefinition<CopyDatasetFlags> = {\n name: 'copy',\n group: 'dataset',\n signature: '[SOURCE_DATASET] [TARGET_DATASET]',\n helpText,\n description:\n 'Manages dataset copying, including starting a new copy job, listing copy jobs and following the progress of a running copy job',\n action: async (args, context) => {\n const {apiClient, output, prompt, chalk} = context\n // Reparsing CLI flags for better control of binary flags\n const flags: CopyDatasetFlags = await parseCliFlags(args)\n const client = apiClient()\n\n if (flags.list) {\n await listDatasetCopyJobs(flags, context)\n return\n }\n\n if (flags.attach) {\n const jobId = flags.attach\n\n if (!jobId) {\n throw new Error('Please supply a jobId')\n }\n\n await followProgress(jobId, client, output)\n return\n }\n\n const [sourceDataset, targetDataset] = args.argsWithoutOptions\n const shouldSkipHistory = Boolean(flags['skip-history'])\n\n const nameError = sourceDataset && validateDatasetName(sourceDataset)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const existingDatasets = await client.datasets\n .list()\n .then((datasets) => datasets.map((ds) => ds.name))\n\n const sourceDatasetName = await (sourceDataset ||\n promptForDatasetName(prompt, {message: 'Source dataset name:'}))\n if (!existingDatasets.includes(sourceDatasetName)) {\n throw new Error(`Source dataset \"${sourceDatasetName}\" doesn't exist`)\n }\n\n const targetDatasetName = await (targetDataset ||\n promptForDatasetName(prompt, {message: 'Target dataset name:'}))\n if (existingDatasets.includes(targetDatasetName)) {\n throw new Error(`Target dataset \"${targetDatasetName}\" already exists`)\n }\n\n const err = validateDatasetName(targetDatasetName)\n if (err) {\n throw new Error(err)\n }\n\n try {\n const response = await client.request<CopyDatasetResponse>({\n method: 'PUT',\n uri: `/datasets/${sourceDatasetName}/copy`,\n body: {\n targetDataset: targetDatasetName,\n skipHistory: shouldSkipHistory,\n },\n })\n\n output.print(\n `Copying dataset ${chalk.green(sourceDatasetName)} to ${chalk.green(targetDatasetName)}...`,\n )\n\n if (!shouldSkipHistory) {\n output.print(\n `Note: You can run this command with flag '--skip-history'. 
The flag will reduce copy time in larger datasets.`,\n )\n }\n\n output.print(`Job ${chalk.green(response.jobId)} started`)\n\n if (flags.detach) {\n return\n }\n\n await followProgress(response.jobId, client, output)\n output.print(`Job ${chalk.green(response.jobId)} completed`)\n } catch (error) {\n if (error.statusCode) {\n output.print(`${chalk.red(`Dataset copying failed:\\n${error.response.body.message}`)}\\n`)\n } else {\n output.print(`${chalk.red(`Dataset copying failed:\\n${error.message}`)}\\n`)\n }\n }\n },\n}\n\nexport default copyDatasetCommand\n","import {type CliCommandDefinition, type CliOutputter, type CliPrompter} from '@sanity/cli'\n\nimport {promptForDatasetName} from '../../actions/dataset/datasetNamePrompt'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\nimport {debug} from '../../debug'\n\nconst helpText = `\nOptions\n --visibility <mode> Set visibility for this dataset (public/private)\n\nExamples\n sanity dataset create\n sanity dataset create <name>\n sanity dataset create <name> --visibility private\n`\n\nconst allowedModes = ['private', 'public', 'custom']\n\ninterface CreateFlags {\n visibility?: 'private' | 'public' | 'custom'\n}\n\nconst createDatasetCommand: CliCommandDefinition<CreateFlags> = {\n name: 'create',\n group: 'dataset',\n signature: '[NAME]',\n helpText,\n description: 'Create a new dataset within your project',\n action: async (args, context) => {\n const {apiClient, output, prompt} = context\n const flags = args.extOptions\n const [dataset] = args.argsWithoutOptions\n const client = apiClient()\n\n const nameError = dataset && validateDatasetName(dataset)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const [datasets, projectFeatures] = await Promise.all([\n client.datasets.list().then((sets) => sets.map((ds) => ds.name)),\n client.request({uri: '/features'}),\n ])\n\n if (flags.visibility && !allowedModes.includes(flags.visibility)) {\n throw new Error(`Visibility mode \"${flags.visibility}\" not allowed`)\n }\n\n const datasetName = await (dataset || promptForDatasetName(prompt))\n if (datasets.includes(datasetName)) {\n throw new Error(`Dataset \"${datasetName}\" already exists`)\n }\n\n const canCreatePrivate = projectFeatures.includes('privateDataset')\n debug('%s create private datasets', canCreatePrivate ? 'Can' : 'Cannot')\n\n const defaultAclMode = canCreatePrivate ? 
flags.visibility : 'public'\n const aclMode = await (defaultAclMode || promptForDatasetVisibility(prompt, output))\n\n try {\n await client.datasets.create(datasetName, {aclMode})\n output.print('Dataset created successfully')\n } catch (err) {\n throw new Error(`Dataset creation failed:\\n${err.message}`)\n }\n },\n}\n\nasync function promptForDatasetVisibility(prompt: CliPrompter, output: CliOutputter) {\n const mode = await prompt.single<'public' | 'private'>({\n type: 'list',\n message: 'Dataset visibility',\n choices: [\n {\n value: 'public',\n name: 'Public (world readable)',\n },\n {\n value: 'private',\n name: 'Private (Authenticated user or token needed)',\n },\n ],\n })\n\n if (mode === 'private') {\n output.print(\n 'Please note that while documents are private, assets (files and images) are still public\\n',\n )\n }\n\n return mode\n}\n\nexport default createDatasetCommand\n","export default {\n name: 'dataset',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages datasets, like create or delete, within projects',\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\n\nconst datasetVisibilityCommand: CliCommandDefinition = {\n name: 'visibility',\n group: 'dataset',\n helpText: '',\n signature: 'get/set [dataset] [mode]',\n description: 'Set visibility of a dataset',\n action: async (args, context) => {\n const {apiClient, output} = context\n const [action, ds, aclMode] = args.argsWithoutOptions\n const client = apiClient()\n\n if (!client.datasets.edit) {\n throw new Error('@sanity/cli must be upgraded first:\\n npm install -g @sanity/cli')\n }\n\n if (!action) {\n throw new Error('Action must be provided (get/set)')\n }\n\n if (!['set', 'get'].includes(action)) {\n throw new Error('Invalid action (only get/set allowed)')\n }\n\n if (!ds) {\n throw new Error('Dataset name must be provided')\n }\n\n if (action === 'set' && !aclMode) {\n throw new Error('Please provide a visibility mode (public/private)')\n }\n\n const dataset = `${ds}`\n const dsError = validateDatasetName(dataset)\n if (dsError) {\n throw new Error(dsError)\n }\n\n const current = (await client.datasets.list()).find((curr) => curr.name === dataset)\n\n if (!current) {\n throw new Error('Dataset not found')\n }\n\n if (action === 'get') {\n output.print(current.aclMode)\n return\n }\n\n if (current.aclMode === aclMode) {\n output.print(`Dataset already in \"${aclMode}\"-mode`)\n return\n }\n\n if (aclMode === 'private') {\n output.print(\n 'Please note that while documents are private, assets (files and images) are still public\\n',\n )\n }\n\n await client.datasets.edit(dataset, {aclMode: aclMode as 'public' | 'private'})\n output.print('Dataset visibility changed')\n },\n}\n\nexport default datasetVisibilityCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\n\nconst helpText = `\nOptions\n --force Do not prompt for delete confirmation - forcefully delete\n\nExamples\n sanity dataset delete\n sanity dataset delete my-dataset\n sanity dataset delete my-dataset --force\n`\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2)).option('force', {type: 'boolean'}).argv\n}\n\ninterface DeleteDatasetFlags {\n force?: boolean\n}\n\nconst deleteDatasetCommand: 
CliCommandDefinition<DeleteDatasetFlags> = {\n name: 'delete',\n group: 'dataset',\n helpText,\n signature: '[datasetName]',\n description: 'Delete a dataset within your project',\n action: async (args, context) => {\n const {apiClient, prompt, output} = context\n const {force} = await parseCliFlags(args)\n const [ds] = args.argsWithoutOptions\n if (!ds) {\n throw new Error('Dataset name must be provided')\n }\n\n const dataset = `${ds}`\n const dsError = validateDatasetName(dataset)\n if (dsError) {\n throw dsError\n }\n\n if (force) {\n output.warn(`'--force' used: skipping confirmation, deleting dataset \"${dataset}\"`)\n } else {\n await prompt.single({\n type: 'input',\n message:\n 'Are you ABSOLUTELY sure you want to delete this dataset?\\n Type the name of the dataset to confirm delete:',\n filter: (input) => `${input}`.trim(),\n validate: (input) => {\n return input === dataset || 'Incorrect dataset name. Ctrl + C to cancel delete.'\n },\n })\n }\n\n await apiClient().datasets.delete(dataset)\n output.print('Dataset deleted successfully')\n },\n}\n\nexport default deleteDatasetCommand\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition, type CliPrompter} from '@sanity/cli'\nimport exportDataset from '@sanity/export'\nimport {absolutify} from '@sanity/util/fs'\nimport prettyMs from 'pretty-ms'\n\nimport {chooseDatasetPrompt} from '../../actions/dataset/chooseDatasetPrompt'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\n\nconst noop = () => null\n\nconst helpText = `\nOptions\n --raw Extract only documents, without rewriting asset references\n --no-assets Export only non-asset documents and remove references to image assets\n --no-drafts Export only published versions of documents\n --no-compress Skips compressing tarball entries (still generates a gzip file)\n --types Defines which document types to export\n --overwrite Overwrite any file with the same name\n --asset-concurrency <num> Concurrent number of asset downloads\n --mode <stream|cursor> Uses a cursor when exporting, this might be more performant for larger datasets, but might not be as accurate if the dataset is being modified during export. 
Defaults to stream\n\nExamples\n sanity dataset export moviedb localPath.tar.gz\n sanity dataset export moviedb assetless.tar.gz --no-assets\n sanity dataset export staging staging.tar.gz --raw\n sanity dataset export staging staging.tar.gz --types products,shops\n`\n\ninterface ExportFlags {\n 'raw'?: boolean\n 'assets'?: boolean\n 'drafts'?: boolean\n 'compress'?: boolean\n 'overwrite'?: boolean\n 'types'?: string\n 'asset-concurrency'?: string\n 'mode'?: string\n}\n\ninterface ParsedExportFlags {\n raw?: boolean\n assets?: boolean\n drafts?: boolean\n compress?: boolean\n overwrite?: boolean\n types?: string[]\n assetConcurrency?: number\n mode?: string\n}\n\nfunction parseFlags(rawFlags: ExportFlags): ParsedExportFlags {\n const flags: ParsedExportFlags = {}\n if (rawFlags.types) {\n flags.types = `${rawFlags.types}`.split(',')\n }\n\n if (rawFlags['asset-concurrency']) {\n flags.assetConcurrency = parseInt(rawFlags['asset-concurrency'], 10)\n }\n\n if (typeof rawFlags.raw !== 'undefined') {\n flags.raw = Boolean(rawFlags.raw)\n }\n\n if (typeof rawFlags.assets !== 'undefined') {\n flags.assets = Boolean(rawFlags.assets)\n }\n\n if (typeof rawFlags.drafts !== 'undefined') {\n flags.drafts = Boolean(rawFlags.drafts)\n }\n\n if (typeof rawFlags.compress !== 'undefined') {\n flags.compress = Boolean(rawFlags.compress)\n }\n\n if (typeof rawFlags.overwrite !== 'undefined') {\n flags.overwrite = Boolean(rawFlags.overwrite)\n }\n\n if (typeof rawFlags.mode !== 'undefined') {\n flags.mode = rawFlags.mode\n }\n\n return flags\n}\n\ninterface ProgressEvent {\n step: string\n update?: boolean\n current: number\n total: number\n}\n\nconst exportDatasetCommand: CliCommandDefinition<ExportFlags> = {\n name: 'export',\n group: 'dataset',\n signature: '[NAME] [DESTINATION]',\n description: 'Export dataset to local filesystem as a gzipped tarball',\n helpText,\n action: async (args, context) => {\n const {apiClient, output, chalk, workDir, prompt} = context\n const client = apiClient()\n const [targetDataset, targetDestination] = args.argsWithoutOptions\n const flags = parseFlags(args.extOptions)\n\n let dataset = targetDataset ? 
`${targetDataset}` : null\n if (!dataset) {\n dataset = await chooseDatasetPrompt(context, {message: 'Select dataset to export'})\n }\n\n const dsError = validateDatasetName(dataset)\n if (dsError) {\n throw dsError\n }\n\n // Verify existence of dataset before trying to export from it\n const datasets = await client.datasets.list()\n if (!datasets.find((set) => set.name === dataset)) {\n throw new Error(`Dataset with name \"${dataset}\" not found`)\n }\n\n // Print information about what projectId and dataset it is being exported from\n const {projectId} = client.config()\n\n output.print('╭───────────────────────────────────────────────╮')\n output.print('│ │')\n output.print('│ Exporting from: │')\n output.print(`│ ${chalk.bold('projectId')}: ${chalk.cyan(projectId).padEnd(44)} │`)\n output.print(`│ ${chalk.bold('dataset')}: ${chalk.cyan(dataset).padEnd(46)} │`)\n output.print('│ │')\n output.print('╰───────────────────────────────────────────────╯')\n output.print('')\n\n let destinationPath = targetDestination\n if (!destinationPath) {\n destinationPath = await prompt.single({\n type: 'input',\n message: 'Output path:',\n default: path.join(workDir, `${dataset}.tar.gz`),\n filter: absolutify,\n })\n }\n\n const outputPath = await getOutputPath(destinationPath, dataset, prompt, flags)\n if (!outputPath) {\n output.print('Cancelled')\n return\n }\n\n // If we are dumping to a file, let the user know where it's at\n if (outputPath !== '-') {\n output.print(`Exporting dataset \"${chalk.cyan(dataset)}\" to \"${chalk.cyan(outputPath)}\"`)\n }\n\n let currentStep = 'Exporting documents...'\n let spinner = output.spinner(currentStep).start()\n const onProgress = (progress: ProgressEvent) => {\n if (progress.step !== currentStep) {\n spinner.succeed()\n spinner = output.spinner(progress.step).start()\n } else if (progress.step === currentStep && progress.update) {\n spinner.text = `${progress.step} (${progress.current}/${progress.total})`\n }\n\n currentStep = progress.step\n }\n\n const start = Date.now()\n try {\n await exportDataset({\n client,\n dataset,\n outputPath,\n onProgress,\n ...flags,\n })\n spinner.succeed()\n } catch (err) {\n spinner.fail()\n throw err\n }\n\n output.print(`Export finished (${prettyMs(Date.now() - start)})`)\n },\n}\n\n// eslint-disable-next-line complexity\nasync function getOutputPath(\n destination: string,\n dataset: string,\n prompt: CliPrompter,\n flags: ParsedExportFlags,\n) {\n if (destination === '-') {\n return '-'\n }\n\n const dstPath = path.isAbsolute(destination)\n ? destination\n : path.resolve(process.cwd(), destination)\n\n let dstStats = await fs.stat(dstPath).catch(noop)\n const looksLikeFile = dstStats ? dstStats.isFile() : path.basename(dstPath).indexOf('.') !== -1\n\n if (!dstStats) {\n const createPath = looksLikeFile ? path.dirname(dstPath) : dstPath\n\n await fs.mkdir(createPath, {recursive: true})\n }\n\n const finalPath = looksLikeFile ? 
dstPath : path.join(dstPath, `${dataset}.tar.gz`)\n dstStats = await fs.stat(finalPath).catch(noop)\n\n if (!flags.overwrite && dstStats && dstStats.isFile()) {\n const shouldOverwrite = await prompt.single({\n type: 'confirm',\n message: `File \"${finalPath}\" already exists, would you like to overwrite it?`,\n default: false,\n })\n\n if (!shouldOverwrite) {\n return false\n }\n }\n\n return finalPath\n}\n\nexport default exportDatasetCommand\n","import {createReadStream} from 'node:fs'\nimport fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandContext, type CliCommandDefinition, type CliOutputter} from '@sanity/cli'\nimport sanityImport from '@sanity/import'\nimport {getIt} from 'get-it'\n// eslint-disable-next-line import/extensions\nimport {promise} from 'get-it/middleware'\nimport {padStart} from 'lodash'\nimport prettyMs from 'pretty-ms'\n\nimport {chooseDatasetPrompt} from '../../actions/dataset/chooseDatasetPrompt'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\nimport {debug} from '../../debug'\n\nconst yellow = (str: string) => `\\u001b[33m${str}\\u001b[39m`\n\nconst helpText = `\nOptions\n --missing On duplicate document IDs, skip importing document in question\n --replace On duplicate document IDs, replace existing document with imported document\n --allow-failing-assets Skip assets that cannot be fetched/uploaded\n --replace-assets Skip reuse of existing assets\n --skip-cross-dataset-references Skips references to other datasets\n\nRarely used options (should generally not be used)\n --allow-assets-in-different-dataset Allow asset documents to reference different project/dataset\n --allow-system-documents Allow system documents like dataset permissions and custom retention to be imported\n\nExamples\n # Import \"moviedb.ndjson\" from the current directory to the dataset called \"moviedb\"\n sanity dataset import moviedb.ndjson moviedb\n\n # Import \"moviedb.tar.gz\" from the current directory to the dataset called \"moviedb\",\n # replacing any documents encountered that have the same document IDs\n sanity dataset import moviedb.tar.gz moviedb --replace\n\n # Import from a folder containing an ndjson file, such as an extracted tarball\n # retrieved through \"sanity dataset export\".\n sanity dataset import ~/some/folder moviedb\n\n # Import from a remote URL. Will download and extract the tarball to a temporary\n # location before importing it.\n sanity dataset import https://some.url/moviedb.tar.gz moviedb --replace\n`\n\ninterface ImportFlags {\n 'allow-assets-in-different-dataset'?: boolean\n 'allow-failing-assets'?: boolean\n 'asset-concurrency'?: boolean\n 'replace-assets'?: boolean\n 'skip-cross-dataset-references'?: boolean\n 'allow-system-documents'?: boolean\n 'replace'?: boolean\n 'missing'?: boolean\n}\n\ninterface ParsedImportFlags {\n allowAssetsInDifferentDataset?: boolean\n allowFailingAssets?: boolean\n assetConcurrency?: boolean\n skipCrossDatasetReferences?: boolean\n allowSystemDocuments?: boolean\n replaceAssets?: boolean\n replace?: boolean\n missing?: boolean\n}\n\ninterface ProgressEvent {\n step: string\n total?: number\n current?: number\n}\n\ninterface ImportWarning {\n type?: string\n url?: string\n}\n\nfunction toBoolIfSet(flag: unknown): boolean | undefined {\n return typeof flag === 'undefined' ? 
undefined : Boolean(flag)\n}\n\nfunction parseFlags(rawFlags: ImportFlags): ParsedImportFlags {\n const allowAssetsInDifferentDataset = toBoolIfSet(rawFlags['allow-assets-in-different-dataset'])\n const allowFailingAssets = toBoolIfSet(rawFlags['allow-failing-assets'])\n const assetConcurrency = toBoolIfSet(rawFlags['asset-concurrency'])\n const replaceAssets = toBoolIfSet(rawFlags['replace-assets'])\n const skipCrossDatasetReferences = toBoolIfSet(rawFlags['skip-cross-dataset-references'])\n const allowSystemDocuments = toBoolIfSet(rawFlags['allow-system-documents'])\n const replace = toBoolIfSet(rawFlags.replace)\n const missing = toBoolIfSet(rawFlags.missing)\n return {\n allowAssetsInDifferentDataset,\n allowFailingAssets,\n assetConcurrency,\n skipCrossDatasetReferences,\n allowSystemDocuments,\n replaceAssets,\n replace,\n missing,\n }\n}\n\nconst importDatasetCommand: CliCommandDefinition = {\n name: 'import',\n group: 'dataset',\n signature: '[FILE | FOLDER | URL] [TARGET_DATASET]',\n description: 'Import documents to given dataset from either an ndjson file or a gzipped tarball',\n helpText,\n // eslint-disable-next-line max-statements\n action: async (args, context) => {\n const {apiClient, output, chalk, fromInitCommand} = context\n const flags = parseFlags(args.extOptions)\n const {\n allowAssetsInDifferentDataset,\n allowFailingAssets,\n assetConcurrency,\n skipCrossDatasetReferences,\n allowSystemDocuments,\n replaceAssets,\n } = flags\n\n const operation = getMutationOperation(args.extOptions)\n const client = apiClient()\n\n const [file, target] = args.argsWithoutOptions\n if (!file) {\n throw new Error(\n `Source file name and target dataset must be specified (\"sanity dataset import ${chalk.bold(\n '[file]',\n )} [dataset]\")`,\n )\n }\n\n const targetDataset = await determineTargetDataset(target, context)\n debug(`Target dataset has been set to \"${targetDataset}\"`)\n\n const isUrl = /^https?:\\/\\//i.test(file)\n let inputStream\n let assetsBase\n let sourceIsFolder = false\n\n if (isUrl) {\n debug('Input is a URL, streaming from source URL')\n inputStream = await getUrlStream(file)\n } else {\n const sourceFile = path.resolve(process.cwd(), file)\n const fileStats = await fs.stat(sourceFile).catch(() => null)\n if (!fileStats) {\n throw new Error(`${sourceFile} does not exist or is not readable`)\n }\n\n sourceIsFolder = fileStats.isDirectory()\n if (sourceIsFolder) {\n inputStream = sourceFile\n } else {\n assetsBase = path.dirname(sourceFile)\n inputStream = await createReadStream(sourceFile)\n }\n }\n\n const importClient = client.clone().config({dataset: targetDataset})\n\n // Print information about what projectId and dataset it is being imported to\n const {projectId, dataset} = importClient.config()\n\n output.print('╭───────────────────────────────────────────────╮')\n output.print('│ │')\n output.print('│ Importing to: │')\n output.print(`│ ${chalk.bold('projectId')}: ${chalk.cyan(projectId).padEnd(44)} │`)\n output.print(`│ ${chalk.bold('dataset')}: ${chalk.cyan(dataset).padEnd(46)} │`)\n output.print('│ │')\n output.print('╰───────────────────────────────────────────────╯')\n output.print('')\n\n let currentStep: string | undefined\n let currentProgress: ReturnType<CliOutputter['spinner']> | undefined\n let stepStart: number | undefined\n let spinInterval: ReturnType<typeof setInterval> | null = null\n let percent: string | undefined\n\n function onProgress(opts: ProgressEvent) {\n const lengthComputable = opts.total\n const sameStep = opts.step == 
currentStep\n percent = getPercentage(opts)\n\n if (lengthComputable && opts.total === opts.current) {\n if (spinInterval) {\n clearInterval(spinInterval)\n }\n spinInterval = null\n }\n\n if (sameStep) {\n return\n }\n\n // Moved to a new step\n const prevStep = currentStep\n const prevStepStart = stepStart || Date.now()\n stepStart = Date.now()\n currentStep = opts.step\n\n if (currentProgress && currentProgress.succeed) {\n const timeSpent = prettyMs(Date.now() - prevStepStart, {\n secondsDecimalDigits: 2,\n })\n currentProgress.text = `[100%] ${prevStep} (${timeSpent})`\n currentProgress.succeed()\n }\n\n currentProgress = output.spinner(`[0%] ${opts.step} (0.00s)`).start()\n\n if (spinInterval) {\n clearInterval(spinInterval)\n spinInterval = null\n }\n\n spinInterval = setInterval(() => {\n const timeSpent = prettyMs(Date.now() - prevStepStart, {\n secondsDecimalDigits: 2,\n })\n\n if (currentProgress) {\n currentProgress.text = `${percent}${opts.step} (${timeSpent})`\n }\n }, 60)\n }\n\n function endTask({success}: {success: boolean}) {\n if (spinInterval) {\n clearInterval(spinInterval)\n }\n\n spinInterval = null\n\n if (success && stepStart && currentProgress) {\n const timeSpent = prettyMs(Date.now() - stepStart, {\n secondsDecimalDigits: 2,\n })\n currentProgress.text = `[100%] ${currentStep} (${timeSpent})`\n currentProgress.succeed()\n } else if (currentProgress) {\n currentProgress.fail()\n }\n }\n\n // Start the import!\n try {\n const {numDocs, warnings} = await sanityImport(inputStream, {\n client: importClient,\n assetsBase,\n operation,\n onProgress,\n allowFailingAssets,\n allowAssetsInDifferentDataset,\n skipCrossDatasetReferences,\n allowSystemDocuments,\n assetConcurrency,\n replaceAssets,\n })\n\n endTask({success: true})\n\n output.print('Done! Imported %d documents to dataset \"%s\"\\n', numDocs, targetDataset)\n printWarnings(warnings, output)\n } catch (err) {\n endTask({success: false})\n\n const isNonRefConflict =\n !fromInitCommand &&\n err.response &&\n err.response.statusCode === 409 &&\n err.step !== 'strengthen-references'\n\n if (!isNonRefConflict) {\n throw err\n }\n\n const message = [\n err.message,\n '',\n 'You probably want either:',\n ' --replace (replace existing documents with same IDs)',\n ' --missing (only import documents that do not already exist)',\n '',\n ].join('\\n')\n\n // @todo SUBCLASS ERROR?\n const error = new Error(message) as any\n error.details = err.details\n error.response = err.response\n error.responseBody = err.responseBody\n\n throw error\n }\n },\n}\n\nasync function determineTargetDataset(target: string, context: CliCommandContext) {\n const {apiClient, output, prompt} = context\n const client = apiClient()\n\n if (target) {\n const dsError = validateDatasetName(target)\n if (dsError) {\n throw new Error(dsError)\n }\n }\n\n debug('Fetching available datasets')\n const spinner = output.spinner('Fetching available datasets').start()\n const datasets = await client.datasets.list()\n spinner.succeed('[100%] Fetching available datasets')\n\n let targetDataset = target ? 
`${target}` : null\n if (!targetDataset) {\n targetDataset = await chooseDatasetPrompt(context, {\n message: 'Select target dataset',\n allowCreation: true,\n })\n } else if (!datasets.find((dataset) => dataset.name === targetDataset)) {\n debug('Target dataset does not exist, prompting for creation')\n const shouldCreate = await prompt.single({\n type: 'confirm',\n message: `Dataset \"${targetDataset}\" does not exist, would you like to create it?`,\n default: true,\n })\n\n if (!shouldCreate) {\n throw new Error(`Dataset \"${targetDataset}\" does not exist`)\n }\n\n await client.datasets.create(targetDataset)\n }\n\n return targetDataset\n}\n\nfunction getMutationOperation(flags: ParsedImportFlags) {\n const {replace, missing} = flags\n if (replace && missing) {\n throw new Error('Cannot use both --replace and --missing')\n }\n\n if (flags.replace) {\n return 'createOrReplace'\n }\n\n if (flags.missing) {\n return 'createIfNotExists'\n }\n\n return 'create'\n}\n\nfunction getPercentage(opts: ProgressEvent) {\n if (!opts.total || typeof opts.current === 'undefined') {\n return ''\n }\n\n const percent = Math.floor((opts.current / opts.total) * 100)\n return `[${padStart(`${percent}`, 3, ' ')}%] `\n}\n\nfunction getUrlStream(url: string) {\n const request = getIt([promise({onlyBody: true})])\n return request({url, stream: true})\n}\n\nfunction printWarnings(warnings: ImportWarning[], output: CliOutputter) {\n const assetFails = warnings.filter((warn) => warn.type === 'asset')\n\n if (!assetFails.length) {\n return\n }\n\n const warn = (output.warn || output.print).bind(output)\n\n warn(yellow('⚠ Failed to import the following %s:'), assetFails.length > 1 ? 'assets' : 'asset')\n\n warnings.forEach((warning) => {\n warn(` ${warning.url}`)\n })\n}\n\nexport default importDatasetCommand\n","import {type CliCommandAction} from '@sanity/cli'\n\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nexport const listAliasesHandler: CliCommandAction = async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n\n const aliases = await aliasClient.listAliases(client)\n output.print(\n aliases\n .map((set) => `${ALIAS_PREFIX}${set.name} -> ${set.datasetName || '<unlinked>'}`)\n .join('\\n'),\n )\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {listAliasesHandler} from './alias/listAliasesHandler'\n\nconst listDatasetsCommand: CliCommandDefinition = {\n name: 'list',\n group: 'dataset',\n helpText: '',\n signature: '',\n description: 'List datasets of your project',\n action: async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n const datasets = await client.datasets.list()\n output.print(datasets.map((set) => set.name).join('\\n'))\n\n // Print alias list\n await listAliasesHandler(args, context)\n },\n}\n\nexport default listDatasetsCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type DeployStudioActionFlags} from '../../actions/deploy/deployAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --auto-updates / --no-auto-updates Enable/disable auto updates of studio versions\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n --no-build Don't build the studio prior to deploy, instead deploying the version currently in \\`dist/\\`\n --schema-required 
Require schema extraction and storing to be successful\n --verbose Enable verbose logging\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n\nExamples\n sanity deploy\n sanity deploy --no-minify --source-maps\n`\n\nconst deployCommand: CliCommandDefinition = {\n name: 'deploy',\n signature: '[SOURCE_DIR] [--no-build] [--source-maps] [--no-minify]',\n description: 'Builds and deploys Sanity Studio to Sanity hosting',\n action: async (\n args: CliCommandArguments<DeployStudioActionFlags>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/deploy/deployAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default deployCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nconst helpText = `\nExamples\n sanity undeploy\n`\n\nconst undeployCommand: CliCommandDefinition = {\n name: 'undeploy',\n signature: '',\n description: 'Removes the deployed Sanity Studio from Sanity hosting',\n action: async (\n args: CliCommandArguments<Record<string, unknown>>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/deploy/undeployAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default undeployCommand\n","import fs from 'node:fs/promises'\nimport os from 'node:os'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {\n type IdentifiedSanityDocumentStub,\n type MultipleMutationResult,\n type Mutation,\n type SanityClient,\n} from '@sanity/client'\nimport {uuid} from '@sanity/uuid'\nimport chokidar from 'chokidar'\nimport execa from 'execa'\nimport json5 from 'json5'\nimport {isEqual, isPlainObject, noop} from 'lodash'\n\ntype MutationOperationName = 'create' | 'createOrReplace' | 'createIfNotExists'\n\ninterface CreateFlags {\n dataset?: string\n replace?: boolean\n missing?: boolean\n watch?: boolean\n json5?: boolean\n id?: string\n}\n\nconst helpText = `\nOptions\n --replace On duplicate document IDs, replace existing document with specified document(s)\n --missing On duplicate document IDs, don't modify the target document(s)\n --watch Write the documents whenever the target file or buffer changes\n --json5 Use JSON5 file type to allow a \"simplified\" version of JSON\n --id <id> Specify a document ID to use. Will fetch remote document ID and populate editor.\n --dataset NAME to override dataset\n\nExamples\n # Create the document specified in \"myDocument.json\".\n sanity documents create myDocument.json\n\n # Open configured $EDITOR and create the specified document(s)\n sanity documents create\n\n # Fetch document with the ID \"myDocId\" and open configured $EDITOR with the\n # current document content (if any). Replace document with the edited version\n # when the editor closes\n sanity documents create --id myDocId --replace\n\n # Open configured $EDITOR and replace the document with the given content\n # on each save. 
Use JSON5 file extension and parser for simplified syntax.\n sanity documents create --id myDocId --watch --replace --json5\n`\n\nconst createDocumentsCommand: CliCommandDefinition<CreateFlags> = {\n name: 'create',\n group: 'documents',\n signature: '[FILE]',\n helpText,\n description: 'Create one or more documents',\n // eslint-disable-next-line complexity\n action: async (args, context) => {\n const {apiClient, output} = context\n const {replace, missing, watch, id, dataset} = args.extOptions\n const [file] = args.argsWithoutOptions\n const useJson5 = args.extOptions.json5\n const client = dataset ? apiClient().clone().config({dataset}) : apiClient()\n\n if (replace && missing) {\n throw new Error('Cannot use both --replace and --missing')\n }\n\n if (id && file) {\n throw new Error('Cannot use --id when specifying a file path')\n }\n\n let operation: MutationOperationName = 'create'\n if (replace || missing) {\n operation = replace ? 'createOrReplace' : 'createIfNotExists'\n }\n\n if (file) {\n const contentPath = path.resolve(process.cwd(), file)\n const content = json5.parse(await fs.readFile(contentPath, 'utf8'))\n const result = await writeDocuments(content, operation, client)\n output.print(getResultMessage(result, operation))\n return\n }\n\n // Create a temporary file and use that as source, opening an editor on it\n const docId = id || uuid()\n const ext = useJson5 ? 'json5' : 'json'\n const tmpFile = path.join(os.tmpdir(), 'sanity-cli', `${docId}.${ext}`)\n const stringify = useJson5 ? json5.stringify : JSON.stringify\n const defaultValue = (id && (await client.getDocument(id))) || {_id: docId, _type: 'specify-me'}\n await fs.mkdir(path.join(os.tmpdir(), 'sanity-cli'), {recursive: true})\n await fs.writeFile(tmpFile, stringify(defaultValue, null, 2), 'utf8')\n\n const editor = getEditor()\n if (watch) {\n // If we're in watch mode, we want to run the creation on each change (if it validates)\n registerUnlinkOnSigInt(tmpFile)\n output.print(`Watch mode: ${tmpFile}`)\n output.print('Watch mode: Will write documents on each save.')\n output.print('Watch mode: Press Ctrl + C to cancel watch mode.')\n chokidar.watch(tmpFile).on('change', () => {\n output.print('')\n return readAndPerformCreatesFromFile(tmpFile)\n })\n execa(editor.bin, editor.args.concat(tmpFile), {stdio: 'inherit'})\n } else {\n // While in normal mode, we just want to wait for the editor to close and run the thing once\n execa.sync(editor.bin, editor.args.concat(tmpFile), {stdio: 'inherit'})\n await readAndPerformCreatesFromFile(tmpFile)\n await fs.unlink(tmpFile).catch(noop)\n }\n\n async function readAndPerformCreatesFromFile(filePath: string) {\n let content\n try {\n content = json5.parse(await fs.readFile(filePath, 'utf8'))\n } catch (err) {\n output.error(`Failed to read input: ${err.message}`)\n return\n }\n\n if (isEqual(content, defaultValue)) {\n output.print('Value not modified, doing nothing.')\n output.print('Modify document to trigger creation.')\n return\n }\n\n try {\n const writeResult = await writeDocuments(content, operation, client)\n output.print(getResultMessage(writeResult, operation))\n } catch (err) {\n output.error(`Failed to write documents: ${err.message}`)\n if (err.message.includes('already exists')) {\n output.error('Perhaps you want to use `--replace` or `--missing`?')\n }\n }\n }\n },\n}\n\nfunction registerUnlinkOnSigInt(tmpFile: string) {\n process.on('SIGINT', async () => {\n await fs.unlink(tmpFile).catch(noop)\n // eslint-disable-next-line no-process-exit\n 
process.exit(130)\n })\n}\n\nfunction writeDocuments(\n documents: {_id?: string; _type: string} | {_id?: string; _type: string}[],\n operation: MutationOperationName,\n client: SanityClient,\n) {\n const docs = Array.isArray(documents) ? documents : [documents]\n if (docs.length === 0) {\n throw new Error('No documents provided')\n }\n\n const mutations = docs.map((doc, index): Mutation => {\n validateDocument(doc, index, docs)\n if (operation === 'create') {\n return {create: doc}\n }\n\n if (operation === 'createIfNotExists') {\n if (isIdentifiedSanityDocument(doc)) {\n return {createIfNotExists: doc}\n }\n\n throw new Error(`Missing required _id attribute for ${operation}`)\n }\n\n if (operation === 'createOrReplace') {\n if (isIdentifiedSanityDocument(doc)) {\n return {createOrReplace: doc}\n }\n\n throw new Error(`Missing required _id attribute for ${operation}`)\n }\n\n throw new Error(`Unsupported operation ${operation}`)\n })\n\n return client.transaction(mutations).commit()\n}\n\nfunction validateDocument(doc: unknown, index: number, arr: unknown[]) {\n const isSingle = arr.length === 1\n\n if (!isPlainObject(doc)) {\n throw new Error(getErrorMessage('must be an object', index, isSingle))\n }\n\n if (!isSanityDocumentish(doc)) {\n throw new Error(getErrorMessage('must have a `_type` property of type string', index, isSingle))\n }\n}\n\nfunction isSanityDocumentish(doc: unknown): doc is {_type: string} {\n return (\n doc !== null &&\n typeof doc === 'object' &&\n '_type' in doc &&\n typeof (doc as any)._type === 'string'\n )\n}\n\nfunction isIdentifiedSanityDocument(doc: unknown): doc is IdentifiedSanityDocumentStub {\n return isSanityDocumentish(doc) && '_id' in doc\n}\n\nfunction getErrorMessage(message: string, index: number, isSingle: boolean): string {\n return isSingle ? `Document ${message}` : `Document at index ${index} ${message}`\n}\n\nfunction getResultMessage(\n result: MultipleMutationResult,\n operation: MutationOperationName,\n): string {\n const joiner = '\\n - '\n if (operation === 'createOrReplace') {\n return `Upserted:\\n - ${result.results.map((res) => res.id).join(joiner)}`\n }\n\n if (operation === 'create') {\n return `Created:\\n - ${result.results.map((res) => res.id).join(joiner)}`\n }\n\n // \"Missing\" (createIfNotExists)\n const created: string[] = []\n const skipped: string[] = []\n for (const res of result.results) {\n if (res.operation === 'update') {\n skipped.push(res.id)\n } else {\n created.push(res.id)\n }\n }\n\n if (created.length > 0 && skipped.length > 0) {\n return [\n `Created:\\n - ${created.join(joiner)}`,\n `Skipped (already exists):${joiner}${skipped.join(joiner)}`,\n ].join('\\n\\n')\n } else if (created.length > 0) {\n return `Created:\\n - ${created.join(joiner)}`\n }\n\n return `Skipped (already exists):\\n - ${skipped.join(joiner)}`\n}\n\nfunction getEditor() {\n const defaultEditor = /^win/.test(process.platform) ? 
'notepad' : 'vim'\n // eslint-disable-next-line no-process-env\n const editor = process.env.VISUAL || process.env.EDITOR || defaultEditor\n const args = editor.split(/\\s+/)\n const bin = args.shift() || ''\n return {bin, args}\n}\n\nexport default createDocumentsCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport pluralize from 'pluralize-esm'\n\nconst helpText = `\nDelete a document from the projects configured dataset\n\nOptions\n --dataset NAME to override dataset\n\nExample\n # Delete the document with the ID \"myDocId\"\n sanity documents delete myDocId\n\n # ID wrapped in double or single quote works equally well\n sanity documents delete 'myDocId'\n\n # Delete document with ID \"someDocId\" from dataset \"blog\"\n sanity documents delete --dataset=blog someDocId\n\n # Delete the document with ID \"doc1\" and \"doc2\"\n sanity documents delete doc1 doc2\n`\n\ninterface DeleteFlags {\n dataset?: string\n}\n\nconst deleteDocumentsCommand: CliCommandDefinition<DeleteFlags> = {\n name: 'delete',\n group: 'documents',\n signature: '[ID] [...IDS]',\n helpText,\n description: 'Delete a document by ID',\n action: async (args, context) => {\n const {apiClient, output, chalk} = context\n const {dataset} = args.extOptions\n const ids = args.argsWithoutOptions.map((str) => `${str}`)\n\n if (!ids.length) {\n throw new Error('Document ID must be specified')\n }\n\n const client = dataset ? apiClient().clone().config({dataset}) : apiClient()\n\n const transaction = ids.reduce((trx, id) => trx.delete(id), client.transaction())\n try {\n const {results} = await transaction.commit()\n const deleted = results.filter((res) => res.operation === 'delete').map((res) => res.id)\n const notFound = ids.filter((id) => !deleted.includes(id))\n if (deleted.length > 0) {\n output.print(`Deleted ${deleted.length} ${pluralize('document', deleted.length)}`)\n }\n\n if (notFound.length > 0) {\n output.error(\n chalk.red(`${pluralize('Document', notFound.length)} not found: ${notFound.join(', ')}`),\n )\n }\n } catch (err) {\n throw new Error(`Failed to delete ${pluralize('document', ids.length)}:\\n${err.message}`)\n }\n },\n}\n\nexport default deleteDocumentsCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst documentsGroup: CliCommandGroupDefinition = {\n name: 'documents',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages documents in your Sanity Content Lake datasets',\n}\n\nexport default documentsGroup\n","import {type CliCommandContext} from '@sanity/cli'\nimport tokenize, {type LexerToken} from 'json-lexer'\n\ninterface KeyToken {\n type: 'key'\n value: string\n raw: string\n}\n\ntype ExtendedLexerToken = LexerToken | KeyToken\n\nconst identity = (inp: string): string => inp\n\nexport function colorizeJson(input: unknown, chalk: CliCommandContext['chalk']): string {\n const formatters: Record<ExtendedLexerToken['type'], (str: string) => string> = {\n punctuator: chalk.white,\n key: chalk.white,\n string: chalk.green,\n number: chalk.yellow,\n literal: chalk.bold,\n whitespace: identity,\n }\n\n const json = JSON.stringify(input, null, 2)\n\n return tokenize(json)\n .map((token, i, arr): ExtendedLexerToken => {\n // Note how the following only works because we pretty-print the JSON\n const prevToken = i === 0 ? 
token : arr[i - 1]\n if (\n token.type === 'string' &&\n prevToken.type === 'whitespace' &&\n /^\\n\\s+$/.test(prevToken.value)\n ) {\n return {...token, type: 'key'}\n }\n\n return token\n })\n .map((token) => {\n const formatter = formatters[token.type] || identity\n return formatter(token.raw)\n })\n .join('')\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {colorizeJson} from '../../util/colorizeJson'\n\nconst helpText = `\nGet and print a document from the projects configured dataset\n\nOptions\n --pretty colorized JSON output\n --dataset NAME to override dataset\n\nExamples\n # Get the document with the ID \"myDocId\"\n sanity documents get myDocId\n\n # ID wrapped in double or single quote works equally well\n sanity documents get 'myDocId'\n`\n\ninterface GetDocumentFlags {\n pretty?: boolean\n dataset?: string\n}\n\nconst getDocumentsCommand: CliCommandDefinition<GetDocumentFlags> = {\n name: 'get',\n group: 'documents',\n signature: '[DOCUMENT_ID]',\n helpText,\n description: 'Get and print a document by ID',\n action: async (args, context) => {\n const {apiClient, output, chalk} = context\n const {pretty, dataset} = args.extOptions\n const [docId] = args.argsWithoutOptions.map((str) => `${str}`)\n\n if (!docId) {\n throw new Error('Document ID must be specified')\n }\n\n const client = dataset ? apiClient().clone().config({dataset}) : apiClient()\n\n try {\n const doc = await client.getDocument(docId)\n if (!doc) {\n throw new Error(`Document ${docId} not found`)\n }\n\n output.print(pretty ? colorizeJson(doc, chalk) : JSON.stringify(doc, null, 2))\n } catch (err) {\n throw new Error(`Failed to fetch document:\\n${err.message}`)\n }\n },\n}\n\nexport default getDocumentsCommand\n","import {type CliCommandArguments, type CliCommandContext} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {colorizeJson} from '../../util/colorizeJson'\n\nconst defaultApiVersion = 'v2022-06-01'\n\nconst helpText = `\nRun a query against the projects configured dataset\n\nOptions\n --pretty colorized JSON output\n --dataset NAME to override dataset\n --project PROJECT to override project ID\n --anonymous Send the query without any authorization token\n --api-version API version to use (defaults to \\`${defaultApiVersion}\\`)\n\nEnvironment variables\n \\`SANITY_CLI_QUERY_API_VERSION\\` - will use the defined API version,\n unless \\`--api-version\\` is specified.\n\nExamples\n # Fetch 5 documents of type \"movie\"\n sanity documents query '*[_type == \"movie\"][0..4]'\n\n # Fetch title of the oldest movie in the dataset named \"staging\"\n sanity documents query '*[_type == \"movie\"]|order(releaseDate asc)[0]{title}' --dataset staging\n\n # Use API version v2021-06-07 and do a query\n sanity documents query --api-version v2021-06-07 '*[_id == \"header\"] { \"headerText\": pt::text(body) }'\n`\n\ninterface CliQueryCommandFlags {\n pretty?: boolean\n anonymous?: boolean\n dataset?: string\n project?: string\n apiVersion?: string\n}\n\nexport default {\n name: 'query',\n group: 'documents',\n signature: '[QUERY]',\n helpText,\n description: 'Query for documents',\n action: async (\n args: CliCommandArguments<CliQueryCommandFlags>,\n context: CliCommandContext,\n ): Promise<void> => {\n // Reparsing arguments for improved control of flags\n const {\n pretty,\n dataset,\n project,\n anonymous,\n 'api-version': apiVersion,\n } = await parseCliFlags(args)\n const {apiClient, output, chalk, cliConfig} = context\n const [query] = 
args.argsWithoutOptions\n\n if (!query) {\n throw new Error('Query must be specified')\n }\n\n if (!apiVersion) {\n output.warn(chalk.yellow(`--api-version not specified, using \\`${defaultApiVersion}\\``))\n }\n\n const requireDataset = !dataset\n const requireProject = !project\n const requireUser = !anonymous\n\n if (requireProject && !cliConfig?.api?.projectId) {\n throw new Error(\n 'No project configured in CLI config - either configure one, or use `--project` flag',\n )\n }\n\n if (requireDataset && !cliConfig?.api?.dataset) {\n throw new Error(\n 'No dataset configured in CLI config - either configure one, or use `--dataset` flag',\n )\n }\n\n const baseClient = apiClient({requireProject, requireUser}).clone()\n const {dataset: originalDataset, projectId: originalProjectId} = baseClient.config()\n\n const client = baseClient.config({\n projectId: project || originalProjectId,\n dataset: dataset || originalDataset,\n apiVersion: apiVersion || defaultApiVersion,\n })\n\n try {\n const docs = await client.fetch(query)\n if (!docs) {\n throw new Error('Query returned no results')\n }\n\n output.print(pretty ? colorizeJson(docs, chalk) : JSON.stringify(docs, null, 2))\n } catch (err) {\n throw new Error(`Failed to run query:\\n${err.message}`)\n }\n },\n}\n\nfunction parseCliFlags(args: CliCommandArguments<CliQueryCommandFlags>) {\n // eslint-disable-next-line no-process-env\n const fallbackApiVersion = process.env.SANITY_CLI_QUERY_API_VERSION\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .option('pretty', {type: 'boolean', default: false})\n .option('dataset', {type: 'string'})\n .option('project', {type: 'string'})\n .option('anonymous', {type: 'boolean', default: false})\n .option('api-version', {type: 'string', default: fallbackApiVersion}).argv\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = `Downloads and validates all document specified in a workspace`\n\nconst helpText = `\nOptions\n -y, --yes Skips the first confirmation prompt.\n --workspace <name> The name of the workspace to use when downloading and validating all documents.\n --dataset <name> Override the dataset used. By default, this is derived from the given workspace.\n --file <filepath> Provide a path to either an .ndjson file or a tarball containing an .ndjson file.\n --format <pretty|ndjson|json> The output format used to print the found validation markers and report progress.\n --level <error|warning|info> The minimum level reported out. Defaults to warning.\n --max-custom-validation-concurrency <number> Specify how many custom validators can run concurrently. Defaults to 5.\n --max-fetch-concurrency <number> Specify how many \\`client.fetch\\` requests are allow concurrency at once. 
Defaults to 25.\n\nExamples\n # Validates all documents in a Sanity project with more than one workspace\n sanity documents validate --workspace default\n\n # Override the dataset specified in the workspace\n sanity documents validate --workspace default --dataset staging\n\n # Save the results of the report into a file\n sanity documents validate --yes > report.txt\n\n # Report out info level validation markers too\n sanity documents validate --level info\n`\n\nconst validateDocumentsCommand: CliCommandDefinition = {\n name: 'validate',\n group: 'documents',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/validation/validateAction')\n\n return mod.default(args, context)\n },\n} satisfies CliCommandDefinition\n\nexport default validateDocumentsCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst helpText = `\nOptions\n --with-user-token Prime access token from CLI config into getCliClient()\n --mock-browser-env Mocks a browser-like environment using jsdom\n\nExamples\n # Run the script at some/script.js in Sanity context\n sanity exec some/script.js\n\n # Run the script at migrations/fullname.ts and configure \\`getCliClient()\\`\n # from \\`sanity/cli\\`to include the current user's token\n sanity exec migrations/fullname.ts --with-user-token\n\n # Run the script at scripts/browserScript.js in a mock browser environment\n sanity exec scripts/browserScript.js --mock-browser-env\n\n # Pass arbitrary arguments to scripts by separating them with a \\`--\\`.\n # Arguments are available in \\`process.argv\\` as they would in regular node scripts\n # eg the following command would yield a \\`process.argv\\` of:\n # ['/path/to/node', '/path/to/myscript.js', '--dry-run', 'positional-argument']\n sanity exec --mock-browser-env myscript.js -- --dry-run positional-argument\n`\n\nexport const execCommand: CliCommandDefinition = {\n name: 'exec',\n signature: 'SCRIPT',\n description: 'Executes a script within the Sanity Studio context',\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/exec/execScript')\n\n return mod.default(args, context)\n },\n}\n\nexport default execCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type DeleteGraphQLApiFlags} from '../../actions/graphql/deleteApiAction'\n\nconst helpText = `\nOptions\n --api <api-id> Undeploy API with this ID (project, dataset and tag flags takes preference)\n --project <projectId> Project ID to delete GraphQL API for\n --dataset <dataset> Delete GraphQL API for the given dataset\n --tag <tag> Delete GraphQL API for the given tag (defaults to 'default')\n --force Skip confirmation prompt, forcefully undeploying the GraphQL API\n\nExamples\n sanity graphql undeploy\n sanity graphql undeploy --api ios\n sanity graphql undeploy --dataset staging\n sanity graphql undeploy --dataset staging --tag next\n`\n\nconst deleteGraphQLAPICommand: CliCommandDefinition = {\n name: 'undeploy',\n group: 'graphql',\n signature: '',\n description: 'Remove a deployed GraphQL API',\n action: async (args: CliCommandArguments<DeleteGraphQLApiFlags>, context: CliCommandContext) => {\n const mod = await import('../../actions/graphql/deleteApiAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default deleteGraphQLAPICommand\n","import {type CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\n\nconst helpText = 
`\nOptions\n --dry-run Validate defined APIs, exiting with an error on breaking changes\n --force Deploy API without confirming breaking changes\n --api <api-id> Only deploy API with this ID. Can be specified multiple times.\n\nThe following options will override any setting from the CLI configuration file\n(sanity.cli.js/sanity.cli.ts) - and applies to ALL defined APIs defined in that\nconfiguration file. Tread with caution!\n\n --tag Deploy API(s) to given tag (defaults to 'default')\n --dataset <name> Deploy API for the given dataset\n --generation <gen1|gen2|gen3> API generation to deploy (defaults to 'gen3')\n --non-null-document-fields Use non-null document fields (_id, _type etc)\n --playground Enable GraphQL playground for easier debugging\n --no-playground Disable GraphQL playground\n --with-union-cache *Experimental:* Enable union cache that optimizes schema generation for schemas with many self referencing types\n\nExamples\n # Deploy all defined GraphQL APIs\n sanity graphql deploy\n\n # Validate defined GraphQL APIs, check for breaking changes, skip deploy\n sanity graphql deploy --dry-run\n\n # Deploy only the GraphQL APIs with the IDs \"staging\" and \"ios\"\n sanity graphql deploy --api staging --api ios\n\n # Deploy all defined GraphQL APIs, overriding any playground setting\n sanity graphql deploy --playground\n`\n\nconst deployGraphQLAPICommand: CliCommandDefinition = {\n name: 'deploy',\n signature: '',\n group: 'graphql',\n description: 'Deploy a GraphQL API from the current Sanity schema',\n action: async (args: {argv?: string[]}, context: CliCommandContext) => {\n const mod = await import('../../actions/graphql/deployApiAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default deployGraphQLAPICommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst graphqlGroup: CliCommandGroupDefinition = {\n name: 'graphql',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: \"Deploys changes to your project's GraphQL API(s)\",\n}\n\nexport default graphqlGroup\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nconst helpText = `\nExamples\n sanity graphql list\n`\n\nconst listGraphQLAPIsCommand: CliCommandDefinition = {\n name: 'list',\n signature: '',\n group: 'graphql',\n description: 'Lists all the GraphQL endpoints deployed for this project',\n action: async (\n args: CliCommandArguments<Record<string, unknown>>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/graphql/listApisAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default listGraphQLAPIsCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport open from 'open'\n\nconst createHookCommand: CliCommandDefinition = {\n name: 'create',\n group: 'hook',\n signature: '',\n helpText: '',\n description: 'Create a new hook for the given dataset',\n action: async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n\n const {projectId} = client.config()\n if (!projectId) {\n throw new Error('No project ID found')\n }\n\n const projectInfo = (await client.projects.getById(projectId)) || {}\n const organizationId = projectInfo.organizationId || 'personal'\n const manageUrl = `https://www.sanity.io/organizations/${organizationId}/project/${projectId}/api/webhooks/new`\n\n output.print(`Opening ${manageUrl}`)\n open(manageUrl)\n },\n}\n\nexport default createHookCommand\n","import {type 
CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type Hook} from './types'\n\nconst deleteHookCommand: CliCommandDefinition = {\n name: 'delete',\n group: 'hook',\n signature: '[NAME]',\n helpText: '',\n description: 'Delete a hook within your project',\n action: async (args, context) => {\n const {apiClient} = context\n const [name] = args.argsWithoutOptions\n const client = apiClient()\n\n const hookId = await promptForHook(name, context)\n try {\n await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request({method: 'DELETE', uri: `/hooks/${hookId}`})\n } catch (err) {\n throw new Error(`Hook deletion failed:\\n${err.message}`)\n }\n },\n}\n\nasync function promptForHook(specified: string | undefined, context: CliCommandContext) {\n const specifiedName = specified && specified.toLowerCase()\n const {prompt, apiClient} = context\n const client = apiClient()\n\n const hooks = await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request<Hook[]>({uri: '/hooks', json: true})\n\n if (specifiedName) {\n const selected = hooks.filter((hook) => hook.name.toLowerCase() === specifiedName)[0]\n if (!selected) {\n throw new Error(`Hook with name \"${specified} not found\"`)\n }\n\n return selected.id\n }\n\n const choices = hooks.map((hook) => ({value: hook.id, name: hook.name}))\n return prompt.single({\n message: 'Select hook to delete',\n type: 'list',\n choices,\n })\n}\n\nexport default deleteHookCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst hookGroup: CliCommandGroupDefinition = {\n name: 'hook',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Sets up and manages webhooks within your Sanity project',\n}\n\nexport default hookGroup\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {type DeliveryAttempt} from './types'\n\nconst printHookAttemptCommand: CliCommandDefinition = {\n name: 'attempt',\n group: 'hook',\n signature: 'ATTEMPT_ID',\n helpText: '',\n description: 'Print details of a given webhook delivery attempt',\n action: async (args, context) => {\n const {apiClient, output} = context\n const [attemptId] = args.argsWithoutOptions\n const client = apiClient()\n\n let attempt\n try {\n attempt = await client.request<DeliveryAttempt>({uri: `/hooks/attempts/${attemptId}`})\n } catch (err) {\n throw new Error(`Hook attempt retrieval failed:\\n${err.message}`)\n }\n\n const {createdAt, resultCode, resultBody, failureReason, inProgress} = attempt\n\n output.print(`Date: ${createdAt}`)\n output.print(`Status: ${getStatus(attempt)}`)\n output.print(`Status code: ${resultCode}`)\n\n if (attempt.isFailure) {\n output.print(`Failure: ${formatFailure(attempt)}`)\n }\n\n if (!inProgress && (!failureReason || failureReason === 'http')) {\n const body = resultBody ? `\\n---\\n${resultBody}\\n---\\n` : '<empty>'\n output.print(`Response body: ${body}`)\n }\n },\n}\n\nexport default printHookAttemptCommand\n\nexport function formatFailure(\n attempt: DeliveryAttempt,\n options: {includeHelp?: boolean} = {},\n): string {\n const {includeHelp} = options\n const {id, failureReason, resultCode} = attempt\n const help = includeHelp ? 
`(run \\`sanity hook attempt ${id}\\` for details)` : ''\n switch (failureReason) {\n case 'http':\n return `HTTP ${resultCode} ${help}`\n case 'timeout':\n return 'Request timed out'\n case 'network':\n return 'Network error'\n case 'other':\n default:\n }\n\n return 'Unknown error'\n}\n\nexport function getStatus(attempt: DeliveryAttempt): string {\n if (attempt.isFailure) {\n return 'Failed'\n }\n\n if (attempt.inProgress) {\n return 'In progress'\n }\n\n return 'Delivered'\n}\n","import {inspect} from 'node:util'\n\nimport {type CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\nimport {groupBy} from 'lodash'\n\nimport {formatFailure} from './printHookAttemptCommand'\nimport {type DeliveryAttempt, type Hook, type HookMessage} from './types'\n\ninterface ListHookFlags {\n detailed?: boolean\n}\n\nconst listHookLogsCommand: CliCommandDefinition<ListHookFlags> = {\n name: 'logs',\n group: 'hook',\n signature: '[NAME]',\n helpText: '',\n description: 'List latest log entries for a given hook',\n action: async (args, context) => {\n const {apiClient} = context\n const flags = args.extOptions\n const [name] = args.argsWithoutOptions\n const client = apiClient()\n\n const hookId = await promptForHook(name, context)\n let messages\n let attempts\n try {\n messages = await client.request<HookMessage[]>({uri: `/hooks/${hookId}/messages`})\n attempts = await client.request<DeliveryAttempt[]>({uri: `/hooks/${hookId}/attempts`})\n } catch (err) {\n throw new Error(`Hook logs retrieval failed:\\n${err.message}`)\n }\n\n const groupedAttempts = groupBy(attempts, 'messageId')\n const populated = messages.map((msg): HookMessage & {attempts: DeliveryAttempt[]} => ({\n ...msg,\n attempts: groupedAttempts[msg.id],\n }))\n\n const totalMessages = messages.length - 1\n populated.forEach((message, i) => {\n printMessage(message, context, {detailed: flags.detailed})\n printSeparator(context, totalMessages === i)\n })\n },\n}\n\nexport default listHookLogsCommand\n\nasync function promptForHook(specified: string | undefined, context: CliCommandContext) {\n const specifiedName = specified && specified.toLowerCase()\n const {prompt, apiClient} = context\n const client = apiClient()\n\n const hooks = await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request<Hook[]>({uri: '/hooks', json: true})\n\n if (specifiedName) {\n const selected = hooks.filter((hook) => hook.name.toLowerCase() === specifiedName)[0]\n if (!selected) {\n throw new Error(`Hook with name \"${specified} not found\"`)\n }\n\n return selected.id\n }\n\n if (hooks.length === 0) {\n throw new Error('No hooks currently registered')\n }\n\n if (hooks.length === 1) {\n return hooks[0].id\n }\n\n const choices = hooks.map((hook) => ({value: hook.id, name: hook.name}))\n return prompt.single({\n message: 'Select hook to list logs for',\n type: 'list',\n choices,\n })\n}\n\nfunction printSeparator(context: CliCommandContext, skip: boolean) {\n if (!skip) {\n context.output.print('---\\n')\n }\n}\n\nfunction printMessage(\n message: HookMessage & {attempts: DeliveryAttempt[]},\n context: CliCommandContext,\n options: {detailed?: boolean},\n) {\n const {detailed} = options\n const {output, chalk} = context\n\n output.print(`Date: ${message.createdAt}`)\n output.print(`Status: ${message.status}`)\n output.print(`Result code: ${message.resultCode}`)\n\n if (message.failureCount > 0) {\n output.print(`Failures: ${message.failureCount}`)\n }\n\n if (detailed) {\n output.print('Payload:')\n 
output.print(inspect(JSON.parse(message.payload), {colors: true}))\n }\n\n if (detailed && message.attempts) {\n output.print('Attempts:')\n message.attempts.forEach((attempt) => {\n const date = attempt.createdAt.replace(/\\.\\d+Z$/, 'Z')\n const prefix = ` [${date}]`\n\n if (attempt.inProgress) {\n output.print(`${prefix} ${chalk.yellow('Pending')}`)\n } else if (attempt.isFailure) {\n const failure = formatFailure(attempt, {includeHelp: true})\n output.print(`${prefix} ${chalk.yellow(`Failure: ${failure}`)}`)\n } else {\n output.print(`${prefix} Success: HTTP ${attempt.resultCode} (${attempt.duration}ms)`)\n }\n })\n }\n\n // Leave some empty space between messages\n output.print('')\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {type Hook} from './types'\n\nconst listHooksCommand: CliCommandDefinition = {\n name: 'list',\n group: 'hook',\n signature: '',\n helpText: '',\n description: 'List hooks for a given project',\n action: async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n\n let hooks\n try {\n hooks = await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request<Hook[]>({uri: '/hooks'})\n } catch (err) {\n throw new Error(`Hook list retrieval failed:\\n${err.message}`)\n }\n\n hooks.forEach((hook) => {\n output.print(`Name: ${hook.name}`)\n output.print(`Dataset: ${hook.dataset}`)\n output.print(`URL: ${hook.url}`)\n\n if (hook.type === 'document') {\n output.print(`HTTP method: ${hook.httpMethod}`)\n\n if (hook.description) {\n output.print(`Description: ${hook.description}`)\n }\n }\n\n output.print('')\n })\n },\n}\n\nexport default listHooksCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = 'Extracts the studio configuration as one or more JSON manifest files.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change. It is currently intended for use with Create only.\n\nOptions\n --path Optional path to specify destination directory of the manifest files. Default: /dist/static\n\nExamples\n # Extracts manifests\n sanity manifest extract\n\n # Extracts manifests into /public/static\n sanity manifest extract --path /public/static\n`\n\nconst extractManifestCommand: CliCommandDefinition = {\n name: 'extract',\n group: 'manifest',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const {extractManifestSafe} = await import('../../actions/manifest/extractManifestAction')\n const extractError = await extractManifestSafe(args, context)\n if (extractError) {\n throw extractError\n }\n return extractError\n },\n}\n\nexport default extractManifestCommand\n","export default {\n name: 'manifest',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Interacts with the studio configuration.',\n}\n","export const MIGRATIONS_DIRECTORY = 'migrations'\nexport const MIGRATION_SCRIPT_EXTENSIONS = ['mjs', 'js', 'ts', 'cjs']\n","export const minimalAdvanced = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {defineMigration, patch, at, setIfMissing} from 'sanity/migrate'\n\n/**\n * this migration will set \\`Default title\\` on all documents that are missing a title\n * and make \\`true\\` the default value for the \\`enabled\\` field\n */\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? 
` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n async *migrate(documents, context) {\n for await (const document of documents()) {\n yield patch(document._id, [\n at('title', setIfMissing('Default title')),\n at('enabled', setIfMissing(true)),\n ])\n }\n }\n})\n`\n","export const minimalSimple = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {at, defineMigration, setIfMissing, unset} from 'sanity/migrate'\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? ` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n document(doc, context) {\n // this will be called for every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n\n return at('title', setIfMissing('Default title'))\n },\n node(node, path, context) {\n // this will be called for every node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n\n if (typeof node === 'string' && node === 'deleteme') {\n return unset()\n }\n },\n object(node, path, context) {\n // this will be called for every object node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n if (node._type === 'author') {\n // make sure all authors objects have a books array\n return at('books', setIfMissing([]))\n }\n },\n array(node, path, context) {\n // this will be called for every array node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n string(node, path, context) {\n // this will be called for every string node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n number(node, path, context) {\n // this will be called for every number node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n boolean(node, path, context) {\n // this will be called for every boolean node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n null(node, path, context) {\n // this will be called for every null node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n },\n})\n`\n","export const renameField = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {defineMigration, at, setIfMissing, unset} from 'sanity/migrate'\n\nconst from = 'oldFieldName'\nconst to = 'newFieldName'\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? 
` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n document(doc, context) {\n return [\n at(to, setIfMissing(doc[from])),\n at(from, unset())\n ]\n }\n }\n})\n`\n","export const renameType = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {defineMigration, at, set} from 'sanity/migrate'\n\nconst oldType = 'old'\nconst newType = 'new'\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? ` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n object(object, path, context) {\n if (object._type === oldType) {\n return at('_type', set(newType))\n }\n }\n }\n})\n`\n","export const stringToPTE = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {pathsAreEqual, stringToPath} from 'sanity'\nimport {defineMigration, set} from 'sanity/migrate'\n\nconst targetPath = stringToPath('some.path')\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? ` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n string(node, path, ctx) {\n if (pathsAreEqual(path, targetPath)) {\n return set([\n {\n style: 'normal',\n _type: 'block',\n children: [\n {\n _type: 'span',\n marks: [],\n text: node,\n },\n ],\n markDefs: [],\n },\n ])\n }\n },\n },\n})\n`\n","import {existsSync, mkdirSync} from 'node:fs'\nimport {writeFile} from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {deburr} from 'lodash'\n\nimport {MIGRATIONS_DIRECTORY} from './constants'\nimport {minimalAdvanced} from './templates/minimalAdvanced'\nimport {minimalSimple} from './templates/minimalSimple'\nimport {renameField} from './templates/renameField'\nimport {renameType} from './templates/renameType'\nimport {stringToPTE} from './templates/stringToPTE'\n\nconst helpText = `\nExamples:\n # Create a new migration, prompting for title and options\n sanity migration create\n\n # Create a new migration with the provided title, prompting for options\n sanity migration create \"Rename field from location to address\"\n`\n\n// eslint-disable-next-line @typescript-eslint/no-empty-interface\ninterface CreateMigrationFlags {}\n\nconst TEMPLATES = [\n {name: 'Minimalistic migration to get you started', template: minimalSimple},\n {name: 'Rename an object type', template: renameType},\n {name: 'Rename a field', template: renameField},\n {name: 'Convert string field to Portable Text', template: stringToPTE},\n {\n name: 'Advanced template using async iterators providing more fine grained control',\n template: minimalAdvanced,\n },\n]\n\nconst createMigrationCommand: CliCommandDefinition<CreateMigrationFlags> = {\n name: 'create',\n group: 'migration',\n signature: '[TITLE]',\n helpText,\n description: 'Create a new migration within your project',\n action: async (args, context) => {\n const {output, prompt, workDir, chalk} = context\n\n let [title] = args.argsWithoutOptions\n\n while (!title?.trim()) {\n title = await prompt.single({\n type: 'input',\n suffix: ' (e.g. 
\"Rename field from location to address\")',\n message: 'Title of migration',\n })\n if (!title.trim()) {\n output.error(chalk.red('Name cannot be empty'))\n }\n }\n const types = await prompt.single({\n type: 'input',\n suffix: ' (optional)',\n message: 'Type of documents to migrate. You can add multiple types separated by comma',\n })\n\n const templatesByName = Object.fromEntries(TEMPLATES.map((t) => [t.name, t]))\n const template = await prompt.single({\n type: 'list',\n message: 'Select a template',\n choices: TEMPLATES.map((definedTemplate) => ({\n name: definedTemplate.name,\n value: definedTemplate.name,\n })),\n })\n\n const sluggedName = deburr(title.toLowerCase())\n .replace(/\\s+/g, '-')\n .replace(/[^a-z0-9-]/g, '')\n\n const destDir = path.join(workDir, MIGRATIONS_DIRECTORY, sluggedName)\n if (existsSync(destDir)) {\n if (\n !(await prompt.single({\n type: 'confirm',\n message: `Migration directory ${chalk.cyan(destDir)} already exists. Overwrite?`,\n default: false,\n }))\n ) {\n return\n }\n }\n mkdirSync(destDir, {recursive: true})\n\n const renderedTemplate = (templatesByName[template].template || minimalSimple)({\n migrationName: title,\n documentTypes: types\n .split(',')\n .map((t) => t.trim())\n .filter(Boolean),\n })\n\n const definitionFile = path.join(destDir, 'index.ts')\n\n await writeFile(definitionFile, renderedTemplate)\n // To dry run it, run \\`sanity migration run ${sluggedName}\\``)\n output.print()\n output.print(`${chalk.green('✓')} Migration created!`)\n output.print()\n output.print('Next steps:')\n output.print(\n `Open ${chalk.bold(\n definitionFile,\n )} in your code editor and write the code for your migration.`,\n )\n output.print(\n `Dry run the migration with:\\n\\`${chalk.bold(\n `sanity migration run ${sluggedName} --project=<projectId> --dataset <dataset> `,\n )}\\``,\n )\n output.print(\n `Run the migration against a dataset with:\\n \\`${chalk.bold(\n `sanity migration run ${sluggedName} --project=<projectId> --dataset <dataset> --no-dry-run`,\n )}\\``,\n )\n output.print()\n output.print(\n `👉 Learn more about schema and content migrations at ${chalk.bold(\n 'https://www.sanity.io/docs/schema-and-content-migrations',\n )}`,\n )\n },\n}\nexport default createMigrationCommand\n","import path from 'node:path'\n\nimport {type Migration} from '@sanity/migrate'\nimport {isPlainObject} from 'lodash'\n\nimport {MIGRATION_SCRIPT_EXTENSIONS, MIGRATIONS_DIRECTORY} from '../constants'\n\ninterface ResolvedMigrationScript {\n /**\n * Relative path from the working directory to the migration script\n */\n relativePath: string\n\n /**\n * Absolute path to the migration script\n */\n absolutePath: string\n\n /**\n * The migration module, if it could be resolved - otherwise `undefined`\n */\n mod?: {default: Migration; up?: unknown; down?: unknown}\n}\n\n/**\n * Resolves the potential paths to a migration script.\n * Considers the following paths (where `<ext>` is 'mjs', 'js', 'ts' or 'cjs'):\n *\n * - `<migrationsDir>/<migrationName>.<ext>`\n * - `<migrationsDir>/<migrationName>/index.<ext>`\n *\n * Note that all possible paths are returned, even if the files do not exist.\n * Check the `mod` property to see if a module could actually be loaded.\n *\n * @param workDir - Working directory of the studio\n * @param migrationName - The name of the migration directory to resolve\n * @returns An array of potential migration scripts\n * @internal\n */\nexport function resolveMigrationScript(\n workDir: string,\n migrationName: string,\n): 
ResolvedMigrationScript[] {\n return [migrationName, path.join(migrationName, 'index')].flatMap((location) =>\n MIGRATION_SCRIPT_EXTENSIONS.map((ext) => {\n const relativePath = path.join(MIGRATIONS_DIRECTORY, `${location}.${ext}`)\n const absolutePath = path.resolve(workDir, relativePath)\n let mod\n try {\n // eslint-disable-next-line import/no-dynamic-require\n mod = require(absolutePath)\n } catch (err) {\n if (err.code !== 'MODULE_NOT_FOUND') {\n throw new Error(`Error: ${err.message}\"`)\n }\n }\n return {relativePath, absolutePath, mod}\n }),\n )\n}\n\n/**\n * Checks whether or not the passed resolved migration script is actually loadable (eg has a default export)\n *\n * @param script - The resolved migration script to check\n * @returns `true` if the script is loadable, `false` otherwise\n * @internal\n */\nexport function isLoadableMigrationScript(\n script: ResolvedMigrationScript,\n): script is Required<ResolvedMigrationScript> {\n if (typeof script.mod === 'undefined' || !isPlainObject(script.mod.default)) {\n return false\n }\n\n const mod = script.mod.default\n return typeof mod.title === 'string' && mod.migrate !== undefined\n}\n","import {readdir} from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {type Migration} from '@sanity/migrate'\nimport {Table} from 'console-table-printer'\nimport {register} from 'esbuild-register/dist/node'\n\nimport {MIGRATION_SCRIPT_EXTENSIONS, MIGRATIONS_DIRECTORY} from './constants'\nimport {isLoadableMigrationScript, resolveMigrationScript} from './utils/resolveMigrationScript'\n\nconst helpText = ``\n\nconst listMigrationCommand: CliCommandDefinition = {\n name: 'list',\n group: 'migration',\n signature: '',\n helpText,\n description: 'List available migrations',\n action: async (_, context) => {\n const {workDir, output, chalk} = context\n try {\n const migrations = await resolveMigrations(workDir)\n\n if (migrations.length === 0) {\n output.print('No migrations found in migrations folder of the project')\n output.print(\n `\\nRun ${chalk.green(`\\`sanity migration create <NAME>\\``)} to create a new migration`,\n )\n return\n }\n\n const table = new Table({\n title: `Found ${migrations.length} migrations in project`,\n columns: [\n {name: 'id', title: 'ID', alignment: 'left'},\n {name: 'title', title: 'Title', alignment: 'left'},\n ],\n })\n\n migrations.forEach((definedMigration) => {\n table.addRow({id: definedMigration.id, title: definedMigration.migration.title})\n })\n table.printTable()\n output.print('\\nRun `sanity migration run <ID>` to run a migration')\n } catch (error) {\n if (error.code === 'ENOENT') {\n output.print('No migrations folder found in the project')\n output.print(\n `\\nRun ${chalk.green(`\\`sanity migration create <NAME>\\``)} to create a new migration`,\n )\n return\n }\n throw new Error(`An error occurred while listing migrations: ${error.message}`)\n }\n },\n}\n\n/**\n * A resolved migration, where you are guaranteed that the migration file exists\n *\n * @internal\n */\nexport interface ResolvedMigration {\n id: string\n migration: Migration\n}\n\n/**\n * Resolves all migrations in the studio working directory\n *\n * @param workDir - The studio working directory\n * @returns Array of migrations and their respective paths\n * @internal\n */\nexport async function resolveMigrations(workDir: string): Promise<ResolvedMigration[]> {\n let unregister\n if (!__DEV__) {\n unregister = register({\n target: `node${process.version.slice(1)}`,\n 
supported: {'dynamic-import': true},\n }).unregister\n }\n\n const migrationsDir = path.join(workDir, MIGRATIONS_DIRECTORY)\n const migrationEntries = await readdir(migrationsDir, {withFileTypes: true})\n\n const migrations: ResolvedMigration[] = []\n for (const entry of migrationEntries) {\n const entryName = entry.isDirectory() ? entry.name : removeMigrationScriptExtension(entry.name)\n const candidates = resolveMigrationScript(workDir, entryName).filter(isLoadableMigrationScript)\n\n for (const candidate of candidates) {\n migrations.push({\n id: entryName,\n migration: candidate.mod.default,\n })\n }\n }\n\n if (unregister) {\n unregister()\n }\n\n return migrations\n}\n\nfunction removeMigrationScriptExtension(fileName: string) {\n // Remove `.ts`, `.js` etc from the end of a filename\n return MIGRATION_SCRIPT_EXTENSIONS.reduce(\n (name, ext) => (name.endsWith(`.${ext}`) ? path.basename(name, `.${ext}`) : name),\n fileName,\n )\n}\n\nexport default listMigrationCommand\n","export default {\n name: 'migration',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages content migrations for Content Lake datasets',\n}\n","import {isIndexSegment, isIndexTuple, isKeySegment, type Path} from '@sanity/types'\n\n// FIXME: de-dupe this\n// copy/paste of `pathToString` from 'sanity' to prevent circular imports\nfunction pathToString(path: Path): string {\n if (!Array.isArray(path)) {\n throw new Error('Path is not an array')\n }\n\n return path.reduce<string>((target, segment, i) => {\n if (isIndexSegment(segment)) {\n return `${target}[${segment}]`\n }\n\n if (isKeySegment(segment) && segment._key) {\n return `${target}[_key==\"${segment._key}\"]`\n }\n\n if (isIndexTuple(segment)) {\n const [from, to] = segment\n return `${target}[${from}:${to}]`\n }\n\n if (typeof segment === 'string') {\n const separator = i === 0 ? '' : '.'\n return `${target}${separator}${segment}`\n }\n\n throw new Error(`Unsupported path segment \\`${JSON.stringify(segment)}\\``)\n }, '')\n}\n\ninterface BaseNode {\n path: Path\n}\n\nexport interface Tree<Node extends BaseNode> {\n nodes?: Node[]\n children?: Record<string, Tree<Node>>\n}\n\n/**\n * Recursively calculates the max length of all the keys in the given validation\n * tree respecting extra length due to indentation depth. Used to calculate the\n * padding for the rest of the tree.\n */\nexport const maxKeyLength = (children: Record<string, Tree<BaseNode>> = {}, depth = 0): number => {\n return Object.entries(children)\n .map(([key, child]) =>\n Math.max(key.length + depth * 2, maxKeyLength(child.children, depth + 1)),\n )\n .reduce((max, next) => (next > max ? next : max), 0)\n}\n\ninterface Options<Node extends BaseNode> {\n node?: Record<string, Tree<Node>>\n paddingLength: number\n indent?: string\n getNodes?: (node: Tree<Node>) => Node[] | undefined\n getMessage: (node: Node) => string\n}\n\n/**\n * Recursively formats a given tree into a printed user-friendly tree structure\n */\nexport const formatTree = <Node extends BaseNode>({\n node = {},\n paddingLength,\n indent = '',\n getNodes: getLeaves = ({nodes}) => nodes,\n getMessage,\n}: Options<Node>): string => {\n const entries = Object.entries(node)\n\n return entries\n .map(([key, child], index) => {\n const isLast = index === entries.length - 1\n const nextIndent = `${indent}${isLast ? 
' ' : '│ '}`\n const leaves = getLeaves(child)\n\n const nested = formatTree({\n node: child.children,\n paddingLength,\n indent: nextIndent,\n getNodes: getLeaves,\n getMessage,\n })\n\n if (!leaves?.length) {\n const current = `${indent}${isLast ? '└' : '├'}─ ${key}`\n return [current, nested].filter(Boolean).join('\\n')\n }\n\n const [first, ...rest] = leaves\n const firstPadding = '.'.repeat(paddingLength - indent.length - key.length)\n const elbow = isLast ? '└' : '├'\n const subsequentPadding = ' '.repeat(paddingLength - indent.length + 2)\n\n const firstMessage = `${indent}${elbow}─ ${key} ${firstPadding} ${getMessage(first)}`\n const subsequentMessages = rest\n .map((marker) => `${nextIndent}${subsequentPadding} ${getMessage(marker)}`)\n .join('\\n')\n\n const current = [firstMessage, subsequentMessages].filter(Boolean).join('\\n')\n return [current, nested].filter(Boolean).join('\\n')\n })\n .join('\\n')\n}\n\n/**\n * Converts a set of markers with paths into a tree of markers where the paths\n * are embedded in the tree\n */\nexport function convertToTree<const Node extends BaseNode>(nodes: Node[]): Tree<Node> {\n const root: Tree<Node> = {}\n\n // add the markers to the tree\n function addNode(node: Node, tree: Tree<Node> = root) {\n // if we've traversed the whole path\n if (!node.path.length) {\n if (!tree.nodes) tree.nodes = [] // ensure markers is defined\n\n // then add the marker to the front\n tree.nodes.push(node)\n return\n }\n\n const [current, ...rest] = node.path\n const key = pathToString([current])\n\n // ensure the current node has children and the next node\n if (!tree.children) tree.children = {}\n if (!(key in tree.children)) tree.children[key] = {}\n\n addNode({...node, path: rest}, tree.children[key])\n }\n\n for (const node of nodes) addNode(node)\n return root\n}\n","import {isatty} from 'node:tty'\n\nimport {type Migration, type Mutation, type NodePatch, type Transaction} from '@sanity/migrate'\nimport {type KeyedSegment} from '@sanity/types'\nimport {type Chalk} from 'chalk'\n\nimport {convertToTree, formatTree, maxKeyLength} from '../../util/tree'\n\ntype ItemRef = string | number\ntype Impact = 'destructive' | 'maybeDestructive' | 'incremental'\ntype Variant = Impact | 'info'\n\nconst isTty = isatty(1)\n\ninterface FormatterOptions<Subject> {\n chalk: Chalk\n subject: Subject\n migration: Migration\n indentSize?: number\n}\n\nexport function prettyFormat({\n chalk,\n subject,\n migration,\n indentSize = 0,\n}: FormatterOptions<Mutation | Transaction | (Mutation | Transaction)[]>): string {\n return (Array.isArray(subject) ? subject : [subject])\n .map((subjectEntry) => {\n if (subjectEntry.type === 'transaction') {\n return [\n [\n badge('transaction', 'info', chalk),\n typeof subjectEntry.id === 'undefined' ? null : chalk.underline(subjectEntry.id),\n ]\n .filter(Boolean)\n .join(' '),\n indent(\n prettyFormat({\n chalk,\n subject: subjectEntry.mutations,\n migration,\n indentSize: indentSize,\n }),\n ),\n ].join('\\n\\n')\n }\n return prettyFormatMutation({\n chalk,\n subject: subjectEntry,\n migration,\n indentSize,\n })\n })\n .join('\\n\\n')\n}\n\nfunction encodeItemRef(ref: number | KeyedSegment): ItemRef {\n return typeof ref === 'number' ? 
ref : ref._key\n}\n\nfunction badgeStyle(chalk: Chalk, variant: Variant): Chalk {\n const styles: Record<Variant, Chalk> = {\n info: chalk.bgWhite.black,\n incremental: chalk.bgGreen.black.bold,\n maybeDestructive: chalk.bgYellow.black.bold,\n destructive: chalk.bgRed.black.bold,\n }\n\n return styles[variant]\n}\n\nfunction badge(label: string, variant: Variant, chalk: Chalk): string {\n if (!isTty) {\n return `[${label}]`\n }\n\n return badgeStyle(chalk, variant)(` ${label} `)\n}\n\nconst mutationImpact: Record<Mutation['type'], Impact> = {\n create: 'incremental',\n createIfNotExists: 'incremental',\n createOrReplace: 'maybeDestructive',\n delete: 'destructive',\n patch: 'maybeDestructive',\n}\n\nfunction documentId(mutation: Mutation): string | undefined {\n if ('id' in mutation) {\n return mutation.id\n }\n\n if ('document' in mutation) {\n return mutation.document._id\n }\n\n return undefined\n}\n\nconst listFormatter = new Intl.ListFormat('en-US', {\n type: 'disjunction',\n})\n\nfunction mutationHeader(chalk: Chalk, mutation: Mutation, migration: Migration): string {\n const mutationType = badge(mutation.type, mutationImpact[mutation.type], chalk)\n\n const documentType =\n 'document' in mutation || migration.documentTypes\n ? badge(\n 'document' in mutation\n ? mutation.document._type\n : listFormatter.format(migration.documentTypes ?? []),\n 'info',\n chalk,\n )\n : null\n\n // TODO: Should we list documentType when a mutation can be yielded for any document type?\n return [mutationType, documentType, chalk.underline(documentId(mutation))]\n .filter(Boolean)\n .join(' ')\n}\n\nexport function prettyFormatMutation({\n chalk,\n subject,\n migration,\n indentSize = 0,\n}: FormatterOptions<Mutation>): string {\n const lock =\n 'options' in subject ? 
chalk.cyan(`(if revision==${subject.options?.ifRevision})`) : ''\n const header = [mutationHeader(chalk, subject, migration), lock].join(' ')\n const padding = ' '.repeat(indentSize)\n\n if (\n subject.type === 'create' ||\n subject.type === 'createIfNotExists' ||\n subject.type === 'createOrReplace'\n ) {\n return [header, '\\n', indent(JSON.stringify(subject.document, null, 2), indentSize)].join('')\n }\n\n if (subject.type === 'patch') {\n const tree = convertToTree<NodePatch>(subject.patches.flat())\n const paddingLength = Math.max(maxKeyLength(tree.children) + 2, 30)\n\n return [\n header,\n '\\n',\n formatTree<NodePatch>({\n node: tree.children,\n paddingLength,\n indent: padding,\n getMessage: (patch) => formatPatchMutation(chalk, patch),\n }),\n ].join('')\n }\n\n return header\n}\n\nfunction formatPatchMutation(chalk: Chalk, patch: NodePatch): string {\n const {op} = patch\n const formattedType = chalk.bold(op.type)\n if (op.type === 'unset') {\n return `${chalk.red(formattedType)}()`\n }\n if (op.type === 'diffMatchPatch') {\n return `${chalk.yellow(formattedType)}(${op.value})`\n }\n if (op.type === 'inc' || op.type === 'dec') {\n return `${chalk.yellow(formattedType)}(${op.amount})`\n }\n if (op.type === 'set') {\n return `${chalk.yellow(formattedType)}(${JSON.stringify(op.value)})`\n }\n if (op.type === 'setIfMissing') {\n return `${chalk.green(formattedType)}(${JSON.stringify(op.value)})`\n }\n if (op.type === 'insert') {\n return `${chalk.green(formattedType)}(${op.position}, ${encodeItemRef(\n op.referenceItem,\n )}, ${JSON.stringify(op.items)})`\n }\n if (op.type === 'replace') {\n return `${chalk.yellow(formattedType)}(${encodeItemRef(op.referenceItem)}, ${JSON.stringify(\n op.items,\n )})`\n }\n if (op.type === 'truncate') {\n return `${chalk.red(formattedType)}(${op.startIndex}, ${op.endIndex})`\n }\n // @ts-expect-error all cases are covered\n throw new Error(`Invalid operation type: ${op.type}`)\n}\n\nfunction indent(subject: string, size = 2): string {\n const padding = ' '.repeat(size)\n\n return subject\n .split('\\n')\n .map((line) => padding + line)\n .join('\\n')\n}\n","import path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {\n DEFAULT_MUTATION_CONCURRENCY,\n dryRun,\n MAX_MUTATION_CONCURRENCY,\n type Migration,\n type MigrationProgress,\n run,\n} from '@sanity/migrate'\nimport {Table} from 'console-table-printer'\nimport {register} from 'esbuild-register/dist/node'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {debug} from '../../debug'\nimport {MIGRATIONS_DIRECTORY} from './constants'\nimport {resolveMigrations} from './listMigrationsCommand'\nimport {prettyFormat} from './prettyMutationFormatter'\nimport {isLoadableMigrationScript, resolveMigrationScript} from './utils/resolveMigrationScript'\n\nconst helpText = `\nOptions\n --no-dry-run By default the migration runs in dry mode. Pass this option to migrate dataset.\n --concurrency <concurrent> How many mutation requests to run in parallel. Must be between 1 and ${MAX_MUTATION_CONCURRENCY}. Default: ${DEFAULT_MUTATION_CONCURRENCY}.\n --no-progress Don't output progress. Useful if you want debug your migration script and see the output of console.log() statements.\n --dataset <dataset> Dataset to migrate. Defaults to the dataset configured in your Sanity CLI config.\n --project <project id> Project ID of the dataset to migrate. 
Defaults to the projectId configured in your Sanity CLI config.\n --no-confirm Skip the confirmation prompt before running the migration. Make sure you know what you're doing before using this flag.\n --from-export <export.tar.gz> Use a local dataset export as source for migration instead of calling the Sanity API. Note: this is only supported for dry runs.\n\n\nExamples\n # dry run the migration\n sanity migration run <id>\n\n # execute the migration against a dataset\n sanity migration run <id> --no-dry-run --project xyz --dataset staging\n\n # execute the migration using a dataset export as the source\n sanity migration run <id> --from-export=production.tar.gz --no-dry-run --projectId xyz --dataset staging\n`\n\ninterface CreateFlags {\n ['dry-run']?: boolean\n concurrency?: number\n ['from-export']?: string\n progress?: boolean\n dataset?: string\n project?: string\n confirm?: boolean\n}\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .options('dry-run', {type: 'boolean', default: true})\n .options('concurrency', {type: 'number', default: DEFAULT_MUTATION_CONCURRENCY})\n .options('progress', {type: 'boolean', default: true})\n .options('dataset', {type: 'string'})\n .options('from-export', {type: 'string'})\n .options('project', {type: 'string'})\n .options('confirm', {type: 'boolean', default: true}).argv\n}\n\nconst runMigrationCommand: CliCommandDefinition<CreateFlags> = {\n name: 'run',\n group: 'migration',\n signature: 'ID',\n helpText,\n description: 'Run a migration against a dataset',\n // eslint-disable-next-line max-statements\n action: async (args, context) => {\n const {apiClient, output, prompt, chalk, workDir} = context\n const [id] = args.argsWithoutOptions\n const migrationsDirectoryPath = path.join(workDir, MIGRATIONS_DIRECTORY)\n\n const flags = await parseCliFlags(args)\n\n const fromExport = flags.fromExport\n const dry = flags.dryRun\n const dataset = flags.dataset\n const project = flags.project\n\n if ((dataset && !project) || (project && !dataset)) {\n throw new Error('If either --dataset or --project is provided, both must be provided')\n }\n\n if (!id) {\n output.error(chalk.red('Error: Migration ID must be provided'))\n const migrations = await resolveMigrations(workDir)\n const table = new Table({\n title: `Migrations found in project`,\n columns: [\n {name: 'id', title: 'ID', alignment: 'left'},\n {name: 'title', title: 'Title', alignment: 'left'},\n ],\n })\n\n migrations.forEach((definedMigration) => {\n table.addRow({id: definedMigration.id, title: definedMigration.migration.title})\n })\n table.printTable()\n output.print('\\nRun `sanity migration run <ID>` to run a migration')\n\n return\n }\n\n if (!__DEV__) {\n register({\n target: `node${process.version.slice(1)}`,\n supported: {'dynamic-import': true},\n })\n }\n\n const candidates = resolveMigrationScript(workDir, id)\n const resolvedScripts = candidates.filter(isLoadableMigrationScript)\n\n if (resolvedScripts.length > 1) {\n // todo: consider prompt user about which one to run? 
note: it's likely a mistake if multiple files resolve to the same name\n throw new Error(\n `Found multiple migrations for \"${id}\" in ${chalk.cyan(migrationsDirectoryPath)}: \\n - ${candidates\n .map((candidate) => path.relative(migrationsDirectoryPath, candidate.absolutePath))\n .join('\\n - ')}`,\n )\n }\n\n const script = resolvedScripts[0]\n if (!script) {\n throw new Error(\n `No migration found for \"${id}\" in ${chalk.cyan(chalk.cyan(migrationsDirectoryPath))}. Make sure that the migration file exists and exports a valid migration as its default export.\\n\n Tried the following files:\\n - ${candidates\n .map((candidate) => path.relative(migrationsDirectoryPath, candidate.absolutePath))\n .join('\\n - ')}`,\n )\n }\n\n const mod = script.mod\n if ('up' in mod || 'down' in mod) {\n // todo: consider adding support for up/down as separate named exports\n // For now, make sure we reserve the names for future use\n throw new Error(\n 'Only \"up\" migrations are supported at this time, please use a default export',\n )\n }\n\n const migration: Migration = mod.default\n\n if (fromExport && !dry) {\n throw new Error('Can only dry run migrations from a dataset export file')\n }\n\n const concurrency = flags.concurrency\n if (concurrency !== undefined) {\n if (concurrency > MAX_MUTATION_CONCURRENCY) {\n throw new Error(\n `Concurrency exceeds the maximum allowed value of ${MAX_MUTATION_CONCURRENCY}`,\n )\n }\n\n if (concurrency === 0) {\n throw new Error(`Concurrency must be a positive number, got ${concurrency}`)\n }\n }\n\n const projectConfig = apiClient({\n requireUser: true,\n requireProject: false,\n }).config()\n\n if (!project && !projectConfig.projectId) {\n throw new Error(\n 'sanity.cli.js does not contain a project identifier (\"api.projectId\") and no --project option was provided.',\n )\n }\n\n const apiConfig = {\n dataset: dataset ?? projectConfig.dataset!,\n projectId: project ?? projectConfig.projectId!,\n apiHost: projectConfig.apiHost!,\n token: projectConfig.token!,\n apiVersion: 'v2024-01-29',\n } as const\n if (dry) {\n dryRunHandler()\n return\n }\n\n output.print(\n `\\n${chalk.yellow(chalk.bold('Note: During migrations, your webhooks stay active.'))}`,\n )\n output.print(\n `To adjust them, launch the management interface with ${chalk.cyan('sanity manage')}, navigate to the API settings, and toggle the webhooks before and after the migration as needed.\\n`,\n )\n\n if (flags.confirm) {\n const response = await prompt.single<boolean>({\n message: `This migration will run on the ${chalk.yellow(\n chalk.bold(apiConfig.dataset),\n )} dataset in ${chalk.yellow(chalk.bold(apiConfig.projectId))} project. 
Are you sure?`,\n type: 'confirm',\n })\n\n if (!response) {\n debug('User aborted migration')\n return\n }\n }\n\n const spinner = output.spinner(`Running migration \"${id}\"`).start()\n await run({api: apiConfig, concurrency, onProgress: createProgress(spinner)}, migration)\n spinner.stop()\n\n function createProgress(progressSpinner: ReturnType<typeof output.spinner>) {\n return function onProgress(progress: MigrationProgress) {\n if (!flags.progress) {\n progressSpinner.stop()\n return\n }\n if (progress.done) {\n progressSpinner.text = `Migration \"${id}\" completed.\n\n Project id: ${chalk.bold(apiConfig.projectId)}\n Dataset: ${chalk.bold(apiConfig.dataset)}\n\n ${progress.documents} documents processed.\n ${progress.mutations} mutations generated.\n ${chalk.green(progress.completedTransactions.length)} transactions committed.`\n progressSpinner.stopAndPersist({symbol: chalk.green('✔')})\n return\n }\n\n ;[null, ...progress.currentTransactions].forEach((transaction) => {\n progressSpinner.text = `Running migration \"${id}\" ${dry ? 'in dry mode...' : '...'}\n\n Project id: ${chalk.bold(apiConfig.projectId)}\n Dataset: ${chalk.bold(apiConfig.dataset)}\n Document type: ${chalk.bold(migration.documentTypes?.join(','))}\n\n ${progress.documents} documents processed…\n ${progress.mutations} mutations generated…\n ${chalk.blue(progress.pending)} requests pending…\n ${chalk.green(progress.completedTransactions.length)} transactions committed.\n\n ${\n transaction && !progress.done\n ? `» ${prettyFormat({chalk, subject: transaction, migration, indentSize: 2})}`\n : ''\n }`\n })\n }\n }\n\n async function dryRunHandler() {\n output.print(`Running migration \"${id}\" in dry mode`)\n\n if (fromExport) {\n output.print(`Using export ${chalk.cyan(fromExport)}`)\n }\n\n output.print()\n output.print(`Project id: ${chalk.bold(apiConfig.projectId)}`)\n output.print(`Dataset: ${chalk.bold(apiConfig.dataset)}`)\n\n for await (const mutation of dryRun({api: apiConfig, exportPath: fromExport}, migration)) {\n if (!mutation) continue\n output.print()\n output.print(\n prettyFormat({\n chalk,\n subject: mutation,\n migration,\n }),\n )\n }\n }\n },\n}\n\nexport default runMigrationCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartPreviewServerCommandFlags} from '../../actions/preview/previewAction'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new entry to the CORS-origins allow list.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. 
[default: \"127.0.0.1\"]\n\nExamples\n sanity preview --host=0.0.0.0\n sanity preview --port=1942\n sanity preview some/build-output-dir\n`\n\nconst previewCommand: CliCommandDefinition = {\n name: 'preview',\n signature: '[BUILD_OUTPUT_DIR] [--port <port>] [--host <host>]',\n description: 'Starts a server to preview a production build of Sanity Studio',\n action: async (\n args: CliCommandArguments<StartPreviewServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const previewAction = await getPreviewAction()\n\n return previewAction(args, context)\n },\n helpText,\n}\n\nasync function getPreviewAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/preview/previewAction') = require('../../actions/preview/previewAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/preview/previewAction')\n\n return mod.default\n}\n\nexport default previewCommand\n","import {type CliCommandArguments, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type DeleteSchemaFlags} from '../../actions/schema/deleteSchemaAction'\n\nconst description = 'Delete schemas by their IDs.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions\n --ids <schema_id_1,schema_id_2,...> comma-separated list of schema IDs to delete\n --dataset <dataset_name> delete schemas from a specific dataset\n --manifest-dir <directory> directory containing your manifest file if it's not in the default location\n\nExamples\n # Delete single schema\n sanity schema delete --ids <schema_id>\n\n # Delete multiple schemas\n sanity schema delete --ids <schema_id_1,schema_id_2,...>\n`\n\nconst deleteSchemaCommand = {\n name: 'delete',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/deleteSchemaAction')\n\n return mod.default(args as unknown as CliCommandArguments<DeleteSchemaFlags>, context)\n },\n} satisfies CliCommandDefinition\n\nexport default deleteSchemaCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = 'Extracts a JSON representation of a Sanity schema within a Studio context.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions\n --workspace <name> The name of the workspace to generate a schema for\n --path Optional path to specify destination of the schema file\n --enforce-required-fields Makes the schema generated treat fields marked as required as non-optional. Defaults to false.\n --format=[groq-type-nodes] Format the schema as GROQ type nodes. 
Only available format at the moment.\n\nExamples\n # Extracts schema types in a Sanity project with more than one workspace\n sanity schema extract --workspace default\n`\n\nconst extractSchemaCommand: CliCommandDefinition = {\n name: 'extract',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/extractAction')\n\n return mod.default(args, context)\n },\n} satisfies CliCommandDefinition\n\nexport default extractSchemaCommand\n","export default {\n name: 'schema',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Interacts with Sanity Studio schema configurations',\n}\n","import {type CliCommandArguments, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type SchemaListFlags} from '../../actions/schema/schemaListAction'\n\nconst description = 'Lists all schemas in the current dataset.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions\n --json get schemas as json\n --id <schema_id> fetch a specific schema by its ID\n --manifest-dir <directory> directory containing your manifest file if it's not in the default location\n\nExamples\n # Get full json schemas\n sanity schema list --json\n\n # Get a specific schema by ID\n sanity schema list --id <schema_id>\n`\n\nconst fetchSchemaCommand = {\n name: 'list',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/schemaListAction')\n\n return mod.default(args as unknown as CliCommandArguments<SchemaListFlags>, context)\n },\n} satisfies CliCommandDefinition\n\nexport default fetchSchemaCommand\n","import {type CliCommandArguments, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type StoreManifestSchemasFlags} from '../../actions/schema/storeSchemasAction'\n\nconst description = 'Store schemas into workspace datasets.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions:\n --workspace <workspace_name> store schema for a specific workspace\n --manifest-dir <directory> directory containing your manifest file if it's not in the default location\n --id-prefix <prefix> add a prefix to the schema ID\n --schema-required fail if schema file is not found\n --verbose print detailed information during store\n\nExamples\n # if no options are provided all workspace schemas will be stored\n sanity schema store\n # Store the schema for only the workspace 'default'\n sanity schema store --workspace default\n`\n\nconst storeSchemaCommand = {\n name: 'store',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/storeSchemasAction')\n\n const extendedArgs = {\n ...args,\n extOptions: {\n ...args.extOptions,\n 'schema-required': true,\n },\n }\n\n return mod.default(\n extendedArgs as unknown as CliCommandArguments<StoreManifestSchemasFlags>,\n context,\n )\n },\n} satisfies CliCommandDefinition\n\nexport default storeSchemaCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = 'Validates all schema types specified in a workspace.'\n\nconst helpText = `\nOptions\n --workspace <name> The name of the workspace to use when validating all schema types.\n --format <pretty|ndjson|json> The output format used to print schema errors and warnings.\n --level <error|warning> The minimum level reported out. 
Defaults to warning.\n\nExamples\n # Validates all schema types in a Sanity project with more than one workspace\n sanity schema validate --workspace default\n\n # Save the results of the report into a file\n sanity schema validate > report.txt\n\n # Report out only errors\n sanity schema validate --level error\n`\n\nconst validateDocumentsCommand: CliCommandDefinition = {\n name: 'validate',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/validateAction')\n\n return mod.default(args, context)\n },\n} satisfies CliCommandDefinition\n\nexport default validateDocumentsCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartPreviewServerCommandFlags} from '../../actions/preview/previewAction'\nimport {isInteractive} from '../../util/isInteractive'\nimport {getDevAction} from '../dev/devCommand'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new CORS-entry to be added.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. [default: \"127.0.0.1\"]\n\nExamples\n sanity start --host=0.0.0.0\n sanity start --port=1942\n sanity start some/build-output-dir\n`\n\nconst startCommand: CliCommandDefinition = {\n name: 'start',\n signature: '[BUILD_OUTPUT_DIR] [--port <port>] [--host <host>]',\n description: 'Alias for `sanity preview`',\n action: async (\n args: CliCommandArguments<StartPreviewServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const {output, chalk, prompt} = context\n const previewAction = await getPreviewAction()\n\n const warn = (msg: string) => output.warn(chalk.yellow.bgBlack(msg))\n const error = (msg: string) => output.warn(chalk.red.bgBlack(msg))\n warn('╭───────────────────────────────────────────────────────────╮')\n warn('│ │')\n warn(\"│ You're running Sanity Studio v3. In this version the │\")\n warn('│ [start] command is used to preview static builds. |')\n warn('│ │')\n warn('│ To run a development server, use the [npm run dev] or |')\n warn('│ [npx sanity dev] command instead. 
For more information, │')\n warn('│ see https://www.sanity.io/help/studio-v2-vs-v3 │')\n warn('│ │')\n warn('╰───────────────────────────────────────────────────────────╯')\n warn('') // Newline to separate from other output\n\n try {\n await previewAction(args, context)\n } catch (err) {\n if (err.name !== 'BUILD_NOT_FOUND') {\n throw err\n }\n\n error(err.message)\n error('\\n')\n\n const shouldRunDevServer =\n isInteractive &&\n (await prompt.single({\n message: 'Do you want to start a development server instead?',\n type: 'confirm',\n }))\n\n if (shouldRunDevServer) {\n const devAction = await getDevAction()\n await devAction(args, context)\n } else {\n // Indicate that this isn't an expected exit\n // eslint-disable-next-line no-process-exit\n process.exit(1)\n }\n }\n },\n helpText,\n}\n\nasync function getPreviewAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/preview/previewAction') = require('../../actions/preview/previewAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/preview/previewAction')\n\n return mod.default\n}\n\nexport default startCommand\n","export function prettifyQuotaError(message: string) {\n return (err: Error & {statusCode?: number}): Error & {statusCode?: number} => {\n if (err.statusCode === 402) {\n err.message = message\n throw err\n }\n\n throw err\n }\n}\n","import {type CliCommandDefinition, type CliPrompter} from '@sanity/cli'\n\nimport {prettifyQuotaError} from '../../util/prettifyQuotaError'\nimport {type Role} from './types'\n\nconst helpText = `\nOptions\n --role Role to invite the user as\n\nExamples\n # Invite a new user to the project (prompt for details)\n sanity users invite\n\n # Send a new user invite to the email \"pippi@sanity.io\", prompt for role\n sanity users invite pippi@sanity.io\n\n # Send a new user invite to the email \"pippi@sanity.io\", as administrator\n sanity users invite pippi@sanity.io --role administrator\n`\n\ninterface InviteFlags {\n role?: string\n}\n\nconst inviteUserCommand: CliCommandDefinition<InviteFlags> = {\n name: 'invite',\n group: 'users',\n signature: '[EMAIL]',\n helpText,\n description: 'Invite a new user to the project',\n action: async (args, context) => {\n const {apiClient, output, prompt} = context\n const [selectedEmail] = args.argsWithoutOptions\n const flags = args.extOptions\n\n const client = apiClient().clone().config({useProjectHostname: false, apiVersion: '2021-06-07'})\n const {projectId} = client.config()\n const roles = (await client.request<Role[]>({uri: `/projects/${projectId}/roles`})).filter(\n (role) => role.appliesToUsers,\n )\n const email = selectedEmail || (await promptForEmail(prompt))\n const selectedRole = flags.role || (await promptForRole(prompt, roles))\n const role = roles.find(({name}) => name.toLowerCase() === selectedRole.toLowerCase())\n if (!role) {\n throw new Error(`Role name \"${selectedRole}\" not found`)\n }\n\n await client\n .clone()\n .request({\n method: 'POST',\n uri: `/invitations/project/${projectId}`,\n body: {email, role: role.name},\n useGlobalApi: true,\n maxRedirects: 0,\n })\n .catch(\n prettifyQuotaError(\n 'Project is already at user quota, add billing details to the project in order to allow overage charges.',\n ),\n )\n\n output.print(`Invitation sent to 
${email}`)\n },\n}\n\nexport default inviteUserCommand\n\nfunction promptForEmail(prompt: CliPrompter): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Email to invite:',\n filter: (val) => val.trim(),\n validate: (name) => {\n if (!name || !name.includes('@')) {\n return 'Invalid email'\n }\n\n return true\n },\n })\n}\n\nfunction promptForRole(prompt: CliPrompter, roles: Role[]): Promise<string> {\n return prompt.single({\n type: 'list',\n message: 'Which role should the user have?',\n choices: roles.map((role) => ({\n value: role.name,\n name: `${role.title} (${role.description})`,\n })),\n })\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport {size, sortBy} from 'lodash'\n\nimport {type Invite, type PartialProjectResponse, type User} from './types'\n\nconst sortFields = ['id', 'name', 'role', 'date']\n\nconst helpText = `\nOptions\n --no-invitations Don't include pending invitations\n --no-robots Don't include robots (token users)\n --sort <field> Sort users by specified column: ${sortFields.join(', ')}\n --order <asc/desc> Sort output ascending/descending\n\nExamples\n # List all users of the project\n sanity users list\n\n # List all users of the project, but exclude pending invitations and robots\n sanity users list --no-invitations --no-robots\n\n # List all users, sorted by role\n sanity users list --sort role\n`\n\nconst listUsersCommand: CliCommandDefinition = {\n name: 'list',\n group: 'users',\n signature: '',\n helpText,\n description: 'List all users of the project',\n action: async (args, context) => {\n const {apiClient, output, chalk} = context\n const {sort, order, robots, invitations} = {\n sort: 'date',\n order: 'asc',\n robots: true,\n invitations: true,\n ...args.extOptions,\n }\n\n if (!sortFields.includes(sort)) {\n throw new Error(`Can't sort by field \"${sort}\". Must be one of ${sortFields.join(', ')}`)\n }\n\n if (order !== 'asc' && order !== 'desc') {\n throw new Error(`Unknown sort order \"${order}\", must be either \"asc\" or \"desc\"`)\n }\n\n const client = apiClient()\n const globalClient = client.clone().config({useProjectHostname: false})\n const {projectId} = client.config()\n\n const useGlobalApi = true\n const [pendingInvitations, project] = await Promise.all([\n invitations\n ? globalClient\n .request<Invite[]>({uri: `/invitations/project/${projectId}`, useGlobalApi})\n .then(getPendingInvitations)\n : [],\n globalClient.request<PartialProjectResponse>({uri: `/projects/${projectId}`, useGlobalApi}),\n ])\n\n const memberIds = project.members.map((member) => member.id)\n const users = await globalClient\n .request<User | User[]>({uri: `/users/${memberIds.join(',')}`, useGlobalApi})\n .then((user) => (Array.isArray(user) ? user : [user]))\n\n const projectMembers = project.members\n .map((member) => ({\n ...member,\n ...getUserProps(users.find((candidate) => candidate.id === member.id)),\n }))\n .filter((member) => !member.isRobot || robots)\n\n const members = [...projectMembers, ...pendingInvitations]\n\n const ordered = sortBy(\n members.map(({id, name, role, date}) => [id, name, role, date]),\n [sortFields.indexOf(sort)],\n )\n\n const rows = order === 'asc' ? 
ordered : ordered.reverse()\n\n const maxWidths = rows.reduce(\n (max, row) => row.map((current, index) => Math.max(size(current), max[index])),\n sortFields.map((str) => size(str)),\n )\n\n const printRow = (row: string[]) => {\n const isInvite = row[0] === '<pending>'\n const textRow = row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join(' ')\n return isInvite ? chalk.dim(textRow) : textRow\n }\n\n output.print(chalk.cyan(printRow(sortFields)))\n rows.forEach((row) => output.print(printRow(row)))\n },\n}\n\nfunction getUserProps(user: User | undefined) {\n const {displayName: name, createdAt: date} = user || {}\n return {name: name || '', date: date || ''}\n}\n\nfunction getPendingInvitations(invitations: Invite[]) {\n return invitations\n .filter((invite) => !invite.isAccepted && !invite.isRevoked && !invite.acceptedByUserId)\n .map((invite) => ({\n id: '<pending>',\n name: invite.email,\n role: invite.role,\n date: invite.createdAt,\n }))\n}\n\nexport default listUsersCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nexport const usersGroup: CliCommandGroupDefinition = {\n name: 'users',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages users of your Sanity project',\n}\n\nexport default usersGroup\n","import {type CliCommandDefinition, type CliCommandGroupDefinition} from '@sanity/cli'\n\nimport {SCHEMA_STORE_ENABLED} from '../actions/schema/storeSchemasAction'\nimport appGroup from './app/appGroup'\nimport appBuildCommand from './app/buildCommand'\nimport appDeployCommand from './app/deployCommand'\nimport appDevCommand from './app/devCommand'\nimport appStartCommand from './app/startCommand'\nimport backupGroup from './backup/backupGroup'\nimport disableBackupCommand from './backup/disableBackupCommand'\nimport downloadBackupCommand from './backup/downloadBackupCommand'\nimport enableBackupCommand from './backup/enableBackupCommand'\nimport listBackupCommand from './backup/listBackupCommand'\nimport buildCommand from './build/buildCommand'\nimport addCorsOriginCommand from './cors/addCorsOriginCommand'\nimport corsGroup from './cors/corsGroup'\nimport deleteCorsOriginCommand from './cors/deleteCorsOriginCommand'\nimport listCorsOriginsCommand from './cors/listCorsOriginsCommand'\nimport aliasDatasetCommand from './dataset/alias/aliasCommands'\nimport copyDatasetCommand from './dataset/copyDatasetCommand'\nimport createDatasetCommand from './dataset/createDatasetCommand'\nimport datasetGroup from './dataset/datasetGroup'\nimport datasetVisibilityCommand from './dataset/datasetVisibilityCommand'\nimport deleteDatasetCommand from './dataset/deleteDatasetCommand'\nimport exportDatasetCommand from './dataset/exportDatasetCommand'\nimport importDatasetCommand from './dataset/importDatasetCommand'\nimport listDatasetsCommand from './dataset/listDatasetsCommand'\nimport deployCommand from './deploy/deployCommand'\nimport undeployCommand from './deploy/undeployCommand'\nimport devCommand from './dev/devCommand'\nimport createDocumentsCommand from './documents/createDocumentsCommand'\nimport deleteDocumentsCommand from './documents/deleteDocumentsCommand'\nimport documentsGroup from './documents/documentsGroup'\nimport getDocumentsCommand from './documents/getDocumentsCommand'\nimport queryDocumentsCommand from './documents/queryDocumentsCommand'\nimport validateDocumentsCommand from './documents/validateDocumentsCommand'\nimport execCommand from './exec/execCommand'\nimport deleteGraphQLAPICommand from 
'./graphql/deleteGraphQLAPICommand'\nimport deployGraphQLAPICommand from './graphql/deployGraphQLAPICommand'\nimport graphqlGroup from './graphql/graphqlGroup'\nimport listGraphQLAPIsCommand from './graphql/listGraphQLAPIsCommand'\nimport createHookCommand from './hook/createHookCommand'\nimport deleteHookCommand from './hook/deleteHookCommand'\nimport hookGroup from './hook/hookGroup'\nimport listHookLogsCommand from './hook/listHookLogsCommand'\nimport listHooksCommand from './hook/listHooksCommand'\nimport printHookAttemptCommand from './hook/printHookAttemptCommand'\nimport extractManifestCommand from './manifest/extractManifestCommand'\nimport manifestGroup from './manifest/manifestGroup'\nimport createMigrationCommand from './migration/createMigrationCommand'\nimport listMigrationsCommand from './migration/listMigrationsCommand'\nimport migrationGroup from './migration/migrationGroup'\nimport runMigrationCommand from './migration/runMigrationCommand'\nimport previewCommand from './preview/previewCommand'\nimport deleteSchemaCommand from './schema/deleteSchemaCommand'\nimport extractSchemaCommand from './schema/extractSchemaCommand'\nimport schemaGroup from './schema/schemaGroup'\nimport fetchSchemaCommand from './schema/schemaListCommand'\nimport storeSchemaCommand from './schema/storeSchemaCommand'\nimport validateSchemaCommand from './schema/validateSchemaCommand'\nimport startCommand from './start/startCommand'\nimport inviteUserCommand from './users/inviteUserCommand'\nimport listUsersCommand from './users/listUsersCommand'\nimport usersGroup from './users/usersGroup'\n\n// Base commands that are always included\nconst baseCommands: (CliCommandDefinition | CliCommandGroupDefinition)[] = [\n appGroup,\n appDeployCommand,\n appDevCommand,\n appBuildCommand,\n appStartCommand,\n buildCommand,\n datasetGroup,\n deployCommand,\n undeployCommand,\n listDatasetsCommand,\n createDatasetCommand,\n datasetVisibilityCommand,\n exportDatasetCommand,\n importDatasetCommand,\n deleteDatasetCommand,\n copyDatasetCommand,\n aliasDatasetCommand,\n backupGroup,\n listBackupCommand,\n downloadBackupCommand,\n disableBackupCommand,\n enableBackupCommand,\n corsGroup,\n listCorsOriginsCommand,\n addCorsOriginCommand,\n deleteCorsOriginCommand,\n usersGroup,\n inviteUserCommand,\n listUsersCommand,\n hookGroup,\n listHooksCommand,\n createHookCommand,\n migrationGroup,\n createMigrationCommand,\n runMigrationCommand,\n listMigrationsCommand,\n deleteHookCommand,\n listHookLogsCommand,\n printHookAttemptCommand,\n documentsGroup,\n getDocumentsCommand,\n queryDocumentsCommand,\n deleteDocumentsCommand,\n createDocumentsCommand,\n validateDocumentsCommand,\n graphqlGroup,\n listGraphQLAPIsCommand,\n deployGraphQLAPICommand,\n deleteGraphQLAPICommand,\n devCommand,\n startCommand,\n schemaGroup,\n validateSchemaCommand,\n extractSchemaCommand,\n previewCommand,\n execCommand,\n manifestGroup,\n extractManifestCommand,\n]\n\n// Internal schema commands that are only included when enabled\nconst internalSchemaCommands = [fetchSchemaCommand, storeSchemaCommand, deleteSchemaCommand]\n\n// Include experimental commands only when the feature flag is enabled\nconst commands: (CliCommandDefinition | CliCommandGroupDefinition)[] = [\n ...baseCommands,\n ...(SCHEMA_STORE_ENABLED ? 
internalSchemaCommands : []),\n]\n\n/**\n * @deprecated Not actually deprecated, but these are internals and should not be relied upon outside of the Sanity team\n * @internal\n */\nexport const cliProjectCommands = {\n requiredCliVersionRange: '^3.0.0',\n commands,\n}\n"],"names":["getTimer","timings","startTimes","start","name","Error","performance","now","end","getTimings","MANIFEST_FILENAME","SCHEMA_FILENAME_SUFFIX","TOOLS_FILENAME_SUFFIX","FEATURE_ENABLED_ENV_NAME","EXTRACT_MANIFEST_ENABLED","process","env","EXTRACT_MANIFEST_LOG_ERRORS","SANITY_CLI_EXTRACT_MANIFEST_LOG_ERRORS","CREATE_TIMER","EXTRACT_TASK_TIMEOUT_MS","minutesToMilliseconds","EXTRACT_FAILURE_MESSAGE","extractManifestSafe","args","context","extractManifest","err","output","error","workDir","flags","extOptions","defaultOutputDir","resolve","join","outputDir","defaultStaticPath","staticPath","path","rootPkgPath","readPkgUp","sync","cwd","__dirname","timer","spinner","workspaceManifests","getWorkspaceManifests","mkdir","recursive","workspaceFiles","writeWorkspaceFiles","manifest","version","createdAt","Date","toISOString","workspaces","writeFile","JSON","stringify","manifestDuration","succeed","toFixed","fail","message","print","chalk","gray","workerPath","dirname","worker","Worker","workerData","timeout","timeoutId","setTimeout","terminate","Promise","resolveWorkspaces","reject","buffer","addListener","push","exitCode","clearTimeout","manifestWorkspaces","reduce","workspace","writeWorkspaceFile","all","schemaFilename","toolsFilename","createFile","schema","tools","content","filenameSuffix","stringifiedContent","filename","createHash","update","digest","slice","SANITY_WORKSPACE_SCHEMA_TYPE","printSchemaList","schemas","ordered","sortBy","map","_createdAt","_id","id","dataset","projectId","String","headings","rows","reverse","maxWidths","max","row","current","index","Math","size","str","printRow","col","i","padEnd","cyan","forEach","schemaListAction","SCHEMA_STORE_ENABLED","apiClient","client","requireUser","requireProject","withConfig","apiVersion","config","manifestDir","manifestPath","getManifestPath","readManifest","allSettled","uniqBy","throwIfProjectIdMismatch","getDocument","useCdn","fetch","type","result","status","red","reason","value","flat","length","json","customPath","readAndParseManifest","readFileSync","lastModified","statSync","mtime","parse","readPath","text","groupOrCommand","argv","argsWithoutOptions","extraArguments","retryError","errorMessage","storeSchemasAction","schemaRequired","workspaceName","idPrefix","verbose","storedCount","saveSchema","transaction","createOrReplace","_type","commit","workspaceToSave","find","appGroup","signature","isGroupRoot","description","helpText","appBuildCommand","group","action","overrides","getBuildAction","default","appDeployCommand","appDevCommand","getDevAction","isInteractive","stdout","isTTY","TERM","devCommand","appStartCommand","prompt","previewAction","getPreviewAction","msg","warn","bgBlack","single","exit","defaultApiVersion","datasetBackupGroup","parseApiErr","apiErr","code","statusCode","statusMessage","response","body","data","debug","debugIt","validateDatasetName","datasetName","toLowerCase","MAX_DATASET_NAME_LENGTH","test","promptForDatasetName","options","validate","chooseDatasetPrompt","allowCreation","datasets","list","hasProduction","datasetChoices","selected","choices","Separator","newDatasetName","undefined","create","resolveApiClient","token","selectedDataset","disableDatasetBackupCommand","request","method","headers","Authorization","uri","enabled"
,"green","require","archiver","archiveDir","tmpOutDir","outFilePath","progressCb","archiveDestination","createWriteStream","on","archive","gzip","gzipOptions","level","zlib","constants","Z_DEFAULT_COMPRESSION","stack","progress","fs","processedBytes","pipe","directory","finalize","maxBackupIdsShown","chooseBackupIdPrompt","query","limit","toString","backups","backupIdChoices","backup","cleanupTmpDir","tmpDir","rimraf","MAX_RETRIES","BACKOFF_DELAY_BASE","exponentialBackoff","retryCount","pow","withRetry","operation","maxRetries","retryDelay","CONNECTION_TIMEOUT","READ_TIMEOUT","getIt","keepAlive","promise","downloadAsset","url","fileName","fileType","outDir","normalizedFileName","basename","assetFilePath","getAssetFilePath","maxRedirects","connect","socket","stream","pipeline","downloadDocument","PaginatedGetBackupStream","Readable","cursor","totalFiles","constructor","backupId","objectMode","_read","fetchNextBackupPage","files","file","nextCursor","destroy","newProgress","startStep","lastProgress","step","elapsed","prettyMs","total","set","humanFileSize","floor","log","isPathDirName","filepath","createDebug","DEFAULT_DOWNLOAD_CONCURRENCY","MAX_DOWNLOAD_CONCURRENCY","parseCliFlags","yargs","hideBin","downloadBackupCommand","opts","prepareBackupOptions","outFileName","bold","progressSpinner","mkdtemp","tmpdir","dir","mkdirSync","tmpOutDocumentsFile","docOutStream","docWriteMutex","Mutex","backupFileStream","totalItemsDownloaded","pMap","doc","runExclusive","write","concurrency","finished","isString","defaultOutFileName","out","absolutify","filter","overwrite","existsSync","enableDatasetBackupCommand","DEFAULT_LIST_BACKUP_LIMIT","alias","listDatasetBackupCommand","Number","MAX_SAFE_INTEGER","before","after","parsedBefore","processDateFlags","parsedAfter","isAfter","table","Table","columns","title","alignment","addRow","resource","lightFormat","printTable","date","parsedDate","isValid","buildCommand","wildcardReplacement","portReplacement","addCorsOrigin","givenOrigin","origin","filterAndValidateOrigin","promptForOrigin","hasWildcard","includes","promptForWildcardConfirmation","allowCredentials","credentials","promptForCredentials","Boolean","oneline","yellow","logSymbols","warning","underline","replace","filterOrigin","validateOrigin","example","parsed","host","protocol","RegExp","addCorsOriginCommand","corsGroup","deleteCorsOriginCommand","originId","specified","specifiedOrigin","origins","listCorsOriginsCommand","validateDatasetAliasName","promptForDatasetAliasName","ALIAS_PREFIX","listAliases","createAlias","aliasName","modify","updateAlias","unlinkAlias","removeAlias","createAliasHandler","targetDataset","nameError","aliases","projectFeatures","then","sets","ds","aliasClient","aliasOutputName","startsWith","datasetErr","option","deleteAliasHandler","force","dsError","fetchedAliases","linkedAlias","elem","input","trim","linkAliasHandler","da","unlinkAliasHandler","aliasCommand","verb","listDatasetCopyJobs","offset","job","state","updatedAt","sourceDataset","withHistory","timeStarted","formatDistanceToNow","parseISO","timeTaken","formatDistance","color","getClientUrl","cdnUrl","Observable","observer","progressSource","EventSource","stopped","onError","close","next","onChannelError","onMessage","event","onComplete","removeEventListener","complete","addEventListener","followProgress","jobId","currentProgress","listenUrl","subscribe","copyDatasetCommand","attach","shouldSkipHistory","existingDatasets","sourceDatasetName","targetDatasetName","skipHistory","detach","allowedModes","createDatasetCo
mmand","visibility","canCreatePrivate","aclMode","promptForDatasetVisibility","mode","datasetVisibilityCommand","edit","curr","deleteDatasetCommand","delete","noop","parseFlags","rawFlags","types","split","assetConcurrency","parseInt","raw","assets","drafts","compress","exportDatasetCommand","targetDestination","destinationPath","outputPath","getOutputPath","currentStep","onProgress","exportDataset","destination","dstPath","isAbsolute","dstStats","stat","catch","looksLikeFile","isFile","indexOf","createPath","finalPath","toBoolIfSet","flag","allowAssetsInDifferentDataset","allowFailingAssets","replaceAssets","skipCrossDatasetReferences","allowSystemDocuments","missing","importDatasetCommand","fromInitCommand","getMutationOperation","target","determineTargetDataset","isUrl","inputStream","assetsBase","sourceIsFolder","getUrlStream","sourceFile","fileStats","isDirectory","createReadStream","importClient","clone","stepStart","spinInterval","percent","lengthComputable","sameStep","getPercentage","clearInterval","prevStep","prevStepStart","timeSpent","secondsDecimalDigits","setInterval","endTask","success","numDocs","warnings","sanityImport","printWarnings","details","responseBody","padStart","onlyBody","assetFails","bind","listAliasesHandler","listDatasetsCommand","deployCommand","undeployCommand","createDocumentsCommand","watch","useJson5","json5","contentPath","readFile","writeDocuments","getResultMessage","docId","uuid","ext","tmpFile","os","defaultValue","editor","getEditor","registerUnlinkOnSigInt","chokidar","readAndPerformCreatesFromFile","execa","bin","concat","stdio","unlink","filePath","isEqual","writeResult","documents","docs","Array","isArray","mutations","validateDocument","isIdentifiedSanityDocument","createIfNotExists","arr","isSingle","isPlainObject","getErrorMessage","isSanityDocumentish","joiner","results","res","created","skipped","defaultEditor","platform","VISUAL","EDITOR","shift","deleteDocumentsCommand","ids","trx","deleted","notFound","pluralize","documentsGroup","identity","inp","colorizeJson","formatters","punctuator","white","key","string","number","literal","whitespace","tokenize","prevToken","getDocumentsCommand","pretty","project","anonymous","cliConfig","requireDataset","api","baseClient","originalDataset","originalProjectId","fallbackApiVersion","SANITY_CLI_QUERY_API_VERSION","validateDocumentsCommand","execCommand","deleteGraphQLAPICommand","deployGraphQLAPICommand","graphqlGroup","listGraphQLAPIsCommand","createHookCommand","manageUrl","projects","getById","organizationId","open","deleteHookCommand","hookId","promptForHook","specifiedName","hooks","hook","hookGroup","printHookAttemptCommand","attemptId","attempt","resultCode","resultBody","failureReason","inProgress","getStatus","isFailure","formatFailure","includeHelp","help","listHookLogsCommand","messages","attempts","groupedAttempts","groupBy","populated","totalMessages","printMessage","detailed","printSeparator","skip","failureCount","inspect","payload","colors","prefix","failure","duration","listHooksCommand","httpMethod","extractManifestCommand","extractError","MIGRATIONS_DIRECTORY","MIGRATION_SCRIPT_EXTENSIONS","minimalAdvanced","migrationName","documentTypes","t","minimalSimple","renameField","renameType","stringToPTE","TEMPLATES","template","createMigrationCommand","suffix","templatesByName","Object","fromEntries","definedTemplate","sluggedName","deburr","destDir","renderedTemplate","definitionFile","resolveMigrationScript","flatMap","location","relativePath","absolutePath","mod","isLoadableMigrationS
cript","script","migrate","listMigrationCommand","_","migrations","resolveMigrations","definedMigration","migration","unregister","register","supported","migrationsDir","migrationEntries","readdir","withFileTypes","entry","entryName","removeMigrationScriptExtension","candidates","candidate","endsWith","pathToString","segment","isIndexSegment","isKeySegment","_key","isIndexTuple","from","to","maxKeyLength","children","depth","entries","child","formatTree","node","paddingLength","indent","getNodes","getLeaves","nodes","getMessage","isLast","nextIndent","leaves","nested","first","rest","firstPadding","repeat","elbow","subsequentPadding","firstMessage","subsequentMessages","marker","convertToTree","root","addNode","tree","isTty","isatty","prettyFormat","subject","indentSize","subjectEntry","badge","prettyFormatMutation","encodeItemRef","ref","badgeStyle","variant","info","bgWhite","black","incremental","bgGreen","maybeDestructive","bgYellow","destructive","bgRed","label","mutationImpact","patch","documentId","mutation","document","listFormatter","Intl","ListFormat","mutationHeader","mutationType","documentType","format","lock","ifRevision","header","padding","patches","formatPatchMutation","op","formattedType","amount","position","referenceItem","items","startIndex","endIndex","line","MAX_MUTATION_CONCURRENCY","DEFAULT_MUTATION_CONCURRENCY","runMigrationCommand","migrationsDirectoryPath","fromExport","dry","dryRun","resolvedScripts","relative","projectConfig","apiConfig","apiHost","confirm","run","createProgress","stop","done","completedTransactions","stopAndPersist","symbol","currentTransactions","blue","pending","dryRunHandler","exportPath","previewCommand","deleteSchemaCommand","extractSchemaCommand","fetchSchemaCommand","storeSchemaCommand","extendedArgs","startCommand","prettifyQuotaError","inviteUserCommand","selectedEmail","useProjectHostname","roles","role","appliesToUsers","email","promptForEmail","selectedRole","promptForRole","useGlobalApi","val","sortFields","listUsersCommand","sort","order","robots","invitations","globalClient","pendingInvitations","getPendingInvitations","memberIds","members","member","users","user","getUserProps","isRobot","isInvite","textRow","dim","displayName","invite","isAccepted","isRevoked","acceptedByUserId","usersGroup","baseCommands","datasetGroup","aliasDatasetCommand","backupGroup","listBackupCommand","disableBackupCommand","enableBackupCommand","migrationGroup","listMigrationsCommand","queryDocumentsCommand","schemaGroup","validateSchemaCommand","manifestGroup","internalSchemaCommands","commands","cliProjectCommands","requiredCliVersionRange"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQO,SAASA,WAAyB;AACvC,QAAMC,UAAkC,IAClCC,aAAqC,CAAC;AAE5C,WAASC,MAAMC,MAAoB;AAC7B,QAAA,OAAOF,WAAWE,IAAI,IAAM;AAC9B,YAAM,IAAIC,MAAM,UAAUD,IAAI,qCAAqC;AAG1DA,eAAAA,IAAI,IAAIE,gBAAAA,YAAYC,IAAI;AAAA,EAAA;AAGrC,WAASC,IAAIJ,MAAsB;AAC7B,QAAA,OAAOF,WAAWE,IAAI,IAAM;AAC9B,YAAM,IAAIC,MAAM,UAAUD,IAAI,6BAA6B;AAGrDA,WAAAA,QAAAA,IAAI,IAAIE,4BAAYC,QAAQL,WAAWE,IAAI,GAC5CH,QAAQG,IAAI;AAAA,EAAA;AAGd,SAAA;AAAA,IAACD;AAAAA,IAAOK;AAAAA,IAAKC,YAAYA,MAAMR;AAAAA,EAAO;AAC/C;ACZO,MAAMS,oBAAoB,wBAC3BC,yBAAyB,uBACzBC,wBAAwB,sBAGxBC,6BAA2B,uCAC3BC,2BAA2BC,QAAQC,IAAIH,0BAAwB,MAAM,SACrEI,8BAA8BF,QAAQC,IAAIE,2CAA2C,QAErFC,eAAe,mBAEfC,0BAA0BC,QAAAA,sBAAsB,CAAC,GAEjDC,0BACJ;AAAA,8BAC+BT,0BAAwB;AAUnCU,eAAAA,oBACpBC,MACAC,SAC4B;AACvBX,MAAAA;AAID,QAAA;AACIY,YAAAA,gBAAgBF,MAAMC,OAAO;AACnC;AAAA,aACOE,KAAK;AACZ,aAAIV,+BACFQ,QAAQG,OAAOC,MAAMF,GAAG,GAEnBA;AAAAA,IAAAA;AAEX;AAEA,eAAeD,gBACbF,MACAC,SACe;AACT,QAAA;AAAA,
IAACG;AAAAA,IAAQE;AAAAA,EAAWL,IAAAA,SAEpBM,QAAQP,KAAKQ,YACbC,mBAAmBC,KAAAA,QAAQC,KAAKL,KAAAA,SAAS,MAAM,CAAC,GAEhDM,YAAYF,aAAQD,gBAAgB,GACpCI,oBAAoBF,KAAAA,KAAKC,WAAW,QAAQ,GAE5CE,aAAaP,MAAMQ,QAAQF,mBAE3BE,SAAOJ,KAAAA,KAAKG,YAAY5B,iBAAiB,GAEzC8B,cAAcC,mBAAAA,QAAUC,KAAK;AAAA,IAACC,KAAKC;AAAAA,EAAU,CAAA,GAAGL;AACtD,MAAI,CAACC;AACG,UAAA,IAAInC,MAAM,oDAAoD;AAGtE,QAAMwC,QAAQ7C,SAAS;AACvB6C,QAAM1C,MAAMgB,YAAY;AACxB,QAAM2B,UAAUlB,OAAOkB,QAAQ,CAAA,CAAE,EAAE3C,MAAM,qBAAqB;AAE1D,MAAA;AACI4C,UAAAA,qBAAqB,MAAMC,sBAAsB;AAAA,MAACR;AAAAA,MAAaV;AAAAA,IAAAA,CAAQ;AAC7E,UAAMmB,GAAAA,MAAMX,YAAY;AAAA,MAACY,WAAW;AAAA,IAAA,CAAK;AAEzC,UAAMC,iBAAiB,MAAMC,oBAAoBL,oBAAoBT,UAAU,GAEzEe,WAA2B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAM/BC,SAAS;AAAA,MACTC,YAAW,oBAAIC,KAAK,GAAEC,YAAY;AAAA,MAClCC,YAAYP;AAAAA,IACd;AAEA,UAAMQ,GAAAA,UAAUpB,QAAMqB,KAAKC,UAAUR,UAAU,MAAM,CAAC,CAAC;AACjDS,UAAAA,mBAAmBjB,MAAMrC,IAAIW,YAAY;AAE/C2B,YAAQiB,QAAQ,uBAAuBD,iBAAiBE,QAAAA,CAAS,KAAK;AAAA,WAC/DrC,KAAK;AACJsC,UAAAA,QAAAA,KAAKtC,IAAIuC,OAAO,GACxBtC,OAAOuC,MAAMC,uBAAMC,KAAK/C,uBAAuB,CAAC,GAC1CK;AAAAA,EAAAA;AAEV;AAEA,eAAeqB,sBAAsB;AAAA,EACnCR;AAAAA,EACAV;AAIF,GAAuC;AACrC,QAAMwC,aAAanC,KAAAA,KACjBoC,KAAQ/B,QAAAA,WAAW,GACnB,OACA,aACA,OACA,WACA,oBACF,GAEMgC,SAAS,IAAIC,oBAAAA,OAAOH,YAAY;AAAA,IACpCI,YAAY;AAAA,MAAC5C;AAAAA,IAAO;AAAA;AAAA,IAEpBd,KAAKD,QAAQC;AAAAA,EAAAA,CACd;AAED,MAAI2D,UAAU;AACRC,QAAAA,YAAYC,WAAW,MAAM;AACvB,cAAA,IACVL,OAAOM,UAAU;AAAA,KAChB1D,uBAAuB;AAEtB,MAAA;AACF,WAAO,MAAM,IAAI2D,QAAmC,CAACC,mBAAmBC,WAAW;AACjF,YAAMC,SAAoC,CAAE;AACrCC,aAAAA,YAAY,WAAYjB,CAAAA,YAAYgB,OAAOE,KAAKlB,OAAO,CAAC,GAC/DM,OAAOW,YAAY,QAASE,CAAa,aAAA;AACnCA,qBAAa,IACfL,kBAAkBE,MAAM,IACfP,WACTM,OAAO,IAAI5E,MAAM,sCAAsCe,uBAAuB,IAAI,CAAC;AAAA,MAEtF,CAAA,GACDoD,OAAOW,YAAY,SAASF,MAAM;AAAA,IAAA,CACnC;AAAA,EAAA,UACO;AACRK,iBAAaV,SAAS;AAAA,EAAA;AAE1B;AAEA,SAASxB,oBACPmC,oBACAjD,YACkC;AAClC,QAAMV,SAAS2D,mBAAmBC,OAChC,CAAC9B,YAAY+B,cACJ,CAAC,GAAG/B,YAAYgC,mBAAmBD,WAAWnD,UAAU,CAAC,GAElE,CAAA,CACF;AACOyC,SAAAA,QAAQY,IAAI/D,MAAM;AAC3B;AAEA,eAAe8D,mBACbD,WACAnD,YACgC;AAC1B,QAAA,CAACsD,gBAAgBC,aAAa,IAAI,MAAMd,QAAQY,IAAI,CACxDG,WAAWxD,YAAYmD,UAAUM,QAAQpF,sBAAsB,GAC/DmF,WAAWxD,YAAYmD,UAAUO,OAAOpF,qBAAqB,CAAC,CAC/D;AAEM,SAAA;AAAA,IACL,GAAG6E;AAAAA,IACHM,QAAQH;AAAAA,IACRI,OAAOH;AAAAA,EACT;AACF;AAEA,MAAMC,aAAa,OAAOvD,QAAc0D,SAAcC,mBAA2B;AACzEC,QAAAA,qBAAqBvC,KAAKC,UAAUoC,SAAS,MAAM,CAAC,GAEpDG,WAAW,GADJC,uBAAW,MAAM,EAAEC,OAAOH,kBAAkB,EAAEI,OAAO,KAAK,EAC9CC,MAAM,GAAG,CAAC,CAAC,GAAGN,cAAc;AAGrD,SAAA,MAAMvC,aAAUxB,UAAKI,QAAM6D,QAAQ,GAAGD,kBAAkB,GAEjDC;AACT;;;;;;AC5KO,MAAMK,+BAA+B,2BAEtCC,kBAAkBA,CAAC;AAAA,EACvBC;AAAAA,EACA/E;AAC2D,MAAM;AACjE,QAAMgF,UAAUC,gBAAAA,QACdF,QAAQG,IAAI,CAAC;AAAA,IAACC,YAAYxD;AAAAA,IAAWyD,KAAKC;AAAAA,IAAIxB;AAAAA,EAAAA,MACrC,CAACwB,IAAIxB,UAAUrF,MAAMqF,UAAUyB,SAASzB,UAAU0B,WAAW5D,SAAS,EAAEuD,IAAIM,MAAM,CAC1F,GACD,CAAC,WAAW,CACd,GACMC,WAAW,CAAC,MAAM,aAAa,WAAW,aAAa,WAAW,GAClEC,OAAOV,QAAQW,WAEfC,YAAYF,KAAK9B,OACrB,CAACiC,KAAKC,QAAQA,IAAIZ,IAAI,CAACa,SAASC,UAAUC,KAAKJ,IAAIK,cAAAA,QAAKH,OAAO,GAAGF,IAAIG,KAAK,CAAC,CAAC,GAC7EP,SAASP,IAAKiB,CAAAA,QAAQD,cAAAA,QAAKC,GAAG,CAAC,CACjC,GAEMC,WAAYN,CAAkBA,QAAAA,IAAIZ,IAAI,CAACmB,KAAKC,MAAM,GAAGD,GAAG,GAAGE,OAAOX,UAAUU,CAAC,CAAC,CAAC,EAAE/F,KAAK,KAAK;AAEjGP,SAAOuC,MAAMC,eAAMgE,QAAAA,KAAKJ,SAASX,QAAQ,CAAC,CAAC,GAC3CC,KAAKe,QAASX,SAAQ9F,OAAOuC,MAAM6D,SAASN,GAAG,CAAC,CAAC;AACnD;AAE8BY,eAAAA,iBAC5B9G,MACAC,SACe;AACf,MAAI,CAAC8G;AACH;AAGF,QAAMxG,QAAQP,KAAKQ;AACnB,MAAI,OAAOD,MAAMkF,MAAO,UAAiB,OAAA,IAAI5G,MAAM,oBAAoB;AACnE,MAAA,OAAO0B,MAAM,cAAc,KAAM,UAAiB,OAAA,IAAI1B,MAAM,6BAA6B;AAEvF,QAAA;AAAA,IAACmI;AAAAA,IAAW5G;AAAAA,EAAAA,IAAUH,SACtBgH,SAASD,UA
AU;AAAA,IACvBE,aAAa;AAAA,IACbC,gBAAgB;AAAA,EACjB,CAAA,EAAEC,WAAW;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc,GAEnC1B,YAAYsB,OAAOK,SAAS3B,WAC5BD,UAAUuB,OAAOK,OAAAA,EAAS5B;AAE5B,MAAA,CAACC,aAAa,CAACD,SAAS;AAC1BtF,WAAOC,MAAM,yCAAyC;AACtD;AAAA,EAAA;AAGF,QAAMkH,cAAchH,MAAM,cAAc,GAClCiH,eAAeC,gBAAgBxH,SAASsH,WAAW,GACnD1F,WAAW,MAAM6F,aAAaF,cAAcvH,OAAO,GA6BnDkF,WA1BU,MAAM5B,QAAQoE,WAC5BC,gBAAAA,QAA8B/F,SAASK,YAAY,SAAS,EAAEoD,IAAI,OAAOrB,eACvE4D,yBAAyB5D,WAAW0B,SAAS,GACzCpF,MAAMkF,KAED,MAAMwB,OACVG,WAAW;AAAA,IACV1B,SAASzB,UAAUyB;AAAAA,IACnBC,WAAW1B,UAAU0B;AAAAA,EAAAA,CACtB,EACAmC,YAAYvH,MAAMkF,EAAE,IAGlB,MAAMwB,OACVG,WAAW;AAAA,IACV1B,SAASzB,UAAUyB;AAAAA,IACnBC,WAAW1B,UAAU0B;AAAAA,IACrBoC,QAAQ;AAAA,EAAA,CACT,EACAC,MAAwB,qBAAqB;AAAA,IAC5CC,MAAMhD;AAAAA,EACP,CAAA,EACJ,CACH,GAIGK,IAAI,CAAC4C,QAAQ9B,UAAU;AAClB8B,QAAAA,OAAOC,WAAW,YAAY;AAC1BlE,YAAAA,YAAYpC,SAASK,WAAWkE,KAAK;AAC3ChG,aAAAA,OAAOC,MACLuC,eAAAA,QAAMwF,IACJ,0CAA0CnE,UAAUrF,IAAI,MAAMsJ,OAAOG,OAAO3F,OAAO,EACrF,CACF,GACO,CAAE;AAAA,IAAA;AAEX,WAAOwF,OAAOI;AAAAA,EACf,CAAA,EACAC,KAAK;AAEJpD,MAAAA,QAAQqD,WAAW,GAAG;AACxBpI,WAAOC,MAAM,kBAAkB;AAC/B;AAAA,EAAA;AAGEE,QAAMkI,OACRrI,OAAOuC,MAAM,GAAGP,KAAKC,UAAU9B,MAAMkF,KAAKN,QAAQ,CAAC,IAAIA,SAAS,MAAM,CAAC,CAAC,EAAE,IAE1ED,gBAAgB;AAAA,IAACC;AAAAA,IAAsC/E;AAAAA,EAAAA,CAAO;AAElE;;;;;;ACxHA,MAAMf,2BAA2B,mCACpB0H,uBAAuBxH,QAAQC,IAAIH,wBAAwB,MAAM,QAUjEoI,kBAAkBA,CAACxH,SAA4ByI,eAAwB;AAClF,QAAMjI,mBAAmBC,KAAAA,QAAQC,KAAAA,KAAKV,QAAQK,SAAS,MAAM,CAAC,GAExDM,YAAYF,KAAQD,QAAAA,gBAAgB,GACpCI,oBAAoBF,UAAKC,WAAW,QAAQ,GAE5CE,aAAa4H,cAAc7H;AAEjC,SADqBE,cAAKL,QAAAA,QAAQnB,QAAQ4B,IAAAA,GAAOL,UAAU;AAE7D,GAKM6H,uBAAuBA,CAACnB,cAAsBvH,YAA+B;AAC3EwE,QAAAA,UAAUmE,KAAAA,aAAapB,cAAc,OAAO,GAE5CqB,eADQC,KAAAA,SAAStB,YAAY,EACRuB,MAAM9G,YAAY;AACrC7B,SAAAA,QAAAA,OAAOuC,MACbC,eAAAA,QAAMC,KAAK;AAAA,4BAA0B2E,YAAY,oBAAoBqB,YAAY,GAAG,CACtF,GACOzG,KAAK4G,MAAMvE,OAAO;AAC3B,GAEaiD,eAAe,OAAOuB,UAAkBhJ,SAA4BqB,YAAkB;AACjG,QAAMkG,eAAe,GAAGyB,QAAQ,IAAI/J,iBAAiB;AAEjD,MAAA;AACKyJ,WAAAA,qBAAqBnB,cAAcvH,OAAO;AAAA,EAAA,QACnC;AAELiJ,YAAAA,OAAO,mDAEhB,MAAMnJ,oBACJ;AAAA,MACES,YAAY;AAAA,QAACO,MAAMkI;AAAAA,MAAQ;AAAA,MAC3BE,gBAAgB;AAAA,MAChBC,MAAM,CAAE;AAAA,MACRC,oBAAoB,CAAE;AAAA,MACtBC,gBAAgB,CAAA;AAAA,OAElBrJ,OACF;AAGI,QAAA;AACK0I,aAAAA,qBAAqBnB,cAAcvH,OAAO;AAAA,aAC1CsJ,YAAY;AACbC,YAAAA,eAAe,8BAA8BhC,YAAY;AAC/DlG,YAAAA,SAASmB,KAAK+G,YAAY,GAE1BvJ,QAAQG,OAAOC,MAAMmJ,YAAY,GAC3BD;AAAAA,IAAAA;AAAAA,EACR;AAEJ,GAGa1B,2BAA2BA,CACtC5D,WACA0B,cACS;AACT,MAAI1B,UAAU0B,cAAcA;AACpB,UAAA,IAAI9G,MACR,uDAAkDoF,UAAUrF,IAAI,oBAAoBqF,UAAU0B,SAAS,EACzG;AAEJ;AAE8B8D,eAAAA,mBAC5BzJ,MACAC,SAC4B;AAC5B,MAAI,CAAC8G;AACH;AAGF,QAAMxG,QAAQP,KAAKQ,YAEbkJ,iBAAiBnJ,MAAM,iBAAiB,GACxCoJ,gBAAgBpJ,MAAM0D,WACtB2F,WAAWrJ,MAAM,WAAW,GAC5BsJ,UAAUtJ,MAAMsJ,SAChBtC,cAAchH,MAAM,cAAc;AAExC,MAAI,OAAOgH,eAAgB,UAAiB,OAAA,IAAI1I,MAAM,6BAA6B;AACnF,MAAI,OAAO+K,YAAa,UAAiB,OAAA,IAAI/K,MAAM,oBAAoB;AACvE,MAAI,OAAO8K,iBAAkB,UAAiB,OAAA,IAAI9K,MAAM,oBAAoB;AAEtE,QAAA;AAAA,IAACuB;AAAAA,IAAQ4G;AAAAA,EAAa/G,IAAAA,SAEtBqB,UAAUlB,OAAOkB,QAAQ,CAAA,CAAE,EAAE3C,MAAM,iBAAiB,GAEpD6I,eAAeC,gBAAgBxH,SAASsH,WAAW;AAErD,MAAA;AACF,UAAMN,SAASD,UAAU;AAAA,MACvBE,aAAa;AAAA,MACbC,gBAAgB;AAAA,IACjB,CAAA,EAAEC,WAAW;AAAA,MAACC,YAAY;AAAA,IAAc,CAAA,GAEnC1B,YAAYsB,OAAOK,OAAS3B,EAAAA;AAClC,QAAI,CAACA,UAAiB,OAAA,IAAI9G,MAAM,2BAA2B;AAE3D,UAAMgD,WAAW,MAAM6F,aAAaF,cAAcvH,SAASqB,OAAO;AAElE,QAAIwI,cAAc,GAEdzJ;AAEE0J,UAAAA,aAAa,OAAO9F,cAAqC;AACvDwB,YAAAA,KAAK,GAAGmE,WAAW,GAAGA,QAAQ,MAAM,EAAE,GAAG3E,4BAA4B,IAAIhB,UAAUrF,IAAI;AACzF,UAAA;AACFiJ,iCAAyB5D,WAAW0B,SAAS;AACvCpB,cAAAA,SAASnC,KAAK4G,MAClBJ,KAAa,aAAA,GAAGpB,YAAY,IAAIvD,UAAUM,MAAM,IAAI,OAAO,CAC7D;AACA,cAAM0C,O
ACHG,WAAW;AAAA,UACV1B,SAASzB,UAAUyB;AAAAA,UACnBC,WAAW1B,UAAU0B;AAAAA,QAAAA,CACtB,EACAqE,YAAY,EACZC,gBAAgB;AAAA,UAACC,OAAOjF;AAAAA,UAA8BO,KAAKC;AAAAA,UAAIxB;AAAAA,UAAWM;AAAAA,QAAAA,CAAO,EACjF4F,OAAAA,GACHL,eACAxI,QAAQ4H,OAAO,UAAUY,WAAW,sBAChCD,WAASvI,QAAQiB,QAAQ,gCAAgC0B,UAAUrF,IAAI,GAAG;AAAA,eACvEuB,KAAK;AAKZ,YAJAE,QAAQF,KACRmB,QAAQmB,KACN,uCAAuCwB,UAAUrF,IAAI;AAAA,EAAOgE,uBAAMwF,IAAI,GAAGjI,IAAIuC,OAAO,EAAE,CAAC,EACzF,GACIgH,eAAsBvJ,OAAAA;AAAAA,MAAAA,UAClB;AACJ0J,mBACFzJ,OAAOuC,MACLC,eAAAA,QAAMC,KAAK,oBAAe4C,EAAE,gBAAgBE,SAAS,cAAc1B,UAAUyB,OAAO,EAAE,CACxF;AAAA,MAAA;AAAA,IAGN;AAGA,QAAIiE,eAAe;AACjB,YAAMS,kBAAkBvI,SAASK,WAAWmI,KACzCpG,CAAqCA,cAAAA,UAAUrF,SAAS+K,aAC3D;AACA,UAAI,CAACS;AACK3H,cAAAA,QAAAA,KAAK,aAAakH,aAAa,wBAAwB,GACzD,IAAI9K,MAAM,aAAa8K,aAAa,sCAAsChE,SAAS,EAAE;AAE7F,YAAMoE,WAAWK,eAAwC,GACzD9I,QAAQiB,QAAQ,kBAAkB;AAAA,IACpC;AACE,YAAMgB,QAAQY,IACZtC,SAASK,WAAWoD,IAAI,OAAOrB,cAAoD;AACjF,cAAM8F,WAAW9F,SAAS;AAAA,MAC3B,CAAA,CACH,GACA3C,QAAQiB,QAAQ,UAAUuH,WAAW,IAAIjI,SAASK,WAAWsG,MAAM,UAAU;AAG/E,QAAInI,MAAaA,OAAAA;AACjB;AAAA,WACOF,KAAK;AAEZ,QAAIuJ,eAAsBvJ,OAAAA;AACnBA,WAAAA;AAAAA,EAAAA,UACC;AACDwC,WAAAA,MAAM,GAAGC,eAAAA,QAAMC,KAAK,kCAA6B,CAAC,IAAID,eAAAA,QAAMgE,KAAK,oBAAoB,CAAC,EAAE;AAAA,EAAA;AAEnG;;;;;;;;;ACpMA,MAAM0D,WAAsC;AAAA,EAC1C1L,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCCMC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAWXC,kBAAwC;AAAA,EAC5C/L,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,SACA6K,eAEoB,MAAMC,oBAEP/K,MAAMC,SAAS6K,SAAS;AAAA,EAE7CJ,UAAAA;AACF;AAEA,eAAeK,mBAAiB;AAUlB,UAAA,MAAM;mBAAO,kBAAiC;AAAA,EAAA,CAAA,EAAA,KAAA,SAAA,GAAA;AAAA,WAAA,EAAA;AAAA,EAAA,CAAA,GAE/CC;AACb;ACxCA,MAAMN,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA,IAKb3D,uBAAuB,0DAA0D,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAQjFkE,mBAAyC;AAAA,EAC7CrM,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,mBAAmC;AAAA,MAEjD+K,QAAQhL,MAAMC,OAAO;AAAA,EAElCyK,UAAAA;AACF,GC5BMA,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaXQ,gBAAsC;AAAA,EAC1CtM,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEkB,MAAMkL,eAAa,GAEpBnL,MAAMC,OAAO;AAAA,EAEhCyK,UAAAA;AACF;AAEA,eAAsBS,iBAKpB;AAUY,UAAA,MAAM;mBAAO,gBAA6B;AAAA,EAAA,CAAA,GAE3CH;AACb;ACtDaI,MAAAA,gBACX7L,QAAQ8L,OAAOC,SAAS/L,QAAQC,IAAI+L,SAAS,UAAU,EAAE,QAAQhM,QAAQC,MCMrEkL,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaXc,aAAmC;AAAA,EACvC5M,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEkB,MAAMkL,aAAa,GAEpBnL,MAAMC,OAAO;AAAA,EAEhCyK,UAAAA;AACF;AAEA,eAAsBS,eAKpB;AAUY,UAAA,MAAM;mBAAO,gBAA6B;AAAA,EAAA,CAAA,GAE3CH;AACb;AC5CA,MAAMN,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXe,kBAAwC;AAAA,EAC5C7M,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,YACG;AACG,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,MAAO8I;AAAAA,IAAUzL,IAAAA,SAC1B0L,gBAAgB,MAAMC,mBAEtBvL,GAAAA,QAASwL,CAAgBzL,QAAAA,OAAO0L,KAAKlJ,OAAMwF,IAAI2D,QAAQF,GAAG,CAAC;AAE7D,QAAA;AACIF,YAAAA,cAAc3L,MAAMC,OAAO;AAAA,aAC1BE,KAAK;AACZ,UAAIA,IAAIvB,SAAS;AACTuB,cAAAA;AAGFA,YAAAA,IAAIuC,OAAO,GACjBrC,MAAM;AAAA,CAAI,GAGR+K,iBACC,MAAMM,OAAOM,OAAO;AAAA,QACnBtJ,SAAS;AAAA,QACTuF,MAAM;AAAA,MAAA,CACP,IAID,OADkB,MAAMkD,gBACRnL,MAAMC,OAAO,IAI7BV,QAAQ0M,KAAK,CAAC;AAAA,IAAA;AAAA,EAGpB;AAAA,EACAvB,UAAAA;AACF;AAEA,eAAekB,qBAAmB;AAUpB,UAAA,MAAM;mBAAO,oBAAqC;AAAA,EAAA,CAAA,GAEnDZ;AACb;AC9EO,MAAMkB,sBAAoB,eAE3BC,qBAAgD;AAAA,EACpDvN,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbD,aAAa;AAC
f;ACAA,SAAS4B,YAAYjM,KAAkB;AACrC,QAAMkM,SAAS,CAAC;AACZlM,SAAAA,IAAImM,OACND,OAAOE,aAAapM,IAAImM,OACfnM,IAAIoM,eACbF,OAAOE,aAAapM,IAAIoM,aAGtBpM,IAAIuC,UACN2J,OAAO3J,UAAUvC,IAAIuC,UACZvC,IAAIqM,gBACbH,OAAO3J,UAAUvC,IAAIqM,gBACZrM,KAAKsM,UAAUC,MAAMhK,UAC9B2J,OAAO3J,UAAUvC,IAAIsM,SAASC,KAAKhK,UAC1BvC,KAAKsM,UAAUE,MAAMjK,UAC9B2J,OAAO3J,UAAUvC,IAAIsM,SAASE,KAAKjK,UAGnC2J,OAAO3J,UAAUN,KAAKC,UAAUlC,GAAG,GAG9BkM;AACT;AC9BaO,MAAAA,UAAQC,uBAAQ,aAAa;ACAnC,SAASC,oBAAoBC,aAAqC;AACvE,MAAI,CAACA;AACI,WAAA;AAGHnO,QAAAA,OAAO,GAAGmO,WAAW;AAEvBnO,SAAAA,KAAKoO,YAAY,MAAMpO,OAClB,kDAGLA,KAAK4J,SAAS,IACT,sDAGL5J,KAAK4J,SAASyE,KACT,+CAGJ,YAAYC,KAAKtO,IAAI,IAIrB,wBAAwBsO,KAAKtO,IAAI,IAIlC,QAAQsO,KAAKtO,IAAI,IACZ,2DAGF,KAPE,4EAJA;AAYX;AC9BO,SAASuO,qBACdzB,QACA0B,UAAgD,IAC/B;AACjB,SAAO1B,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACT2K,UAAWzO,CAAS,SACNkO,oBAAoBlO,IAAI,KAK7B;AAAA,IAET,GAAGwO;AAAAA,EAAAA,CACJ;AACH;AChBA,eAAsBE,oBACpBrN,SACAmN,UAAuD,IACtC;AACX,QAAA;AAAA,IAACpG;AAAAA,IAAW0E;AAAAA,MAAUzL,SACtB;AAAA,IAACyC;AAAAA,IAAS6K;AAAAA,EAAAA,IAAiBH,SAC3BnG,SAASD,aAETwG,WAAW,MAAMvG,OAAOuG,SAASC,KAAK,GACtCC,gBAAgBF,SAASnD,KAAM3E,aAAYA,QAAQ9G,SAAS,YAAY,GACxE+O,iBAAiBH,SAASlI,IAAKI,CAAa,aAAA;AAAA,IAAC4C,OAAO5C,QAAQ9G;AAAAA,EAAM,EAAA,GAClEgP,WAAW,MAAMlC,OAAOM,OAAO;AAAA,IACnCtJ,SAASA,WAAW;AAAA,IACpBuF,MAAM;AAAA,IACN4F,SAASN,gBACL,CAAC;AAAA,MAACjF,OAAO;AAAA,MAAO1J,MAAM;AAAA,OAAuB,IAAI8M,OAAOoC,aAAa,GAAGH,cAAc,IACtFA;AAAAA,EAAAA,CACL;AAED,MAAIC,aAAa,OAAO;AACtBhB,YAAM,wDAAwD;AACxDmB,UAAAA,iBAAiB,MAAMZ,qBAAqBzB,QAAQ;AAAA,MACxDhJ,SAAS;AAAA,MACTsI,SAAS0C,gBAAgBM,SAAY;AAAA,IAAA,CACtC;AACD,WAAA,MAAM/G,OAAOuG,SAASS,OAAOF,cAAc,GACpCA;AAAAA,EAAAA;AAGFH,SAAAA;AACT;ACvBA,eAAeM,iBACbjO,SACA8M,aACA1F,YAC4B;AACtB,QAAA;AAAA,IAACL;AAAAA,EAAAA,IAAa/G;AAEpB,MAAIgH,SAASD,UAAU;AACjB,QAAA;AAAA,IAACrB;AAAAA,IAAWwI;AAAAA,EAAAA,IAASlH,OAAOK,OAAO;AAEzC,MAAI,CAAC3B;AACG,UAAA,IAAI9G,MAAM,wBAAwB;AAK1C,MAAIuP,kBAA0BrB;AAC9B,SAAKqB,oBACHA,kBAAkB,MAAMd,oBAAoBrN,SAAS;AAAA,IACnDyC,SAAS;AAAA,EACV,CAAA,IAGHuE,SAASA,OAAOG,WAAW;AAAA,IAAC1B,SAASqH;AAAAA,IAAa1F;AAAAA,EAAAA,CAAW,GAEtD;AAAA,IACL1B;AAAAA,IACAoH,aAAaqB;AAAAA,IACbD;AAAAA,IACAlH;AAAAA,EACF;AACF;ACrCA,MAAMyD,aAAW;AAAA;AAAA;AAAA,GAKX2D,8BAAoD;AAAA,EACxDzP,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAClB,CAACyF,OAAO,IAAI1F,KAAKqJ,oBACjB;AAAA,MAAC1D;AAAAA,MAAWoH;AAAAA,MAAaoB;AAAAA,MAAOlH;AAAAA,IAAU,IAAA,MAAMiH,iBACpDjO,SACAyF,SACAwG,mBACF;AAEI,QAAA;AACF,YAAMjF,OAAOqH,QAAQ;AAAA,QACnBC,QAAQ;AAAA,QACRC,SAAS;AAAA,UAACC,eAAe,UAAUN,KAAK;AAAA,QAAE;AAAA,QAC1CO,KAAK,aAAa/I,SAAS,aAAaoH,WAAW;AAAA,QACnDL,MAAM;AAAA,UACJiC,SAAS;AAAA,QAAA;AAAA,MACX,CACD,GACDvO,OAAOuC,MAAM,GAAGC,OAAMgM,MAAM,sCAAsC7B,WAAW;AAAA,CAAI,CAAC,EAAE;AAAA,aAC7E1M,OAAO;AACR,YAAA;AAAA,QAACqC;AAAAA,MAAAA,IAAW0J,YAAY/L,KAAK;AACnCD,aAAOuC,MAAM,GAAGC,OAAMwF,IAAI,oCAAoC1F,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAAA,EAC9E;AAEJ;ACzCA,IAAA,UAAemM,QAAQ,OAAO,EAAE,eAAe;ACO/C,MAAMC,WAAWD,QAAQ,UAAU;AAMnC,SAASE,WAAWC,WAAmBC,aAAqBC,YAAuC;AACjG,SAAO,IAAI3L,QAAQ,CAAC7C,SAAS+C,WAAW;AAChC0L,UAAAA,qBAAqBC,uBAAkBH,WAAW;AACrCI,uBAAAA,GAAG,SAAUlP,CAAe,QAAA;AAC7CsD,aAAOtD,GAAG;AAAA,IACX,CAAA,GAEDgP,mBAAmBE,GAAG,SAAS,MAAM;AAC3B,cAAA;AAAA,IAAA,CACT;AAEKC,UAAAA,UAAUR,SAAS,OAAO;AAAA,MAC9BS,MAAM;AAAA,MACNC,aAAa;AAAA,QAACC,OAAOC,sBAAKC,UAAUC;AAAAA,MAAAA;AAAAA,IAAqB,CAC1D;AAEOP,YAAAA,GAAG,SAAUlP,CAAe,QAAA;AAC5ByM,cAAA;AAAA,KAA0BzM,IAAI0P,KAAK,GACzCpM,OAAOtD,GAAG;AAAA,IACX,CAAA,GAGDmP,QAAQD,GAAG,WAAYlP,CAAe,QAAA;AAC9ByM,cAAA,uBAAuBzM,IAAIuC,OAAO;AAAA,IACzC,CAAA,G
AED4M,QAAQD,GAAG,YAAaS,CAA2BA,cAAA;AACtCA,iBAAAA,UAASC,GAAGC,cAAc;AAAA,IACtC,CAAA,GAGDV,QAAQW,KAAKd,kBAAkB,GAC/BG,QAAQY,UAAUlB,WAAW,EAAK,GAClCM,QAAQa,SAAS;AAAA,EAAA,CAClB;AACH;ACzCA,MAAMC,oBAAoB;AAE1B,eAAeC,qBACbpQ,SACA8M,aACiB;AACX,QAAA;AAAA,IAACrB;AAAAA,MAAUzL,SAEX;AAAA,IAAC0F;AAAAA,IAAWwI;AAAAA,IAAOlH;AAAAA,EAAU,IAAA,MAAMiH,iBAAiBjO,SAAS8M,aAAab,mBAAiB;AAE7F,MAAA;AAGIO,UAAAA,WAAW,MAAMxF,OAAOqH,QAAQ;AAAA,MACpCE,SAAS;AAAA,QAACC,eAAe,UAAUN,KAAK;AAAA,MAAE;AAAA,MAC1CO,KAAK,aAAa/I,SAAS,aAAaoH,WAAW;AAAA,MACnDuD,OAAO;AAAA,QAACC,OAAOH,kBAAkBI,SAAS;AAAA,MAAA;AAAA,IAAC,CAC5C;AAEG/D,QAAAA,UAAUgE,SAASjI,SAAS,GAAG;AACjC,YAAMkI,kBAAkBjE,SAASgE,QAAQnL,IAAKqL,CAA0B,YAAA;AAAA,QACtErI,OAAOqI,OAAOlL;AAAAA,MAAAA,EACd;AACe,aAAA,MAAMiG,OAAOM,OAAO;AAAA,QACnCtJ,SAAS,sCAAsC0N,iBAAiB;AAAA,QAChEnI,MAAM;AAAA,QACN4F,SAAS6C;AAAAA,MAAAA,CACV;AAAA,IAAA;AAAA,WAIIvQ,KAAK;AACZ,UAAM,IAAItB,MAAM,uCAAuCkO,WAAW,KAAK5M,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAGhF,QAAA,IAAI7D,MAAM,kBAAkB;AACpC;ACvCA,eAAe+R,cAAcC,QAA+B;AACtD,MAAA;AACF,UAAMC,OAAAA,OAAOD,MAAM;AAAA,WACZ1Q,KAAK;AACNyM,YAAA,sCAAsCzM,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAE7D;ACRA,MAAMqO,cAAc,GACdC,qBAAqB,KAErBC,qBAAsBC,gBAAuB7K,KAAK8K,IAAI,GAAGD,UAAU,IAAIF;AAE7E,eAAeI,UACbC,WACAC,aAAqBP,aACT;AACHG,WAAAA,aAAa,GAAGA,aAAaI,YAAYJ;AAC5C,QAAA;AACF,aAAO,MAAMG,UAAU;AAAA,aAChBlR,KAAK;AAEZ,UAAIA,IAAIsM,YAAYtM,IAAIsM,SAASF,cAAcpM,IAAIsM,SAASF,aAAa;AACjEpM,cAAAA;AAGFoR,YAAAA,aAAaN,mBAAmBC,UAAU;AAChDtE,cAAM,qCAAqC2E,UAAU,UAAUpR,IAAIuC,OAAO,GAC1E,MAAM,IAAIa,QAAS7C,CAAAA,YAAY2C,WAAW3C,SAAS6Q,UAAU,CAAC;AAAA,IAAA;AAI5D,QAAA,IAAI1S,MAAM,oCAAoC;AACtD;AChBA,MAAM2S,uBAAqB,KAAK,KAC1BC,iBAAe,IAAI,KAAK,KAExBnD,YAAUoD,MAAAA,MAAM,CAACC,WAAAA,UAAAA,GAAaC,WAAAA,QAAS,CAAA,CAAC;AAE9C,eAAeC,cACbC,MACAC,UACAC,UACAC,QACe;AAITC,QAAAA,qBAAqBnR,sBAAKoR,SAASJ,QAAQ,GAE3CK,gBAAgBC,iBAAiBH,oBAAoBF,UAAUC,MAAM;AAC3E,QAAMb,UAAU,YAAY;AACpB3E,UAAAA,WAAW,MAAM6B,UAAQ;AAAA,MAC7BwD,KAAAA;AAAAA,MACAQ,cAAc;AAAA,MACdnP,SAAS;AAAA,QAACoP,SAASf;AAAAA,QAAoBgB,QAAQf;AAAAA,MAAY;AAAA,MAC3DgB,QAAQ;AAAA,IAAA,CACT;AAEK7F,YAAA,yCAAyCsF,oBAAoBzF,UAAUF,UAAU,GAEvF,MAAMmG,SAAAA,SAASjG,SAASC,MAAM0C,KAAkBgD,kBAAAA,aAAa,CAAC;AAAA,EAAA,CAC/D;AACH;AAEA,SAASC,iBAAiBN,UAAkBC,UAAkBC,QAAwB;AAGpF,MAAIG,gBAAgB;AACpB,SAAIJ,aAAa,UACfI,gBAAgBrR,cAAAA,QAAKJ,KAAKsR,QAAQ,UAAUF,QAAQ,IAC3CC,aAAa,WACtBI,gBAAgBrR,cAAAA,QAAKJ,KAAKsR,QAAQ,SAASF,QAAQ,IAG9CK;AACT;AC9CA,MAAMZ,qBAAqB,KAAK,KAC1BC,eAAe,IAAI,KAAK,KAExBnD,UAAUoD,MAAAA,MAAM,CAACC,WAAAA,UAAAA,GAAaC,WAAAA,QAAS,CAAA,CAAC;AAG9C,eAAee,iBAAiBb,MAA2B;AACzD,QAAMrF,WAAW,MAAM2E,UAA8B,MACnD9C,QAAQ;AAAA,IACNwD,KAAAA;AAAAA,IACAQ,cAAc;AAAA,IACdnP,SAAS;AAAA,MAACoP,SAASf;AAAAA,MAAoBgB,QAAQf;AAAAA,IAAAA;AAAAA,EAAY,CAC5D,CACH;AAEA7E,SAAAA,QAAM,iDAAiDkF,MAAKrF,UAAUF,UAAU,GAEzEE,SAASC;AAClB;ACRA,MAAMkG,iCAAiCC,YAAAA,SAAS;AAAA,EACtCC,SAAS;AAAA,EAMVC,aAAa;AAAA,EAEpBC,YACE/L,QACAtB,WACAoH,aACAkG,UACA9E,OACA;AACM,UAAA;AAAA,MAAC+E,YAAY;AAAA,IAAA,CAAK,GACxB,KAAKjM,SAASA,QACd,KAAKtB,YAAYA,WACjB,KAAKoH,cAAcA,aACnB,KAAKkG,WAAWA,UAChB,KAAK9E,QAAQA;AAAAA,EAAAA;AAAAA,EAGf,MAAMgF,QAAuB;AACvB,QAAA;AACIxG,YAAAA,OAAO,MAAM,KAAKyG,oBAAoB;AAGxC,WAAKL,eAAe,MACtB,KAAKA,aAAapG,KAAKoG,aAGzBpG,KAAK0G,MAAMxM,QAASyM,CAAe,SAAA,KAAK1P,KAAK0P,IAAI,CAAC,GAE9C,OAAO3G,KAAK4G,cAAe,YAAY5G,KAAK4G,eAAe,KAC7D,KAAKT,SAASnG,KAAK4G,aAGnB,KAAK3P,KAAK,IAAI;AAAA,aAETzD,KAAK;AACZ,WAAKqT,QAAQrT,GAAY;AAAA,IAAA;AAAA,EAC3B;AAAA;AAAA,EAIF,MAAMiT,sBAAkD;AACtD,UAAM9C,QAAqB,KAAKwC,WAAW,KAAK,CAAA,IAAK;AAAA,MAACS,YAAY,KAAKT;AAAAA,IAAM;AAEzE,QAAA;AACK,aAAA,MAAM,KAAK7L,OAAOqH,QAAQ;AAAA,QAC/BE,SAAS;AAAA,UAACC,eAAe,UAAU,KAAKN,KAAK;AAAA,QAAE;AAAA,QAC/CO,KAAK,aAAa,KAAK/I,SAAS,aAAa,KAAKoH,WAAW,YAAY,
KAAKkG,QAAQ;AAAA,QACtF3C;AAAAA,MAAAA,CACD;AAAA,aACMjQ,OAAO;AAEd,UAAIwL,MAAMxL,MAAMkM,aAAalM,MAAMoM,SAASC,KAAKhK,UAAUrC,MAAMqC;AAG7DmJ,YAAAA,QAAQmC,WACVnC,MAAMjG,OAAOvF,KAAK,IAEd,IAAIxB,MAAM,sCAAsCgN,GAAG,EAAE;AAAA,IAAA;AAAA,EAC7D;AAEJ;ACnEA,MAAM4H,cAAcA,CAACrT,QAAsBsT,cAAuC;AAChF,MAAIpS,UAAUlB,OAAOkB,QAAQoS,SAAS,EAAE/U,SACpCgV,eAA8B;AAAA,IAACC,MAAMF;AAAAA,EAAAA,GACrC/U,QAAQqD,KAAKjD,IAAI;AAErB,QAAM4D,QAASmN,CAA4BA,cAAA;AACzC,UAAM+D,UAAUC,kBAAAA,QAAS9R,KAAKjD,IAAAA,IAAQJ,KAAK;AACvCmR,IAAAA,UAAS3J,WAAW2J,UAAS3J,UAAU,KAAK2J,UAASiE,SAASjE,UAASiE,QAAQ,IACjFzS,QAAQ4H,OAAO,GAAG4G,UAAS8D,IAAI,KAAK9D,UAAS3J,OAAO,IAAI2J,UAASiE,KAAK,MAAMF,OAAO,MAEnFvS,QAAQ4H,OAAO,GAAG4G,UAAS8D,IAAI,KAAKC,OAAO;AAAA,EAE/C;AAEO,SAAA;AAAA,IACLG,KAAMlE,CAA4BA,cAAA;AAC5BA,MAAAA,UAAS8D,SAASD,aAAaC,QACjCjR,MAAMgR,YAAY,GAClBrS,QAAQiB,QAAQ,GAChBjB,UAAUlB,OAAOkB,QAAQwO,UAAS8D,IAAI,EAAEjV,MACxCA,GAAAA,QAAQqD,KAAKjD,IAAAA,KACJ+Q,UAAS8D,SAASD,aAAaC,QAAQ9D,UAAShL,UACzDnC,MAAMmN,SAAQ,GAEhB6D,eAAe7D;AAAAA,IACjB;AAAA,IACAhL,QAASgL,CAA4BA,cAAA;AAC7BA,YAAAA,SAAQ,GACd6D,eAAe7D;AAAAA,IACjB;AAAA,IACAvN,SAASA,MAAM;AACbjB,cAAQiB,QAAQ,GAChB5D,QAAQqD,KAAKjD,IAAI;AAAA,IACnB;AAAA,IACA0D,MAAMA,MAAM;AACVnB,cAAQmB,KAAK,GACb9D,QAAQqD,KAAKjD,IAAI;AAAA,IAAA;AAAA,EAErB;AACF;ACxDA,SAASkV,cAAc3N,OAAsB;AAC3C,QAAMI,IAAIJ,SAAQ,IAAI,IAAID,KAAK6N,MAAM7N,KAAK8N,IAAI7N,KAAI,IAAID,KAAK8N,IAAI,IAAI,CAAC;AACpE,SAAO,IAAI7N,QAAOD,KAAK8K,IAAI,MAAMzK,CAAC,GAAGlE,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,MAAM,MAAM,MAAM,IAAI,EAAEkE,CAAC,CAAC;AACrF;ACHA,SAAS0N,cAAcC,UAA2B;AAEzC,SAAA,CAAC,SAASnH,KAAKmH,QAAQ;AAChC;AC8BA,MAAMzH,QAAQ0H,eAAAA,QAAY,eAAe,GAEnCC,+BAA+B,IAC/BC,2BAA2B,IAa3B9J,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAajB,SAAS+J,gBAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrDoI,QAAQ,aAAa;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACrCmF,QAAQ,OAAO;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EAC/BmF,QAAQ,eAAe;AAAA,IAACnF,MAAM;AAAA,IAAU+C,SAASuJ;AAAAA,EAAAA,CAA6B,EAC9EnH,QAAQ,aAAa;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAM,CAAA,EAAE5B;AAC7D;AAEA,MAAMwL,wBAA8C;AAAA,EAClDhW,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA;AAAAA,EAEAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,IAAAA,IAAS3C,SAClB,CAACgH,QAAQ4N,IAAI,IAAI,MAAMC,qBAAqB7U,SAASD,IAAI,GACzD;AAAA,MAAC2F;AAAAA,MAAWoH;AAAAA,MAAakG;AAAAA,MAAUhB;AAAAA,MAAQ8C;AAAAA,IAAAA,IAAeF;AAG5D5C,QAAAA,WAAW,MAAM8C,gBAAgB,IAAI;AACvC3U,aAAOuC,MAAM,sBAAsB;AACnC;AAAA,IAAA;AAEF,UAAMsM,cAAclO,cAAAA,QAAKJ,KAAKsR,QAAQ8C,WAAW;AAEjD3U,WAAOuC,MAAM,gXAA+D,GAC5EvC,OAAOuC,MAAM,yEAA+D,GAC5EvC,OAAOuC,MAAM,yEAA+D,GAC5EvC,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,WAAW,CAAC,KAAKpS,OAAMgE,KAAKjB,SAAS,EAAEgB,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,SAAS,CAAC,KAAKpS,OAAMgE,KAAKmG,WAAW,EAAEpG,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,UAAU,CAAC,KAAKpS,OAAMgE,KAAKqM,QAAQ,EAAEtM,OAAO,EAAE,CAAC,SAAI,GAChFvG,OAAOuC,MAAM,yEAA+D,GAC5EvC,OAAOuC,MAAM,gXAA+D,GAC5EvC,OAAOuC,MAAM,EAAE,GACfvC,OAAOuC,MAAM,0BAA0BC,OAAMgE,KAAKqI,WAAW,CAAC,GAAG;AAEjE,UAAMtQ,QAAQqD,KAAKjD,IACbkW,GAAAA,kBAAkBxB,YAAYrT,QAAQ,kCAAkC,GAKxE4O,YAAY,MAAMkG,GAAAA,QAAQnU,cAAAA,QAAKJ,KAAKwU,GAAO,OAAA,GAAG,gBAAgB,CAAC;AAGrE,eAAWC,OAAO,CAACnD,QAAQlR,cAAAA,QAAKJ,KAAKqO,WAAW,QAAQ,GAAGjO,cAAAA,QAAKJ,KAAKqO,WAAW,OAAO,CAAC;AACtFqG,WAAAA,UAAUD,KAAK;AAAA,QAAC1T,WAAW;AAAA,MAAA,CAAK;AAGlCkL,UAAM,qCAAqCoC,SAAS;AACpD,UAAMsG,sBAAsBvU,cAAAA,QAAKJ,KAAKqO,WAAW,aAAa,GAGxDuG,eAAenG,KAAAA,kBAAkBkG,mBAAmB,GACpDE,gBAAgB,IAAIC,WAAAA,MAAM;AAE5B,QAAA;AACF,YAAMC,mBAAmB,IAAI9C,yBAC3B3L,QACA4N,KAAKlP,WACLkP,KAAK
9H,aACL8H,KAAK5B,UACL4B,KAAK1G,KACP,GAEMkF,QAAgB,CAAE;AACxB,UAAI3M,IAAI;AACR,uBAAiB4M,QAAQoC;AACvBrC,cAAMzP,KAAK0P,IAAI,GACf5M,KACAuO,gBAAgBjB,IAAI;AAAA,UAClBJ,MAAM;AAAA,UACN9O,QAAQ;AAAA,UACRqB,SAASO;AAAAA,UACTqN,OAAO2B,iBAAiB3C;AAAAA,QAAAA,CACzB;AAGH,UAAI4C,uBAAuB;AAErB,YAAA;AAAA,QAAC3K,SAAS4K;AAAAA,MAAAA,IAAQ,MAAM,OAAO,OAAO;AACtCA,YAAAA,KACJvC,OACA,OAAOC,SAAe;AACpB,YAAIA,KAAKrL,SAAS,UAAUqL,KAAKrL,SAAS;AACxC,gBAAM4J,cAAcyB,KAAKxB,KAAKwB,KAAK1U,MAAM0U,KAAKrL,MAAM+G,SAAS;AAAA,aACxD;AACL,gBAAM6G,MAAM,MAAMlD,iBAAiBW,KAAKxB,GAAG;AACrC0D,gBAAAA,cAAcM,aAAa,MAAM;AACxBC,yBAAAA,MAAM,GAAGF,GAAG;AAAA,CAAI;AAAA,UAAA,CAC9B;AAAA,QAAA;AAGqB,gCAAA,GACxBZ,gBAAgBjB,IAAI;AAAA,UAClBJ,MAAM;AAAA,UACN9O,QAAQ;AAAA,UACRqB,SAASwP;AAAAA,UACT5B,OAAO2B,iBAAiB3C;AAAAA,QAAAA,CACzB;AAAA,MAAA,GAEH;AAAA,QAACiD,aAAanB,KAAKmB;AAAAA,MAAAA,CACrB;AAAA,aACO3V,OAAO;AACd4U,sBAAgBxS,KAAK;AACf,YAAA;AAAA,QAACC;AAAAA,MAAAA,IAAW0J,YAAY/L,KAAK;AACnC,YAAM,IAAIxB,MAAM,sCAAsC6D,OAAO,EAAE;AAAA,IAAA;AAGjE6S,iBAAavW,OACb,MAAMiX,SAAAA,SAASV,YAAY,GAE3BN,gBAAgBjB,IAAI;AAAA,MAACJ,MAAM;AAAA,MAAqC9O,QAAQ;AAAA,IAAA,CAAK;AACzE,QAAA;AACIiK,YAAAA,WAAWC,WAAWC,aAAce,CAA2B,mBAAA;AACnEiF,wBAAgBnQ,OAAO;AAAA,UACrB8O,MAAM,mCAAmCK,cAAcjE,cAAc,CAAC;AAAA,QAAA,CACvE;AAAA,MAAA,CACF;AAAA,aACM7P,KAAK;AACZ8U,YAAAA,gBAAgBxS,QACV,IAAI5D,MAAM,4BAA4BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAG3DuS,oBAAgBjB,IAAI;AAAA,MAClBJ,MAAM,kCAAkChR,OAAMgE,KAAK,GAAGoI,SAAS,EAAE,CAAC;AAAA,IACnE,CAAA,GACD,MAAM4B,cAAc5B,SAAS,GAE7BiG,gBAAgBjB,IAAI;AAAA,MAClBJ,MAAM,6BAA6BE,kBAAAA,QAAS9R,KAAKjD,IAAI,IAAIJ,KAAK,CAAC;AAAA,IAAA,CAChE,GACDsW,gBAAgB1S,QAAQ;AAAA,EAAA;AAE5B;AAGA,eAAeuS,qBACb7U,SACAD,MACgD;AAC1CO,QAAAA,QAAQ,MAAMkU,gBAAczU,IAAI,GAChC,CAAC0F,OAAO,IAAI1F,KAAKqJ,oBACjB;AAAA,IAACqC;AAAAA,IAAQpL;AAAAA,MAAWL,SACpB;AAAA,IAAC0F;AAAAA,IAAWoH;AAAAA,IAAa9F;AAAAA,MAAU,MAAMiH,iBAC7CjO,SACAyF,SACAwG,mBACF,GAEM;AAAA,IAACiC;AAAAA,EAAAA,IAASlH,OAAOK,OAAO;AAC9B,MAAI,CAAC4O,kBAAAA,QAAS/H,KAAK,KAAKA,MAAM3F,SAAS;AAC/B,UAAA,IAAI3J,MAAM,kBAAkB;AAGpC,MAAI,CAACqX,kBAAAA,QAASnJ,WAAW,KAAKA,YAAYvE,SAAS;AACjD,UAAM,IAAI3J,MAAM,WAAWkO,WAAW,+BAA+B;AAGjEkG,QAAAA,WAAWrN,OAAOrF,MAAM,WAAW,KAAM,MAAM8P,qBAAqBpQ,SAAS8M,WAAW,CAAE;AAChG,MAAIkG,SAASzK,SAAS;AACpB,UAAM,IAAI3J,MAAM,aAAa0B,MAAM,WAAW,CAAC,2BAA2B;AAG5E,MAAI,iBAAiBA,UACfA,MAAMyV,cAAc,KAAKzV,MAAMyV,cAAcxB;AAC/C,UAAM,IAAI3V,MAAM,iCAAiC2V,wBAAwB,QAAQ;AAIrF,QAAM2B,qBAAqB,GAAGpJ,WAAW,WAAWkG,QAAQ;AAC5D,MAAImD,MAAM,OAAO,YACX7V,MAAM6V,QAAQpI,SAETqI,KAAW9V,WAAAA,MAAM6V,GAAG,IAGf,MAAM1K,OAAOM,OAAO;AAAA,IAChC/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACTsI,SAASjK,cAAAA,QAAKJ,KAAKL,SAAS6V,kBAAkB;AAAA,IAC9CG,QAAQD,KAAAA;AAAAA,EAAAA,CACT,GAEA;AAGH,SAAIjC,cAAcgC,GAAG,MACnBA,MAAMrV,cAAAA,QAAKJ,KAAKyV,KAAKD,kBAAkB,IAIrC,CAAC5V,MAAMgW,aAAaC,KAAAA,WAAWJ,GAAG,MACZ,MAAM1K,OAAOM,OAAO;AAAA,IAC1C/D,MAAM;AAAA,IACNvF,SAAS,SAAS0T,GAAG;AAAA,IACrBpL,SAAS;AAAA,EACV,CAAA,MAKCoL,MAAM,MAIH,CACLnP,QACA;AAAA,IACEtB;AAAAA,IACAoH;AAAAA,IACAkG;AAAAA,IACA9E;AAAAA,IACA8D,QAAQlR,cAAAA,QAAKgC,QAAQqT,GAAG;AAAA,IACxBrB,aAAahU,cAAAA,QAAKoR,SAASiE,GAAG;AAAA,IAC9BG,WAAWhW,MAAMgW;AAAAA,IACjBP,aAAazV,MAAMyV,eAAezB;AAAAA,EAAAA,CACnC;AAEL;ACrRA,MAAM7J,aAAW;AAAA;AAAA;AAAA,GAKX+L,6BAAmD;AAAA,EACvD7X,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAClB,CAACyF,OAAO,IAAI1F,KAAKqJ,oBACjB;AAAA,MAAC1D;AAAAA,MAAWoH;AAAAA,MAAaoB;AAAAA,MAAOlH;AAAAA,IAAU,IAAA,MAAMiH,iBACpDjO,SACAyF,SACAwG,mBACF;AAEI,QAAA;AACF,YAAMjF,OAAOqH,QAAQ;AAAA,QACnBC,QAAQ;AAAA,QACRC,SAAS;AAAA,UAACC,eAAe,UAAUN,KAAK;AAAA,QAAE;AAAA,QAC1CO,KAAK,aAAa/I,SAAS,aAAaoH,WAAW;A
AAA,QACnDL,MAAM;AAAA,UACJiC,SAAS;AAAA,QAAA;AAAA,MACX,CACD,GAEDvO,OAAOuC,MACL,GAAGC,OAAMgM,MACP,+BAA+B7B,WAAW;AAAA;AAAA,CAC5C,CAAC,EACH,GAEA3M,OAAOuC,MACL,GAAGC,OAAMoS,KAAK;AAAA,CAAsE,CAAC,EACvF;AAAA,aACO3U,OAAO;AACR,YAAA;AAAA,QAACqC;AAAAA,MAAAA,IAAW0J,YAAY/L,KAAK;AACnCD,aAAOuC,MAAM,GAAGC,OAAMwF,IAAI,mCAAmC1F,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAAA,EAC7E;AAEJ,GCxCMgU,4BAA4B,IAuB5BhM,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAajB,SAAS+J,gBAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrDoI,QAAQ,SAAS;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACjCmF,QAAQ,UAAU;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EAClCmF,QAAQ,SAAS;AAAA,IAACnF,MAAM;AAAA,IAAU+C,SAAS0L;AAAAA,IAA2BC,OAAO;AAAA,EAAI,CAAA,EAAEvN;AACxF;AAEA,MAAMwN,2BAAyE;AAAA,EAC7EhY,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,IAAAA,IAAS3C,SAClBM,QAAQ,MAAMkU,gBAAczU,IAAI,GAChC,CAAC0F,OAAO,IAAI1F,KAAKqJ,oBAEjB;AAAA,MAAC1D;AAAAA,MAAWoH;AAAAA,MAAaoB;AAAAA,MAAOlH;AAAAA,IAAAA,IAAU,MAAMiH,iBACpDjO,SACAyF,SACAwG,mBACF,GAEMoE,QAAsC;AAAA,MAACC,OAAOmG,0BAA0BlG,SAAS;AAAA,IAAC;AACxF,QAAIjQ,MAAMgQ,OAAO;AAGf,UAAIhQ,MAAMgQ,QAAQ,KAAKhQ,MAAMgQ,QAAQsG,OAAOC;AAC1C,cAAM,IAAIjY,MACR,qDAAqDgY,OAAOC,gBAAgB,EAC9E;AAEIvG,YAAAA,QAAQhQ,MAAMgQ,MAAMC,SAAS;AAAA,IAAA;AAGjCjQ,QAAAA,MAAMwW,UAAUxW,MAAMyW;AACpB,UAAA;AACIC,cAAAA,eAAeC,iBAAiB3W,MAAMwW,MAAM,GAC5CI,cAAcD,iBAAiB3W,MAAMyW,KAAK;AAEhD,YAAIG,eAAeF,gBAAgBG,gBAAQD,aAAaF,YAAY;AAC5D,gBAAA,IAAIpY,MAAM,sCAAsC;AAGxDyR,cAAMyG,SAASxW,MAAMwW,QACrBzG,MAAM0G,QAAQzW,MAAMyW;AAAAA,eACb7W,KAAK;AACZ,cAAM,IAAItB,MAAM,uBAAuBsB,GAAG,EAAE;AAAA,MAAA;AAI5CsM,QAAAA;AACA,QAAA;AACS,iBAAA,MAAMxF,OAAOqH,QAA4B;AAAA,QAClDE,SAAS;AAAA,UAACC,eAAe,UAAUN,KAAK;AAAA,QAAE;AAAA,QAC1CO,KAAK,aAAa/I,SAAS,aAAaoH,WAAW;AAAA,QACnDuD,OAAO;AAAA,UAAC,GAAGA;AAAAA,QAAAA;AAAAA,MAAK,CACjB;AAAA,aACMjQ,OAAO;AACR,YAAA;AAAA,QAACqC;AAAAA,MAAAA,IAAW0J,YAAY/L,KAAK;AACnCD,aAAOC,MAAM,GAAGuC,OAAMwF,IAAI,+BAA+B1F,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAGrE+J,QAAAA,YAAYA,SAASgE,SAAS;AAC5BhE,UAAAA,SAASgE,QAAQjI,WAAW,GAAG;AACjCpI,eAAOuC,MAAM,mBAAmB;AAChC;AAAA,MAAA;AAGI0U,YAAAA,QAAQ,IAAIC,0BAAM;AAAA,QACtBC,SAAS,CACP;AAAA,UAAC3Y,MAAM;AAAA,UAAY4Y,OAAO;AAAA,UAAYC,WAAW;AAAA,QAAA,GACjD;AAAA,UAAC7Y,MAAM;AAAA,UAAa4Y,OAAO;AAAA,UAAcC,WAAW;AAAA,QAAA,GACpD;AAAA,UAAC7Y,MAAM;AAAA,UAAY4Y,OAAO;AAAA,UAAaC,WAAW;AAAA,QAAO,CAAA;AAAA,MAAA,CAE5D;AAEQhH,eAAAA,QAAQ5J,QAAS8J,CAAmC,WAAA;AACrD,cAAA;AAAA,UAAClL;AAAAA,UAAI1D;AAAAA,QAAAA,IAAa4O;AACxB0G,cAAMK,OAAO;AAAA,UACXC,UAAU;AAAA,UACV5V,WAAW6V,QAAY5V,YAAAA,KAAKgH,MAAMjH,SAAS,GAAG,qBAAqB;AAAA,UACnEkR,UAAUxN;AAAAA,QAAAA,CACX;AAAA,MAAA,CACF,GAED4R,MAAMQ,WAAW;AAAA,IAAA;AAAA,EACnB;AAEJ;AAEA,SAASX,iBAAiBY,MAA4C;AACpE,MAAI,CAACA,KAAM;AACX,QAAMC,aAAa/O,QAAAA,MAAM8O,MAAM,cAAc,oBAAI9V,MAAM;AACvD,MAAIgW,QAAAA,QAAQD,UAAU;AACbA,WAAAA;AAGT,QAAM,IAAIlZ,MAAM,WAAWiZ,IAAI,8BAA8B;AAC/D;AC/IA,MAAMpN,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaXuN,eAAqC;AAAA,EACzCrZ,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,SACA6K,eAEoB,MAAMC,kBAEP/K,MAAMC,SAAS6K,SAAS;AAAA,EAE7CJ,UAAAA;AACF;AAEA,eAAeK,iBAAiB;AAUlB,UAAA,MAAM;mBAAO,kBAAiC;AAAA,EAAA,CAAA,EAAA,KAAA,SAAA,GAAA;AAAA,WAAA,EAAA;AAAA,EAAA,CAAA,GAE/CC;AACb;ACvCA,MAAMkN,sBAAsB,6BACtBC,kBAAkB;AAMFC,eAAAA,cACpBC,aACA9X,OACAN,SACkB;AACZ,QAAA;AAAA,IAAC+G;AAAAA,IAAW0E;AAAAA,IAAQtL;AAAAA,EAAUH,IAAAA,SAC9BqY,SAAS,OAAOD,cAClBE,wBAAwBF,WAAW,IACnCG,kBAAgB9M,MAAM,IAEpB+M,cAAcH,OAAOI,SAAS,GAAG;AACvC,MAAID,eAAe,CAAE,MAAME,8BAA8BL,QAAQrY,OAAO;AAC/D,WAAA;AAEH2Y,QAAAA,m
BACJ,OAAOrY,MAAMsY,cAAgB,MACzB,MAAMC,qBAAqBL,aAAaxY,OAAO,IAC/C8Y,CAAAA,CAAQxY,MAAMsY;AAEhBR,SAAAA,gBAAgBC,UAClBlY,OAAOuC,MAAM,wBAAwB2V,MAAM,EAAE,GAQ/C,MALetR,UAAU;AAAA,IACvBE,aAAa;AAAA,IACbC,gBAAgB;AAAA,EACjB,CAAA,EAEYmH,QAAQ;AAAA,IACnBC,QAAQ;AAAA,IACRuD,KAAK;AAAA,IACLpF,MAAM;AAAA,MAAC4L;AAAAA,MAAQM;AAAAA,IAAgB;AAAA,IAC/BtG,cAAc;AAAA,EACf,CAAA,GAEM;AACT;AAEA,SAASwG,qBAAqBL,aAAsBxY,SAA6C;AACzF,QAAA;AAAA,IAACyL;AAAAA,IAAQtL;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C;AAEhCG,SAAAA,OAAOuC,MAAM,EAAE,GACX8V,cACFrY,OAAOuC,MAAMqW,iBAAAA;AAAAA,QACTpW,OAAMqW,OAAO,GAAGC,oBAAAA,QAAWC,OAAO,WAAW,CAAC;AAAA,WAC3CvW,OAAMwF,IAAIxF,OAAMwW,UAAU,QAAQ,CAAC,CAAC;AAAA;AAAA,iCAEdxW,OAAMwW,UAAU,gBAAgB,CAAC;AAAA;AAAA,KAE7D,IAEDhZ,OAAOuC,MAAMqW,iBAAAA;AAAAA,QACTpW,OAAMqW,OAAO,GAAGC,oBAAAA,QAAWC,OAAO,WAAW,CAAC;AAAA;AAAA;AAAA,iBAGrCvW,OAAMwW,UAAU,gBAAgB,CAAC;AAAA;AAAA;AAAA,KAG7C,GAGHhZ,OAAOuC,MAAM,EAAE,GAER+I,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAASsW,iBAAAA;AAAAA;AAAAA;AAAAA,IAGThO,SAAS;AAAA,EAAA,CACV;AACH;AAEA,SAAS2N,8BACPL,QACArY,SACkB;AACZ,QAAA;AAAA,IAACyL;AAAAA,IAAQtL;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C;AAEhCG,SAAAA,OAAOuC,MAAM,EAAE,GACfvC,OAAOuC,MAAMC,OAAMqW,OAAO,GAAGC,oBAAAA,QAAWC,OAAO,wCAAwC,CAAC,GAEpFb,WAAW,OACblY,OAAOuC,MAAM,kCAAkC,GAC/CvC,OAAOuC,MAAM,2CAA2C,GACxDvC,OAAOuC,MAAM,iCAAiC,GAC9CvC,OAAOuC,MAAM,2BAA2B,MAExCvC,OAAOuC,MAAM,KAAK2V,OAAOe,QAAQ,OAAO,OAAO,EAAEA,QAAQ,OAAO,KAAK,CAAC,EAAE,GACxEjZ,OAAOuC,MAAM,KAAK2V,OAAOe,QAAQ,OAAO,OAAO,EAAEA,QAAQ,OAAO,SAAS,CAAC,EAAE,IAG9EjZ,OAAOuC,MAAM,EAAE,GAER+I,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAASsW,iBAAAA;AAAAA,+BACkBpW,OAAMwF,IAAI,OAAO,CAAC;AAAA,gBACjCxF,OAAMwW,UAAU,iBAAiB,CAAC;AAAA,IAC9CpO,SAAS;AAAA,EAAA,CACV;AACH;AAEA,SAASwN,kBAAgB9M,QAAsC;AAC7D,SAAOA,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACT4T,QAAQgD;AAAAA,IACRjM,UAAWiL,CAAAA,WAAWiB,eAAejB,QAAQA,MAAM;AAAA,EAAA,CACpD;AACH;AAEA,SAASgB,aAAahB,QAA+B;AACnD,MAAIA,WAAW,OAAOA,WAAW,eAAeA,WAAW;AAClDA,WAAAA;AAGL,MAAA;AACF,UAAMkB,UAAUlB,OACbe,QAAQ,aAAa,KAAKnB,mBAAmB,EAAE,EAC/CmB,QAAQ,OAAOlB,eAAe,GAE3BsB,SAAS3H,aAAAA,QAAI9I,MAAMwQ,OAAO;AAC5BE,QAAAA,OAAOD,OAAOC,QAAQ;AAC1B,WAAI,YAAYxM,KAAKuM,OAAOE,YAAY,EAAE,MACxCD,OAAOA,KAAKL,QAAQ,cAAc,EAAE,IAGtCK,OAAOA,KAAKL,QAAQlB,iBAAiB,IAAI,EAAEkB,QAAQ,IAAIO,OAAO1B,qBAAqB,GAAG,GAAG,GAAG,GAErF,GAAGuB,OAAOE,QAAQ,KAAKD,IAAI;AAAA,EAAA,QACtB;AACL,WAAA;AAAA,EAAA;AAEX;AAEA,SAASH,eAAejB,QAAuBD,aAAoC;AACjF,MAAIC,WAAW,OAAOA,WAAW,eAAeA,WAAW;AAClD,WAAA;AAGL,MAAA;AACEtP,WAAAA,aAAAA,QAAAA,MAAMsP,UAAW,CAAmB,GACjC;AAAA,EAAA,QACK;AAAA,EAAA;AAId,SAAI,aAAapL,KAAKmL,WAAW,IACxB,+DAGF,mBAAmBA,WAAW;AACvC;AAEA,SAASE,wBAAwBF,aAA6B;AAC5D,QAAMC,SAASgB,aAAajB,WAAW,GACjCnQ,SAASqR,eAAejB,QAAQD,WAAW;AACjD,MAAInQ,WAAW;AACP,UAAA,IAAIrJ,MAAMqJ,MAAM;AAGxB,MAAI,CAACoQ;AACG,UAAA,IAAIzZ,MAAM,gBAAgB;AAG3ByZ,SAAAA;AACT;AC5KA,MAAM5N,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAUXmP,uBAA6C;AAAA,EACjDjb,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,IAAUH,IAAAA,SACX,CAACqY,MAAM,IAAItY,KAAKqJ;AAEtB,QAAI,CAACiP;AACG,YAAA,IAAIzZ,MAAM,yDAAyD;AAG3E,UAAM0B,QAAQP,KAAKQ;AAGJuP,gBAAAA,QAAGyG,WAAWzV,sBAAKJ,KAAKpB,QAAQ4B,IAAOmX,GAAAA,MAAM,CAAC,KAE3DlY,OAAO0L,KAAK,WAAWwM,MAAM,mDAAmD,GAGlE,MAAMF,cAAcE,QAAQ/X,OAAON,OAAO,KAExDG,OAAOuC,MAAM,gCAAgC;AAAA,EAAA;AAGnD,GC1CMmX,YAAuC;AAAA,EAC3Clb,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCHMC,aAAW;AAAA;AAAA;AAAA;AAAA,GAMXqP,0BAAgD;AAAA,EACpDnb,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQ4G;AA
AAA,IAAAA,IAAa/G,SACtB,CAACqY,MAAM,IAAItY,KAAKqJ,oBAChBpC,SAASD,UAAU;AAAA,MAACE,aAAa;AAAA,MAAMC,gBAAgB;AAAA,IAAK,CAAA,GAC5D6S,WAAW,MAAMxB,gBAAgBF,QAAQrY,OAAO;AAClD,QAAA;AACF,YAAMgH,OAAOqH,QAAQ;AAAA,QAACC,QAAQ;AAAA,QAAUG,KAAK,SAASsL,QAAQ;AAAA,MAAA,CAAG,GACjE5Z,OAAOuC,MAAM,gBAAgB;AAAA,aACtBxC,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA4BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC3D;AAEJ;AAIA,eAAe8V,gBAAgByB,WAA+Bha,SAA4B;AACxF,QAAMia,kBAAkBD,aAAaA,UAAUjN,YAAAA,GACzC;AAAA,IAACtB;AAAAA,IAAQ1E;AAAAA,EAAAA,IAAa/G,SAGtBka,UAAU,MAFDnT,UAAU;AAAA,IAACE,aAAa;AAAA,IAAMC,gBAAgB;AAAA,EAAK,CAAA,EAErCmH,QAAsB;AAAA,IAACwD,KAAK;AAAA,EAAA,CAAQ;AACjE,MAAIoI,iBAAiB;AACbtM,UAAAA,WAAWuM,QAAQ7D,OAAQgC,CAAWA,WAAAA,OAAOA,OAAOtL,YAAY,MAAMkN,eAAe,EAAE,CAAC;AAC9F,QAAI,CAACtM;AACH,YAAM,IAAI/O,MAAM,WAAWob,SAAS,aAAa;AAGnD,WAAOrM,SAASnI;AAAAA,EAAAA;AAGZoI,QAAAA,UAAUsM,QAAQ7U,IAAKgT,CAAY,YAAA;AAAA,IAAChQ,OAAOgQ,OAAO7S;AAAAA,IAAI7G,MAAM0Z,OAAOA;AAAAA,EAAAA,EAAQ;AACjF,SAAO5M,OAAOM,OAAO;AAAA,IACnBtJ,SAAS;AAAA,IACTuF,MAAM;AAAA,IACN4F;AAAAA,EAAAA,CACD;AACH;ACjDA,MAAMnD,aAAW;AAAA;AAAA;AAAA,GAKX0P,yBAA+C;AAAA,EACnDxb,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,QAAUH,SACX;AAAA,MAAC+G;AAAAA,IAAAA,IAAa/G,SAEdka,UAAU,MADDnT,UAAU;AAAA,MAACE,aAAa;AAAA,MAAMC,gBAAgB;AAAA,IAAK,CAAA,EACrCmH,QAAsB;AAAA,MAACwD,KAAK;AAAA,IAAA,CAAQ;AACjE1R,WAAOuC,MAAMwX,QAAQ7U,IAAKgT,YAAWA,OAAOA,MAAM,EAAE3X,KAAK;AAAA,CAAI,CAAC;AAAA,EAAA;AAElE;ACpBO,SAAS0Z,yBAAyBtN,aAAqC;AAC5E,MAAI,CAACA;AACI,WAAA;AAGHnO,QAAAA,OAAO,GAAGmO,WAAW;AAEvBnO,SAAAA,KAAKoO,YAAY,MAAMpO,OAClB,gDAGLA,KAAK4J,SAAS,IACT,oDAGL5J,KAAK4J,SAASyE,KACT,6CAGJ,aAAaC,KAAKtO,IAAI,IAItB,yBAAyBsO,KAAKtO,IAAI,IAInC,QAAQsO,KAAKtO,IAAI,IACZ,yDAGF,KAPE,0EAJA;AAYX;AC9BO,SAAS0b,0BACd5O,QACA0B,UAAgD,IAC/B;AACjB,SAAO1B,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACT2K,UAAWzO,CAAS,SACNyb,yBAAyBzb,IAAI,KAKlC;AAAA,IAET,GAAGwO;AAAAA,EAAAA,CACJ;AACH;AChBO,MAAMmN,eAAe;AAErB,SAASC,YAAYvT,QAAyD;AACnF,SAAOA,OAAOqH,QAAkC;AAAA,IAACI,KAAK;AAAA,EAAA,CAAW;AACnE;AAEgB+L,SAAAA,YACdxT,QACAyT,WACA3N,aACsC;AACtC,SAAO4N,OAAO1T,QAAQ,OAAOyT,WAAW3N,cAAc;AAAA,IAACA;AAAAA,MAAeiB,MAAS;AACjF;AAEgB4M,SAAAA,YACd3T,QACAyT,WACA3N,aACsC;AACtC,SAAO4N,OAAO1T,QAAQ,SAASyT,WAAW3N,cAAc;AAAA,IAACA;AAAAA,MAAeiB,MAAS;AACnF;AAEgB6M,SAAAA,YACd5T,QACAyT,WACsC;AACbA,SAClBC,OAAO1T,QAAQ,SAAS,GAAGyT,SAAS,WAAW,EAAE;AAC1D;AAEgBI,SAAAA,YAAY7T,QAAsByT,WAAgD;AACzFC,SAAAA,OAAO1T,QAAQ,UAAUyT,SAAS;AAC3C;AAEA,SAASC,OACP1T,QACAsH,QACAmM,WACAhO,MACA;AACA,SAAOzF,OAAOqH,QAAQ;AAAA,IAACC;AAAAA,IAAQG,KAAK,YAAYgM,SAAS;AAAA,IAAIhO;AAAAA,EAAAA,CAAK;AACpE;ACrCaqO,MAAAA,qBAAuC,OAAO/a,MAAMC,YAAY;AACrE,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQsL;AAAAA,EAAUzL,IAAAA,SAC9B,CAAG0W,EAAAA,OAAOqE,aAAa,IAAIhb,KAAKqJ,oBAChCpC,SAASD,UAAU,GAEnBiU,YAAYtE,SAAS0D,yBAAyB1D,KAAK;AACrDsE,MAAAA;AACI,UAAA,IAAIpc,MAAMoc,SAAS;AAG3B,QAAM,CAACzN,UAAU0N,SAASC,eAAe,IAAI,MAAM5X,QAAQY,IAAI,CAC7D8C,OAAOuG,SAASC,KAAK,EAAE2N,KAAMC,CAAAA,SAASA,KAAK/V,IAAKgW,CAAAA,OAAOA,GAAG1c,IAAI,CAAC,GAC/D2c,YAAwBtU,MAAM,EAAEmU,KAAMC,CAAAA,SAASA,KAAK/V,IAAKgW,QAAOA,GAAG1c,IAAI,CAAC,GACxEqI,OAAOqH,QAAQ;AAAA,IAACI,KAAK;AAAA,EAAY,CAAA,CAAC,CACnC;AAED,MAAIgM,YAAY,OAAO/D,SAAS2D,0BAA0B5O,MAAM,IAC5D8P,kBAAkBd;AAQtB,MANIA,UAAUe,WAAWlB,YAAY,IACnCG,YAAYA,UAAU1V,MAAM,CAAC,IAE7BwW,kBAAkB,GAAGjB,YAAY,GAAGG,SAAS,IAG3CQ,QAAQxC,SAASgC,SAAS;AAC5B,UAAM,IAAI7b,MAAM,kBAAkB2c,eAAe,kBAAkB;AAGrE,MAAIR,eAAe;AACXU,UAAAA,aAAa5O,oBAAoBkO,aAAa;AAChDU,QAAAA;AACI,YAAA,IAAI7c,MAAM6c,UAAU;AAAA,EAAA;AAI9B,QAAM3O,cAAc,OAAOiO,iBAAiB7N,qBAAqBzB,MAAM;AACvE,MAAIqB,eAAe,CAACS,SAASkL,S
AAS3L,WAAW;AAC/C,UAAM,IAAIlO,MAAM,YAAYkO,WAAW,mBAAmB;AAIxD,MAAA,CADmBoO,gBAAgBzC,SAAS,2BAA2B;AAEnE,UAAA,IAAI7Z,MAAM,4CAA4C;AAG1D,MAAA;AACF,UAAM0c,YAAwBtU,QAAQyT,WAAW3N,WAAW,GAC5D3M,OAAOuC,MACL,iBAAiB6Y,eAAe,YAC9BzO,eAAe,iBAAiBA,WAAW,EAAE,eAEjD;AAAA,WACO5M,KAAK;AACZ,UAAM,IAAItB,MAAM;AAAA,EAAmCsB,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAEpE;ACzDA,SAAS+R,gBAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EAAE2W,OAAO,SAAS;AAAA,IAAC1T,MAAM;AAAA,EAAU,CAAA,EAAEmB;AAC/F;AAMawS,MAAAA,qBAAyD,OAAO5b,MAAMC,YAAY;AACvF,QAAA;AAAA,IAAC+G;AAAAA,IAAW0E;AAAAA,IAAQtL;AAAAA,MAAUH,SAC9B,CAAGqb,EAAAA,EAAE,IAAItb,KAAKqJ,oBACd;AAAA,IAACwS;AAAAA,MAAS,MAAMpH,gBAAczU,IAAI,GAClCiH,SAASD,UAAU;AACzB,MAAI,CAACsU;AACG,UAAA,IAAIzc,MAAM,qCAAqC;AAGnD6b,MAAAA,YAAY,GAAGY,EAAE;AACfQ,QAAAA,UAAUzB,yBAAyBK,SAAS;AAC9CoB,MAAAA;AACIA,UAAAA;AAERpB,cAAYA,UAAUe,WAAWlB,YAAY,IAAIG,UAAU1V,MAAM,CAAC,IAAI0V;AAEtE,QAAM,CAACqB,cAAc,IAAI,MAAMxY,QAAQY,IAAI,CAACoX,YAAwBtU,MAAM,CAAC,CAAC,GACtE+U,cAAcD,eAAe1R,KAAM4R,CAASA,SAAAA,KAAKrd,SAAS8b,SAAS,GACnEhY,UACJsZ,eAAeA,YAAYjP,cACvB,mCAAmCiP,YAAYjP,WAAW,OAC1D;AAEF8O,SAAAA,QACFzb,OAAO0L,KAAK,0DAA0D4O,SAAS,GAAG,IAElF,MAAMhP,OAAOM,OAAO;AAAA,IAClB/D,MAAM;AAAA,IACNvF,SAAS,GAAGA,OAAO;AAAA;AAAA,IACnB4T,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGC,KAAK;AAAA,IACnC9O,UAAW6O,CACFA,UAAAA,UAAUxB,aAAa;AAAA,EAEjC,CAAA,GAGIa,YAAwBtU,QAAQyT,SAAS,EAAEU,KAAK,MAAM;AAC3Dhb,WAAOuC,MAAM,oCAAoC;AAAA,EAAA,CAClD;AACH,GC9CayZ,mBAAqC,OAAOpc,MAAMC,YAAY;AACnE,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQsL;AAAAA,EAAAA,IAAUzL,SAC9B,CAAA,EAAG0W,OAAOqE,aAAa,IAAIhb,KAAKqJ,oBAChC9I,QAAQP,KAAKQ,YACbyG,SAASD,aAETiU,YAAYtE,SAAS0D,yBAAyB1D,KAAK;AACrDsE,MAAAA;AACI,UAAA,IAAIpc,MAAMoc,SAAS;AAG3B,QAAM,CAACzN,UAAUuO,cAAc,IAAI,MAAMxY,QAAQY,IAAI,CACnD8C,OAAOuG,SAASC,KAAO2N,EAAAA,KAAMC,UAASA,KAAK/V,IAAKgW,CAAOA,OAAAA,GAAG1c,IAAI,CAAC,GAC/D2c,YAAwBtU,MAAM,CAAC,CAChC,GACKiU,UAAUa,eAAezW,IAAK+W,CAAAA,OAAOA,GAAGzd,IAAI;AAElD,MAAI8b,YAAY,OAAO/D,SAAS2D,0BAA0B5O,MAAM,IAC5D8P,kBAAkBd;AAQtB,MANIA,UAAUe,WAAWlB,YAAY,IACnCG,YAAYA,UAAU1V,MAAM,CAAC,IAE7BwW,kBAAkB,GAAGjB,YAAY,GAAGG,SAAS,IAG3C,CAACQ,QAAQxC,SAASgC,SAAS;AAC7B,UAAM,IAAI7b,MAAM,kBAAkB2c,eAAe,mBAAmB;AAGhEzO,QAAAA,cAAc,OAAOiO,iBAAiB7N,qBAAqBzB,MAAM,IACjEgQ,aAAa5O,oBAAoBC,WAAW;AAC9C2O,MAAAA;AACI,UAAA,IAAI7c,MAAM6c,UAAU;AAGxB,MAAA,CAAClO,SAASkL,SAAS3L,WAAW;AAChC,UAAM,IAAIlO,MAAM,YAAYkO,WAAW,mBAAmB;AAG5D,QAAMiP,cAAcD,eAAe1R,KAAM4R,CAASA,SAAAA,KAAKrd,SAAS8b,SAAS;AAErEsB,MAAAA,eAAeA,YAAYjP,aAAa;AAC1C,QAAIiP,YAAYjP,gBAAgBA;AAC9B,YAAM,IAAIlO,MAAM,iBAAiB2c,eAAe,sBAAsBzO,WAAW,EAAE;AAGhFxM,UAAMsb,SACT,MAAMnQ,OAAOM,OAAO;AAAA,MAClB/D,MAAM;AAAA,MACNvF,SAAS,oCAAoCsZ,YAAYjP,WAAW;AAAA;AAAA;AAAA,MAEpEuJ,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGlP,YAAY;AAAA,MAC1CK,UAAW6O,CACFA,UAAAA,UAAU,SAAS;AAAA,IAAA,CAE7B;AAAA,EAAA;AAID,MAAA;AACF,UAAMX,YAAwBtU,QAAQyT,WAAW3N,WAAW,GAC5D3M,OAAOuC,MAAM,iBAAiB6Y,eAAe,cAAczO,WAAW,eAAe;AAAA,WAC9E5M,KAAK;AACZ,UAAM,IAAItB,MAAM;AAAA,EAA+BsB,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAEhE;AC9DA,SAAS+R,gBAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EAAE2W,OAAO,SAAS;AAAA,IAAC1T,MAAM;AAAA,EAAU,CAAA,EAAEmB;AAC/F;AAEakT,MAAAA,qBAAoD,OAAOtc,MAAMC,YAAY;AAClF,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQsL;AAAAA,MAAUzL,SAC9B,CAAG0W,EAAAA,KAAK,IAAI3W,KAAKqJ,oBACjB;AAAA,IAACwS;AAAAA,EAAAA,IAAS,MAAMpH,gBAAczU,IAAI,GAClCiH,SAASD,UAETiU,GAAAA,YAAYtE,SAAS0D,yBAAyB1D,KAAK;AACrDsE,MAAAA;AACI,UAAA,IAAIpc,MAAMoc,SAAS;AAG3B,QAAMc,iBAAiB,MAAMR,YAAwBtU,MAAM;AAE3D,MAAIyT,YAAY,OAAO/D,SAAS2D,0BAA0B5O,MAAM,IAC5D8P,kBAAkBd;AAElBA,YAAUe,WAAWlB,YAAY,IACnCG,YAAYA,UAAU1V,MAAM,CAAC,IAE7BwW
,kBAAkB,GAAGjB,YAAY,GAAGG,SAAS;AAI/C,QAAMsB,cAAcD,eAAe1R,KAAM4R,CAASA,SAAAA,KAAKrd,SAAS8b,SAAS;AACzE,MAAI,CAACsB;AACH,UAAM,IAAInd,MAAM,kBAAkB2c,eAAe,kBAAkB;AAGrE,MAAI,CAACQ,YAAYjP;AACf,UAAM,IAAIlO,MAAM,kBAAkB2c,eAAe,8BAA8B;AAG7EK,UACFzb,OAAO0L,KAAK,2DAA2D0P,eAAe,GAAG,IAEzF,MAAM9P,OAAOM,OAAO;AAAA,IAClB/D,MAAM;AAAA,IACNvF,SAAS,mEAAmEsZ,YAAYjP,WAAW;AAAA;AAAA;AAAA,IAEnGuJ,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGlP,YAAY;AAAA,IAC1CK,UAAW6O,CACFA,UAAAA,UAAU,SAAS;AAAA,EAAA,CAE7B;AAGC,MAAA;AACF,UAAMhU,SAAS,MAAMqT,YAAwBtU,QAAQyT,SAAS;AAC9Dta,WAAOuC,MACL,iBAAiB6Y,eAAe,kBAAkBtT,OAAO6E,WAAW,eACtE;AAAA,WACO5M,KAAK;AACZ,UAAM,IAAItB,MAAM;AAAA,EAAiCsB,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAElE,GC/DMgI,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAoCX6R,eAAqC;AAAA,EACzC3d,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA,CAACuc,IAAI,IAAIxc,KAAKqJ;AACpB,YAAQmT,MAAI;AAAA,MACV,KAAK;AACGzB,cAAAA,mBAAmB/a,MAAMC,OAAO;AACtC;AAAA,MACF,KAAK;AACG2b,cAAAA,mBAAmB5b,MAAMC,OAAO;AACtC;AAAA,MACF,KAAK;AACGqc,cAAAA,mBAAmBtc,MAAMC,OAAO;AACtC;AAAA,MACF,KAAK;AACGmc,cAAAA,iBAAiBpc,MAAMC,OAAO;AACpC;AAAA,MACF;AACE,cAAM,IAAIpB,MAAMma,iBAAAA;AAAAA;AAAAA;AAAAA,SAGf;AAAA,IAAA;AAAA,EACL;AAEJ;ACrDsByD,eAAAA,oBACpBlc,OACAN,SACe;AACT,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C,SAC7BgH,SAASD,aACTrB,YAAYsB,OAAOK,OAAO,EAAE3B,WAC5B2K,QAA2C,CAAC;AAC9C7D,MAAAA;AAEAlM,QAAMmc,UAAUnc,MAAMmc,UAAU,MAClCpM,MAAMoM,SAAS,GAAGnc,MAAMmc,MAAM,KAE5Bnc,MAAMgQ,SAAShQ,MAAMgQ,QAAQ,MAC/BD,MAAMC,QAAQ,GAAGhQ,MAAMgQ,KAAK;AAG1B,MAAA;AACS,eAAA,MAAMtJ,OAAOqH,QAAiC;AAAA,MACvDC,QAAQ;AAAA,MACRG,KAAK,aAAa/I,SAAS;AAAA,MAC3B2K;AAAAA,IAAAA,CACD;AAAA,WACMjQ,OAAO;AACVA,UAAMkM,aACRnM,OAAOC,MAAM,GAAGuC,OAAMwF,IAAI;AAAA,EAA8B/H,MAAMoM,SAASC,KAAKhK,OAAO,EAAE,CAAC;AAAA,CAAI,IAE1FtC,OAAOC,MAAM,GAAGuC,OAAMwF,IAAI;AAAA,EAA8B/H,MAAMqC,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,EAAA;AAI5E+J,MAAAA,YAAYA,SAASjE,SAAS,GAAG;AAC7B6O,UAAAA,QAAQ,IAAIC,0BAAM;AAAA,MACtBE,OAAO;AAAA,MACPD,SAAS,CACP;AAAA,QAAC3Y,MAAM;AAAA,QAAM4Y,OAAO;AAAA,QAAUC,WAAW;AAAA,MAAA,GACzC;AAAA,QAAC7Y,MAAM;AAAA,QAAiB4Y,OAAO;AAAA,QAAkBC,WAAW;AAAA,MAAA,GAC5D;AAAA,QAAC7Y,MAAM;AAAA,QAAiB4Y,OAAO;AAAA,QAAkBC,WAAW;AAAA,MAAA,GAC5D;AAAA,QAAC7Y,MAAM;AAAA,QAAS4Y,OAAO;AAAA,QAASC,WAAW;AAAA,MAAA,GAC3C;AAAA,QAAC7Y,MAAM;AAAA,QAAe4Y,OAAO;AAAA,QAAgBC,WAAW;AAAA,MAAA,GACxD;AAAA,QAAC7Y,MAAM;AAAA,QAAe4Y,OAAO;AAAA,QAAgBC,WAAW;AAAA,MAAA,GACxD;AAAA,QAAC7Y,MAAM;AAAA,QAAa4Y,OAAO;AAAA,QAAcC,WAAW;AAAA,MAAO,CAAA;AAAA,IAAA,CAE9D;AAEDhL,aAAS5F,QAAS8V,CAAQ,QAAA;AAClB,YAAA;AAAA,QAAClX;AAAAA,QAAImX;AAAAA,QAAO7a;AAAAA,QAAW8a;AAAAA,QAAWC;AAAAA,QAAe9B;AAAAA,QAAe+B;AAAAA,MAAAA,IAAeJ;AAErF,UAAIK,cAAc;AACdjb,oBAAc,OAChBib,cAAcC,QAAoBC,oBAAAA,QAAAA,SAASnb,SAAS,CAAC;AAGvD,UAAIob,YAAY;AACZN,oBAAc,OAChBM,YAAYC,uBAAeF,QAAAA,SAASL,SAAS,GAAGK,QAAAA,SAASnb,SAAS,CAAC;AAGjEsb,UAAAA;AACJ,cAAQT,OAAK;AAAA,QACX,KAAK;AACK,kBAAA;AACR;AAAA,QACF,KAAK;AACK,kBAAA;AACR;AAAA,QACF,KAAK;AACK,kBAAA;AACR;AAAA,QACF;AACU,kBAAA;AAAA,MAAA;AAGZvF,YAAMK,OACJ;AAAA,QACEjS;AAAAA,QACAmX;AAAAA,QACAG;AAAAA,QACAC,aAAa,GAAGA,WAAW;AAAA,QAC3BG;AAAAA,QACAL;AAAAA,QACA9B;AAAAA,MAAAA,GAEF;AAAA,QAACqC;AAAAA,MAAAA,CACH;AAAA,IAAA,CACD,GAEDhG,MAAMQ,WAAW;AAAA,EACnB;AACEzX,WAAOuC,MAAM,iDAAiD;AAElE;AC5GO,MAAM2a,eAAeA,CAACrW,QAAsByH,KAAa3G,SAAS,OAAkB;AACnFT,QAAAA,SAASL,OAAOK,OAAO;AAEtB,SAAA,GADMS,SAAST,OAAOiW,SAASjW,OAAOwK,GAC/B,IAAIpD,IAAI2K,QAAQ,OAAO,EAAE,CAAC;AAC1C,GCOM3O,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuCjB,SAAS+J,gBAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrD2W,OAAO,UAAU;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAS,EACjC0T,OAAO,QAAQ;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAU,EAChC0T,OAAO,SAAS;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAS,EAChC0T,OAAO,UAAU;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAS,EACjC0T,OAAO,gBAAgB;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAU,EACxC0T,OAAO,UAAU;AAAA,IAAC1T,MAAM;AAAA,EAAU,CAAA,EAAEmB;AACzC;AAEA,MAAM0G,WAAYgC,CAAAA,SACT,IAAI0L,KAAAA,WAAqCC,CAAa,aAAA;AAC3D,MAAIC,iBAAiB,IAAIC,qBAAAA,QAAY7L,IAAG,GACpC8L,UAAU;AAEd,WAASC,QAAQxd,OAAgB;AAC3Bqd,sBACFA,eAAeI,MAGjBlR,GAAAA,QAAM,mBAAmBvM,KAAK,EAAE,GAC5Bud,CAGJH,YAAAA,SAASM,KAAK;AAAA,MAAC9V,MAAM;AAAA,IAAY,CAAA,GACjCyV,iBAAiB,IAAIC,6BAAY7L,IAAG;AAAA,EAAA;AAGtC,WAASkM,eAAe3d,OAAqB;AAC3Cud,cAAU,IACVF,eAAeI,MACfL,GAAAA,SAASpd,MAAMA,KAAK;AAAA,EAAA;AAGtB,WAAS4d,UAAUC,OAAqB;AACtC,UAAMvR,OAAOvK,KAAK4G,MAAMkV,MAAMvR,IAAI;AAC9BA,SAAKiQ,UAAU,YACjBhQ,QAAM,wBAAwBsR,KAAK,GACnCT,SAASpd,MAAM6d,KAAK,KACXvR,KAAKiQ,UAAU,eACxBhQ,QAAM,2BAA2BsR,KAAK,GACtCC,WAAW,MAEXvR,QAAM,4BAA4BsR,KAAK,GACvCT,SAASM,KAAKpR,IAAI;AAAA,EAAA;AAItB,WAASwR,aAAa;AACLC,mBAAAA,oBAAoB,SAASP,OAAO,GACnDH,eAAeU,oBAAoB,iBAAiBJ,cAAc,GAClEN,eAAeU,oBAAoB,OAAOH,SAAS,GACnDP,eAAeU,oBAAoB,QAAQD,UAAU,GACrDT,eAAeI,MAAAA,GACfL,SAASY,SAAS;AAAA,EAAA;AAGpBX,iBAAeY,iBAAiB,SAAST,OAAO,GAChDH,eAAeY,iBAAiB,iBAAiBN,cAAc,GAC/DN,eAAeY,iBAAiB,OAAOL,SAAS,GAChDP,eAAeY,iBAAiB,QAAQH,UAAU;AACpD,CAAC,GAGGI,iBAAiBA,CACrBC,OACAvX,QACA7G,WACkB;AAClB,MAAIqe,kBAAkB;AAEtB,QAAMnd,UAAUlB,OAAOkB,QAAQ,CAAE,CAAA,EAAE3C,MAAM,GACnC+f,YAAYpB,aAAarW,QAAQ,QAAQuX,KAAK,SAAS;AAEvD,SAAA5R,QAAA,gBAAgB8R,SAAS,EAAE,GAE1B,IAAInb,QAAQ,CAAC7C,SAAS+C,WAAW;AAC7Bib,aAAAA,SAAS,EAAEC,UAAU;AAAA,MAC5BZ,MAAOG,CAAU,UAAA;AACX,eAAOA,MAAMpO,YAAa,aAC5B2O,kBAAkBP,MAAMpO,WAG1BxO,QAAQ4H,OAAO,qBAAqBuV,eAAe;AAAA,MACrD;AAAA,MACApe,OAAQF,CAAQ,QAAA;AACNsC,gBAAAA,QACRgB,OAAO,IAAI5E,MAAM,GAAGsB,IAAIwM,IAAI,EAAE,CAAC;AAAA,MACjC;AAAA,MACA0R,UAAUA,MAAM;AACN9b,gBAAAA,QAAQ,gBAAgB,GAChC7B,QAAQ;AAAA,MAAA;AAAA,IACV,CACD;AAAA,EAAA,CACF;AACH,GAEMke,qBAA6D;AAAA,EACjEhgB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aACE;AAAA,EACFI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQsL;AAAAA,MAAQ9I,OAAAA;AAAAA,IAAAA,IAAS3C,SAErCM,QAA0B,MAAMkU,gBAAczU,IAAI,GAClDiH,SAASD,UAAU;AAEzB,QAAIzG,MAAMkN,MAAM;AACRgP,YAAAA,oBAAoBlc,OAAON,OAAO;AACxC;AAAA,IAAA;AAGF,QAAIM,MAAMse,QAAQ;AAChB,YAAML,QAAQje,MAAMse;AAEpB,UAAI,CAACL;AACG,cAAA,IAAI3f,MAAM,uBAAuB;AAGnC0f,YAAAA,eAAeC,OAAOvX,QAAQ7G,MAAM;AAC1C;AAAA,IAAA;AAGF,UAAM,CAAC0c,eAAe9B,aAAa,IAAIhb,KAAKqJ,oBACtCyV,oBAAoB/F,CAAQxY,CAAAA,MAAM,cAAc,GAEhD0a,YAAY6B,iBAAiBhQ,oBAAoBgQ,aAAa;AAChE7B,QAAAA;AACI,YAAA,IAAIpc,MAAMoc,SAAS;AAG3B,UAAM8D,mBAAmB,MAAM9X,OAAOuG,SACnCC,KAAK,EACL2N,KAAM5N,CAAaA,aAAAA,SAASlI,IAAKgW,CAAOA,OAAAA,GAAG1c,IAAI,CAAC,GAE7CogB,oBAAoB,OAAOlC,iBAC/B3P,qBAAqBzB,QAAQ;AAAA,MAAChJ,SAAS;AAAA,IAAA,CAAuB;AAC5D,QAAA,CAACqc,iBAAiBrG,SAASsG,iBAAiB;AAC9C,YAAM,IAAIngB,MAAM,mBAAmBmgB,iBAAiB,iBAAiB;AAGvE,UAAMC,oBAAoB,OAAOjE,iBAC/B7N,qBAAqBzB,QAAQ;AAAA,MAAChJ,SAAS;AAAA,IAAA,CAAuB;AAC5Dqc,QAAAA,iBAAiBrG,SAASuG,iBAAiB;AAC7C,YAAM,IAAIpgB,MAAM,mBAAmBogB,iBAAiB,kBAAkB;AAGlE9e,UAAAA,MAAM2M,oBAAoBmS,iBAAiB;AAC7C9e,QAAAA;AACI,YAAA,IAAItB,MAAMsB,GAAG;AAGjB,QAAA;AACIsM,YAAAA,WAAW,MAAMxF,OAAOqH,QAA6B;AAAA,QACzDC,QAAQ;AAAA,QACRG,KAAK,aAAasQ,iBAAiB;AAAA,QACnCtS,MAAM;AAAA,UACJsO,eAAeiE;AAAAA,UACfC,aAAaJ;AAAAA,QAAAA;AAAAA,MACf,CACD;AAcD,UAZA1e,OAAOuC,MACL,mBAAmBC,OAAMgM,MAAMoQ,iBAAiB,CAAC,OAAOpc,OAAMgM,MAAMqQ,iBAAiB,CAAC,KACxF,GAEKH,qBACH1e,OAAOuC,MACL,+GACF,GAGFvC
,OAAOuC,MAAM,OAAOC,OAAMgM,MAAMnC,SAAS+R,KAAK,CAAC,UAAU,GAErDje,MAAM4e;AACR;AAGF,YAAMZ,eAAe9R,SAAS+R,OAAOvX,QAAQ7G,MAAM,GACnDA,OAAOuC,MAAM,OAAOC,OAAMgM,MAAMnC,SAAS+R,KAAK,CAAC,YAAY;AAAA,aACpDne,OAAO;AACVA,YAAMkM,aACRnM,OAAOuC,MAAM,GAAGC,OAAMwF,IAAI;AAAA,EAA4B/H,MAAMoM,SAASC,KAAKhK,OAAO,EAAE,CAAC;AAAA,CAAI,IAExFtC,OAAOuC,MAAM,GAAGC,OAAMwF,IAAI;AAAA,EAA4B/H,MAAMqC,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAAA,EAE9E;AAEJ,GC7OMgI,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAUX0U,eAAe,CAAC,WAAW,UAAU,QAAQ,GAM7CC,uBAA0D;AAAA,EAC9DzgB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQsL;AAAAA,IAAAA,IAAUzL,SAC9BM,QAAQP,KAAKQ,YACb,CAACkF,OAAO,IAAI1F,KAAKqJ,oBACjBpC,SAASD,UAAU,GAEnBiU,YAAYvV,WAAWoH,oBAAoBpH,OAAO;AACpDuV,QAAAA;AACI,YAAA,IAAIpc,MAAMoc,SAAS;AAGrB,UAAA,CAACzN,UAAU2N,eAAe,IAAI,MAAM5X,QAAQY,IAAI,CACpD8C,OAAOuG,SAASC,OAAO2N,KAAMC,CAAAA,SAASA,KAAK/V,IAAKgW,CAAAA,OAAOA,GAAG1c,IAAI,CAAC,GAC/DqI,OAAOqH,QAAQ;AAAA,MAACI,KAAK;AAAA,IAAY,CAAA,CAAC,CACnC;AAED,QAAInO,MAAM+e,cAAc,CAACF,aAAa1G,SAASnY,MAAM+e,UAAU;AAC7D,YAAM,IAAIzgB,MAAM,oBAAoB0B,MAAM+e,UAAU,eAAe;AAGrE,UAAMvS,cAAc,OAAOrH,WAAWyH,qBAAqBzB,MAAM;AAC7D8B,QAAAA,SAASkL,SAAS3L,WAAW;AAC/B,YAAM,IAAIlO,MAAM,YAAYkO,WAAW,kBAAkB;AAGrDwS,UAAAA,mBAAmBpE,gBAAgBzC,SAAS,gBAAgB;AAC5D9L,YAAA,8BAA8B2S,mBAAmB,QAAQ,QAAQ;AAGjEC,UAAAA,UAAU,QADOD,mBAAmBhf,MAAM+e,aAAa,aACpBG,2BAA2B/T,QAAQtL,MAAM;AAE9E,QAAA;AACI6G,YAAAA,OAAOuG,SAASS,OAAOlB,aAAa;AAAA,QAACyS;AAAAA,MAAAA,CAAQ,GACnDpf,OAAOuC,MAAM,8BAA8B;AAAA,aACpCxC,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA6BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC5D;AAEJ;AAEA,eAAe+c,2BAA2B/T,QAAqBtL,QAAsB;AAC7Esf,QAAAA,OAAO,MAAMhU,OAAOM,OAA6B;AAAA,IACrD/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACTmL,SAAS,CACP;AAAA,MACEvF,OAAO;AAAA,MACP1J,MAAM;AAAA,IAAA,GAER;AAAA,MACE0J,OAAO;AAAA,MACP1J,MAAM;AAAA,IACP,CAAA;AAAA,EAAA,CAEJ;AAEG8gB,SAAAA,SAAS,aACXtf,OAAOuC,MACL;AAAA,CACF,GAGK+c;AACT;AC3FA,IAAe,eAAA;AAAA,EACb9gB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACDA,MAAMkV,2BAAiD;AAAA,EACrD/gB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPF,UAAU;AAAA,EACVH,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtB,CAAC4K,QAAQyQ,IAAIkE,OAAO,IAAIxf,KAAKqJ,oBAC7BpC,SAASD,UAAU;AAErB,QAAA,CAACC,OAAOuG,SAASoS;AACnB,YAAM,IAAI/gB,MAAM;AAAA,6BAAmE;AAGrF,QAAI,CAACgM;AACG,YAAA,IAAIhM,MAAM,mCAAmC;AAGrD,QAAI,CAAC,CAAC,OAAO,KAAK,EAAE6Z,SAAS7N,MAAM;AAC3B,YAAA,IAAIhM,MAAM,uCAAuC;AAGzD,QAAI,CAACyc;AACG,YAAA,IAAIzc,MAAM,+BAA+B;AAG7CgM,QAAAA,WAAW,SAAS,CAAC2U;AACjB,YAAA,IAAI3gB,MAAM,mDAAmD;AAGrE,UAAM6G,UAAU,GAAG4V,EAAE,IACfQ,UAAUhP,oBAAoBpH,OAAO;AACvCoW,QAAAA;AACI,YAAA,IAAIjd,MAAMid,OAAO;AAGnB3V,UAAAA,WAAW,MAAMc,OAAOuG,SAASC,KAAAA,GAAQpD,KAAMwV,CAAAA,SAASA,KAAKjhB,SAAS8G,OAAO;AAEnF,QAAI,CAACS;AACG,YAAA,IAAItH,MAAM,mBAAmB;AAGrC,QAAIgM,WAAW,OAAO;AACblI,aAAAA,MAAMwD,QAAQqZ,OAAO;AAC5B;AAAA,IAAA;AAGErZ,QAAAA,QAAQqZ,YAAYA,SAAS;AACxB7c,aAAAA,MAAM,uBAAuB6c,OAAO,QAAQ;AACnD;AAAA,IAAA;AAGEA,gBAAY,aACdpf,OAAOuC,MACL;AAAA,CACF,GAGF,MAAMsE,OAAOuG,SAASoS,KAAKla,SAAS;AAAA,MAAC8Z;AAAAA,IAAAA,CAAyC,GAC9Epf,OAAOuC,MAAM,4BAA4B;AAAA,EAAA;AAE7C,GC5DM+H,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUjB,SAAS+J,gBAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EAAE2W,OAAO,SAAS;AAAA,IAAC1T,MAAM;AAAA,EAAU,CAAA,EAAEmB;AAC/F;AAMA,MAAM0W,uBAAiE;AAAA,EACrElhB,MAAM;AAAA,EACNgM,OAAO;AAAA,EAAA,UACPF;AAAAA,EACAH,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW0E;AAAAA,MAAQtL;AAAAA,QAAUH,
SAC9B;AAAA,MAAC4b;AAAAA,IAAAA,IAAS,MAAMpH,gBAAczU,IAAI,GAClC,CAACsb,EAAE,IAAItb,KAAKqJ;AAClB,QAAI,CAACiS;AACG,YAAA,IAAIzc,MAAM,+BAA+B;AAGjD,UAAM6G,UAAU,GAAG4V,EAAE,IACfQ,UAAUhP,oBAAoBpH,OAAO;AACvCoW,QAAAA;AACIA,YAAAA;AAGJD,YACFzb,OAAO0L,KAAK,4DAA4DpG,OAAO,GAAG,IAElF,MAAMgG,OAAOM,OAAO;AAAA,MAClB/D,MAAM;AAAA,MACNvF,SACE;AAAA;AAAA,MACF4T,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGC,KAAK;AAAA,MACnC9O,UAAW6O,CACFA,UAAAA,UAAUxW,WAAW;AAAA,IAAA,CAE/B,GAGH,MAAMsB,UAAU,EAAEwG,SAASuS,OAAOra,OAAO,GACzCtF,OAAOuC,MAAM,8BAA8B;AAAA,EAAA;AAE/C,GClDMqd,OAAOA,MAAM,MAEbtV,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAwCjB,SAASuV,aAAWC,UAA0C;AAC5D,QAAM3f,QAA2B,CAAC;AAClC,SAAI2f,SAASC,UACX5f,MAAM4f,QAAQ,GAAGD,SAASC,KAAK,GAAGC,MAAM,GAAG,IAGzCF,SAAS,mBAAmB,MAC9B3f,MAAM8f,mBAAmBC,SAASJ,SAAS,mBAAmB,GAAG,EAAE,IAGjE,OAAOA,SAASK,MAAQ,QAC1BhgB,MAAMggB,MAAMxH,CAAAA,CAAQmH,SAASK,MAG3B,OAAOL,SAASM,SAAW,QAC7BjgB,MAAMigB,SAASzH,CAAQmH,CAAAA,SAASM,SAG9B,OAAON,SAASO,SAAW,QAC7BlgB,MAAMkgB,SAAS1H,CAAQmH,CAAAA,SAASO,SAG9B,OAAOP,SAASQ,WAAa,QAC/BngB,MAAMmgB,WAAW3H,CAAAA,CAAQmH,SAASQ,WAGhC,OAAOR,SAAS3J,YAAc,QAChChW,MAAMgW,YAAYwC,CAAQmH,CAAAA,SAAS3J,YAGjC,OAAO2J,SAASR,OAAS,QAC3Bnf,MAAMmf,OAAOQ,SAASR,OAGjBnf;AACT;AASA,MAAMogB,uBAA0D;AAAA,EAC9D/hB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,MAAOtC;AAAAA,MAASoL;AAAAA,IAAUzL,IAAAA,SAC9CgH,SAASD,aACT,CAACgU,eAAe4F,iBAAiB,IAAI5gB,KAAKqJ,oBAC1C9I,QAAQ0f,aAAWjgB,KAAKQ,UAAU;AAExC,QAAIkF,UAAUsV,gBAAgB,GAAGA,aAAa,KAAK;AAC9CtV,gBACHA,UAAU,MAAM4H,oBAAoBrN,SAAS;AAAA,MAACyC,SAAS;AAAA,IAAA,CAA2B;AAG9EoZ,UAAAA,UAAUhP,oBAAoBpH,OAAO;AACvCoW,QAAAA;AACIA,YAAAA;AAKJ,QAAA,EADa,MAAM7U,OAAOuG,SAASC,KAAAA,GACzBpD,KAAM2J,CAAAA,QAAQA,IAAIpV,SAAS8G,OAAO;AAC9C,YAAM,IAAI7G,MAAM,sBAAsB6G,OAAO,aAAa;AAItD,UAAA;AAAA,MAACC;AAAAA,IAAAA,IAAasB,OAAOK,OAAO;AAE3B3E,WAAAA,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,WAAW,CAAC,KAAKpS,OAAMgE,KAAKjB,SAAS,EAAEgB,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,SAAS,CAAC,KAAKpS,OAAMgE,KAAKlB,OAAO,EAAEiB,OAAO,EAAE,CAAC,SAAI,GAC9EvG,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,EAAE;AAEf,QAAIke,kBAAkBD;AACjBC,wBACHA,kBAAkB,MAAMnV,OAAOM,OAAO;AAAA,MACpC/D,MAAM;AAAA,MACNvF,SAAS;AAAA,MACTsI,SAASjK,cAAKJ,QAAAA,KAAKL,SAAS,GAAGoF,OAAO,SAAS;AAAA,MAC/C4Q,QAAQD,KAAAA;AAAAA,IAAAA,CACT;AAGH,UAAMyK,aAAa,MAAMC,cAAcF,iBAAiBnb,SAASgG,QAAQnL,KAAK;AAC9E,QAAI,CAACugB,YAAY;AACf1gB,aAAOuC,MAAM,WAAW;AACxB;AAAA,IAAA;AAIEme,mBAAe,OACjB1gB,OAAOuC,MAAM,sBAAsBC,OAAMgE,KAAKlB,OAAO,CAAC,SAAS9C,OAAMgE,KAAKka,UAAU,CAAC,GAAG;AAG1F,QAAIE,cAAc,0BACd1f,UAAUlB,OAAOkB,QAAQ0f,WAAW,EAAEriB,MAAM;AAChD,UAAMsiB,aAAcnR,CAA4BA,cAAA;AAC1CA,MAAAA,UAAS8D,SAASoN,eACpB1f,QAAQiB,WACRjB,UAAUlB,OAAOkB,QAAQwO,UAAS8D,IAAI,EAAEjV,WAC/BmR,UAAS8D,SAASoN,eAAelR,UAAShL,WACnDxD,QAAQ4H,OAAO,GAAG4G,UAAS8D,IAAI,KAAK9D,UAAS3J,OAAO,IAAI2J,UAASiE,KAAK,MAGxEiN,cAAclR,UAAS8D;AAAAA,IAAAA,GAGnBjV,QAAQqD,KAAKjD,IAAI;AACnB,QAAA;AACF,YAAMmiB,+BAAc;AAAA,QAClBja;AAAAA,QACAvB;AAAAA,QACAob;AAAAA,QACAG;AAAAA,QACA,GAAG1gB;AAAAA,MAAAA,CACJ,GACDe,QAAQiB,QAAQ;AAAA,aACTpC,KAAK;AACZmB,YAAAA,QAAQmB,QACFtC;AAAAA,IAAAA;AAGDwC,WAAAA,MAAM,oBAAoBmR,kBAAS9R,QAAAA,KAAKjD,QAAQJ,KAAK,CAAC,GAAG;AAAA,EAAA;AAEpE;AAGA,eAAeoiB,cACbI,aACAzb,SACAgG,QACAnL,OACA;AACA,MAAI4gB,gBAAgB;AACX,WAAA;AAGHC,QAAAA,UAAUrgB,cAAAA,QAAKsgB,WAAWF,WAAW,IACvCA,cACApgB,cAAAA,QAAKL,QAAQnB,QAAQ4B,IAAI,GAAGggB,WAAW;AAE3C,MAAIG,WAAW,MAAMvR,sBAAGwR,KAAKH,OAAO,EAAEI,MAAM
xB,IAAI;AAC1CyB,QAAAA,gBAAgBH,WAAWA,SAASI,OAAO,IAAI3gB,cAAAA,QAAKoR,SAASiP,OAAO,EAAEO,QAAQ,GAAG,MAAM;AAE7F,MAAI,CAACL,UAAU;AACb,UAAMM,aAAaH,gBAAgB1gB,cAAAA,QAAKgC,QAAQqe,OAAO,IAAIA;AAErDrR,UAAAA,cAAAA,QAAGtO,MAAMmgB,YAAY;AAAA,MAAClgB,WAAW;AAAA,IAAA,CAAK;AAAA,EAAA;AAGxCmgB,QAAAA,YAAYJ,gBAAgBL,UAAUrgB,cAAAA,QAAKJ,KAAKygB,SAAS,GAAG1b,OAAO,SAAS;AAGlF,SAFA4b,WAAW,MAAMvR,sBAAGwR,KAAKM,SAAS,EAAEL,MAAMxB,IAAI,GAE1C,CAACzf,MAAMgW,aAAa+K,YAAYA,SAASI,OAAAA,KAOvC,CANoB,MAAMhW,OAAOM,OAAO;AAAA,IAC1C/D,MAAM;AAAA,IACNvF,SAAS,SAASmf,SAAS;AAAA,IAC3B7W,SAAS;AAAA,EAAA,CACV,IAGQ,KAIJ6W;AACT;ACtNA,MAAM5I,SAAU1S,CAAAA,QAAgB,WAAaA,GAAG,YAE1CmE,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA8DjB,SAASoX,YAAYC,MAAoC;AACvD,SAAO,OAAOA,OAAS,MAAc/T,SAAY+K,CAAQgJ,CAAAA;AAC3D;AAEA,SAAS9B,WAAWC,UAA0C;AACtD8B,QAAAA,gCAAgCF,YAAY5B,SAAS,mCAAmC,CAAC,GACzF+B,qBAAqBH,YAAY5B,SAAS,sBAAsB,CAAC,GACjEG,mBAAmByB,YAAY5B,SAAS,mBAAmB,CAAC,GAC5DgC,gBAAgBJ,YAAY5B,SAAS,gBAAgB,CAAC,GACtDiC,6BAA6BL,YAAY5B,SAAS,+BAA+B,CAAC,GAClFkC,uBAAuBN,YAAY5B,SAAS,wBAAwB,CAAC,GACrE7G,UAAUyI,YAAY5B,SAAS7G,OAAO,GACtCgJ,UAAUP,YAAY5B,SAASmC,OAAO;AACrC,SAAA;AAAA,IACLL;AAAAA,IACAC;AAAAA,IACA5B;AAAAA,IACA8B;AAAAA,IACAC;AAAAA,IACAF;AAAAA,IACA7I;AAAAA,IACAgJ;AAAAA,EACF;AACF;AAEA,MAAMC,uBAA6C;AAAA,EACjD1jB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA;AAAAA,EAEAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,MAAO2f;AAAAA,QAAmBtiB,SAC9CM,QAAQ0f,WAAWjgB,KAAKQ,UAAU,GAClC;AAAA,MACJwhB;AAAAA,MACAC;AAAAA,MACA5B;AAAAA,MACA8B;AAAAA,MACAC;AAAAA,MACAF;AAAAA,IACE3hB,IAAAA,OAEE8Q,YAAYmR,qBAAqBxiB,KAAKQ,UAAU,GAChDyG,SAASD,UAET,GAAA,CAACsM,MAAMmP,MAAM,IAAIziB,KAAKqJ;AAC5B,QAAI,CAACiK;AACH,YAAM,IAAIzU,MACR,iFAAiF+D,OAAMoS,KACrF,QACF,CAAC,cACH;AAGF,UAAMgG,gBAAgB,MAAM0H,uBAAuBD,QAAQxiB,OAAO;AAC5D2M,YAAA,mCAAmCoO,aAAa,GAAG;AAEnD2H,UAAAA,QAAQ,gBAAgBzV,KAAKoG,IAAI;AACnCsP,QAAAA,aACAC,YACAC,iBAAiB;AAEjBH,QAAAA;AACF/V,cAAM,2CAA2C,GACjDgW,cAAc,MAAMG,aAAazP,IAAI;AAAA,SAChC;AACL,YAAM0P,aAAajiB,cAAAA,QAAKL,QAAQnB,QAAQ4B,OAAOmS,IAAI,GAC7C2P,YAAY,MAAMlT,sBAAGwR,KAAKyB,UAAU,EAAExB,MAAM,MAAM,IAAI;AAC5D,UAAI,CAACyB;AACH,cAAM,IAAIpkB,MAAM,GAAGmkB,UAAU,oCAAoC;AAGnEF,uBAAiBG,UAAUC,YAAAA,GACvBJ,iBACFF,cAAcI,cAEdH,aAAa9hB,cAAAA,QAAKgC,QAAQigB,UAAU,GACpCJ,cAAc,MAAMO,KAAAA,iBAAiBH,UAAU;AAAA,IAAA;AAInD,UAAMI,eAAenc,OAAOoc,MAAM,EAAE/b,OAAO;AAAA,MAAC5B,SAASsV;AAAAA,IAAAA,CAAc,GAG7D;AAAA,MAACrV;AAAAA,MAAWD;AAAAA,IAAAA,IAAW0d,aAAa9b,OAAO;AAE1C3E,WAAAA,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,WAAW,CAAC,KAAKpS,OAAMgE,KAAKjB,SAAS,EAAEgB,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMoS,KAAK,SAAS,CAAC,KAAKpS,OAAMgE,KAAKlB,OAAO,EAAEiB,OAAO,EAAE,CAAC,SAAI,GAC9EvG,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,EAAE;AAEf,QAAIqe,aACAvC,iBACA6E,WACAC,eAAsD,MACtDC;AAEJ,aAASvC,WAAWpM,MAAqB;AACvC,YAAM4O,mBAAmB5O,KAAKd,OACxB2P,WAAW7O,KAAKjB,QAAQoN;AAU9B,UATAwC,UAAUG,cAAc9O,IAAI,GAExB4O,oBAAoB5O,KAAKd,UAAUc,KAAK1O,YACtCod,gBACFK,cAAcL,YAAY,GAE5BA,eAAe,OAGbG;AACF;AAIF,YAAMG,WAAW7C,aACX8C,gBAAgBR,aAAathB,KAAKjD,IAAI;AAC5CukB,UAAAA,YAAYthB,KAAKjD,OACjBiiB,cAAcnM,KAAKjB,MAEf6K,mBAAmBA,gBAAgBlc,SAAS;AAC9C,cAAMwhB,YAAYjQ,kBAAAA,QAAS9R,KAAKjD,IAAAA,IAAQ+kB,eAAe;AAAA,UACrDE,sBAAsB;AAAA,QAAA,CACvB;AACDvF,wBAAgBvV,OAAO,UAAU2a,QAAQ,KAAKE,SAAS,KACvDtF,gBAAgBlc,QAAQ;AAAA,MAAA;AAG1Bkc,wBAAkBre,OAAOkB,QAAQ,QAAQuT,KAAKjB,IAAI,UAAU,EAAEjV,MAE1D4kB,GAAAA,iBACFK,cAAcL,YAAY,GAC1BA,eAAe,OAGjBA,eAAeU,YA
AY,MAAM;AAC/B,cAAMF,YAAYjQ,kBAAAA,QAAS9R,KAAKjD,IAAAA,IAAQ+kB,eAAe;AAAA,UACrDE,sBAAsB;AAAA,QAAA,CACvB;AAEGvF,4BACFA,gBAAgBvV,OAAO,GAAGsa,OAAO,GAAG3O,KAAKjB,IAAI,KAAKmQ,SAAS;AAAA,SAE5D,EAAE;AAAA,IAAA;AAGP,aAASG,QAAQ;AAAA,MAACC;AAAAA,IAAAA,GAA8B;AAC1CZ,UAAAA,gBACFK,cAAcL,YAAY,GAG5BA,eAAe,MAEXY,WAAWb,aAAa7E,iBAAiB;AAC3C,cAAMsF,YAAYjQ,kBAAAA,QAAS9R,KAAKjD,IAAAA,IAAQukB,WAAW;AAAA,UACjDU,sBAAsB;AAAA,QAAA,CACvB;AACDvF,wBAAgBvV,OAAO,UAAU8X,WAAW,KAAK+C,SAAS,KAC1DtF,gBAAgBlc,QAAQ;AAAA,MAAA,MACfkc,oBACTA,gBAAgBhc,KAAK;AAAA,IAAA;AAKrB,QAAA;AACI,YAAA;AAAA,QAAC2hB;AAAAA,QAASC;AAAAA,MAAAA,IAAY,MAAMC,sBAAAA,QAAa1B,aAAa;AAAA,QAC1D3b,QAAQmc;AAAAA,QACRP;AAAAA,QACAxR;AAAAA,QACA4P;AAAAA,QACAgB;AAAAA,QACAD;AAAAA,QACAG;AAAAA,QACAC;AAAAA,QACA/B;AAAAA,QACA6B;AAAAA,MAAAA,CACD;AAEO,cAAA;AAAA,QAACiC,SAAS;AAAA,MAAA,CAAK,GAEvB/jB,OAAOuC,MAAM;AAAA,GAAiDyhB,SAASpJ,aAAa,GACpFuJ,cAAcF,UAAUjkB,MAAM;AAAA,aACvBD,KAAK;AASZ,UARA+jB,QAAQ;AAAA,QAACC,SAAS;AAAA,MAAM,CAAA,GAQpB,EALF,CAAC5B,mBACDpiB,IAAIsM,YACJtM,IAAIsM,SAASF,eAAe,OAC5BpM,IAAIyT,SAAS;AAGPzT,cAAAA;AAGFuC,YAAAA,UAAU,CACdvC,IAAIuC,SACJ,IACA,6BACA,yDACA,gEACA,EAAE,EACF/B,KAAK;AAAA,CAAI,GAGLN,QAAQ,IAAIxB,MAAM6D,OAAO;AACzB8hB,YAAAA,MAAAA,UAAUrkB,IAAIqkB,SACpBnkB,MAAMoM,WAAWtM,IAAIsM,UACrBpM,MAAMokB,eAAetkB,IAAIskB,cAEnBpkB;AAAAA,IAAAA;AAAAA,EACR;AAEJ;AAEA,eAAeqiB,uBAAuBD,QAAgBxiB,SAA4B;AAC1E,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQsL;AAAAA,EAAAA,IAAUzL,SAC9BgH,SAASD,UAAU;AAEzB,MAAIyb,QAAQ;AACJ3G,UAAAA,UAAUhP,oBAAoB2V,MAAM;AACtC3G,QAAAA;AACI,YAAA,IAAIjd,MAAMid,OAAO;AAAA,EAAA;AAI3BlP,UAAM,6BAA6B;AAC7BtL,QAAAA,UAAUlB,OAAOkB,QAAQ,6BAA6B,EAAE3C,MAAM,GAC9D6O,WAAW,MAAMvG,OAAOuG,SAASC,KAAK;AAC5CnM,UAAQiB,QAAQ,oCAAoC;AAEpD,MAAIyY,gBAAgByH,SAAS,GAAGA,MAAM,KAAK;AAC3C,MAAI,CAACzH;AACa,oBAAA,MAAM1N,oBAAoBrN,SAAS;AAAA,MACjDyC,SAAS;AAAA,MACT6K,eAAe;AAAA,IAAA,CAChB;AAAA,WACQ,CAACC,SAASnD,KAAM3E,aAAYA,QAAQ9G,SAASoc,aAAa,GAAG;AAQtE,QAPApO,QAAM,uDAAuD,GAOzD,CANiB,MAAMlB,OAAOM,OAAO;AAAA,MACvC/D,MAAM;AAAA,MACNvF,SAAS,YAAYsY,aAAa;AAAA,MAClChQ,SAAS;AAAA,IAAA,CACV;AAGC,YAAM,IAAInM,MAAM,YAAYmc,aAAa,kBAAkB;AAGvD/T,UAAAA,OAAOuG,SAASS,OAAO+M,aAAa;AAAA,EAAA;AAGrCA,SAAAA;AACT;AAEA,SAASwH,qBAAqBjiB,OAA0B;AAChD,QAAA;AAAA,IAAC8Y;AAAAA,IAASgJ;AAAAA,EAAAA,IAAW9hB;AAC3B,MAAI8Y,WAAWgJ;AACP,UAAA,IAAIxjB,MAAM,yCAAyC;AAG3D,SAAI0B,MAAM8Y,UACD,oBAGL9Y,MAAM8hB,UACD,sBAGF;AACT;AAEA,SAASsB,cAAc9O,MAAqB;AAC1C,MAAI,CAACA,KAAKd,SAAS,OAAOc,KAAK1O,UAAY;AAClC,WAAA;AAGT,QAAMqd,UAAUnd,KAAK6N,MAAOW,KAAK1O,UAAU0O,KAAKd,QAAS,GAAG;AAC5D,SAAO,IAAI2Q,kBAAS,QAAA,GAAGlB,OAAO,IAAI,GAAG,GAAG,CAAC;AAC3C;AAEA,SAAST,aAAajR,MAAa;AACjBJ,SAAAA,MAAAA,MAAM,CAACE,mBAAQ;AAAA,IAAC+S,UAAU;AAAA,EAAK,CAAA,CAAC,CAAC,EAClC;AAAA,IAAC7S,KAAAA;AAAAA,IAAKW,QAAQ;AAAA,EAAA,CAAK;AACpC;AAEA,SAAS8R,cAAcF,UAA2BjkB,QAAsB;AACtE,QAAMwkB,aAAaP,SAAS/N,OAAQxK,CAAAA,UAASA,MAAK7D,SAAS,OAAO;AAElE,MAAI,CAAC2c,WAAWpc;AACd;AAGF,QAAMsD,QAAQ1L,OAAO0L,QAAQ1L,OAAOuC,OAAOkiB,KAAKzkB,MAAM;AAEjD6Y,OAAAA,OAAO,2CAAsC,GAAG2L,WAAWpc,SAAS,IAAI,WAAW,OAAO,GAE/F6b,SAASxd,QAASsS,CAAY,YAAA;AACvB,SAAA,KAAKA,QAAQrH,GAAG,EAAE;AAAA,EAAA,CACxB;AACH;AC7XagT,MAAAA,qBAAuC,OAAO9kB,MAAMC,YAAY;AACrE,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,EAAAA,IAAUH,SACtBgH,SAASD,UAAAA,GAETkU,UAAU,MAAMK,YAAwBtU,MAAM;AACpD7G,SAAOuC,MACLuY,QACG5V,IAAK0O,CAAAA,QAAQ,GAAGuG,YAAY,GAAGvG,IAAIpV,IAAI,OAAOoV,IAAIjH,eAAe,YAAY,EAAE,EAC/EpM,KAAK;AAAA,CAAI,CACd;AACF,GCXMokB,sBAA4C;AAAA,EAChDnmB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPF,UAAU;AAAA,EACVH,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SAEtBuN,WAAW,MADFxG,UAAU,EACKwG,SAASC,KAAK;AAC5CrN,WAAOuC,MAAM6K,SAASlI,IAAK0O,SAAQA
,IAAIpV,IAAI,EAAE+B,KAAK;AAAA,CAAI,CAAC,GAGvD,MAAMmkB,mBAAmB9kB,MAAMC,OAAO;AAAA,EAAA;AAE1C,GCXMyK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeXsa,gBAAsC;AAAA,EAC1CpmB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,mBAAmC;AAAA,MAEjD+K,QAAQhL,MAAMC,OAAO;AAAA,EAElCyK,UAAAA;AACF,GC9BMA,aAAW;AAAA;AAAA;AAAA,GAKXua,kBAAwC;AAAA,EAC5CrmB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,qBAAqC;AAAA,MAEnD+K,QAAQhL,MAAMC,OAAO;AAAA,EAElCyK,UAAAA;AACF,GCIMA,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GA0BXwa,yBAA4D;AAAA,EAChEtmB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA;AAAA,EAEbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,QAAUH,SACtB;AAAA,MAACoZ;AAAAA,MAASgJ;AAAAA,MAAS8C;AAAAA,MAAO1f;AAAAA,MAAIC;AAAAA,IAAAA,IAAW1F,KAAKQ,YAC9C,CAAC8S,IAAI,IAAItT,KAAKqJ,oBACd+b,WAAWplB,KAAKQ,WAAW6kB,OAC3Bpe,SAASvB,UAAUsB,YAAYqc,QAAQ/b,OAAO;AAAA,MAAC5B;AAAAA,IAAQ,CAAA,IAAIsB,UAAU;AAE3E,QAAIqS,WAAWgJ;AACP,YAAA,IAAIxjB,MAAM,yCAAyC;AAG3D,QAAI4G,MAAM6N;AACF,YAAA,IAAIzU,MAAM,6CAA6C;AAG/D,QAAIwS,YAAmC;AAKvC,SAJIgI,WAAWgJ,aACbhR,YAAYgI,UAAU,oBAAoB,sBAGxC/F,MAAM;AACFgS,YAAAA,cAAcvkB,sBAAKL,QAAQnB,QAAQ4B,IAAOmS,GAAAA,IAAI,GAC9C7O,UAAU4gB,eAAAA,QAAMrc,MAAM,MAAM+G,cAAAA,QAAGwV,SAASD,aAAa,MAAM,CAAC,GAC5Dpd,SAAS,MAAMsd,eAAe/gB,SAAS4M,WAAWpK,MAAM;AAC9D7G,aAAOuC,MAAM8iB,iBAAiBvd,QAAQmJ,SAAS,CAAC;AAChD;AAAA,IAAA;AAIF,UAAMqU,QAAQjgB,MAAMkgB,KAAAA,KAAK,GACnBC,MAAMR,WAAW,UAAU,QAC3BS,UAAU9kB,cAAKJ,QAAAA,KAAKmlB,YAAG3Q,QAAAA,OAAAA,GAAU,cAAc,GAAGuQ,KAAK,IAAIE,GAAG,EAAE,GAChEvjB,YAAY+iB,WAAWC,eAAAA,QAAMhjB,YAAYD,KAAKC,WAC9C0jB,eAAgBtgB,MAAO,MAAMwB,OAAOa,YAAYrC,EAAE,KAAO;AAAA,MAACD,KAAKkgB;AAAAA,MAAOxb,OAAO;AAAA,IAAY;AACzF6F,UAAAA,cAAAA,QAAGtO,MAAMV,sBAAKJ,KAAKmlB,YAAAA,QAAG3Q,OAAO,GAAG,YAAY,GAAG;AAAA,MAACzT,WAAW;AAAA,IAAA,CAAK,GACtE,MAAMqO,sBAAG5N,UAAU0jB,SAASxjB,UAAU0jB,cAAc,MAAM,CAAC,GAAG,MAAM;AAEpE,UAAMC,SAASC,UAAU;AACrBd,aAEFe,uBAAuBL,OAAO,GAC9BzlB,OAAOuC,MAAM,eAAekjB,OAAO,EAAE,GACrCzlB,OAAOuC,MAAM,gDAAgD,GAC7DvC,OAAOuC,MAAM,kDAAkD,GAC/DwjB,0BAAShB,MAAMU,OAAO,EAAExW,GAAG,UAAU,OACnCjP,OAAOuC,MAAM,EAAE,GACRyjB,8BAA8BP,OAAO,EAC7C,GACDQ,eAAAA,QAAML,OAAOM,KAAKN,OAAOhmB,KAAKumB,OAAOV,OAAO,GAAG;AAAA,MAACW,OAAO;AAAA,IAAA,CAAU,MAGjEH,eAAAA,QAAMnlB,KAAK8kB,OAAOM,KAAKN,OAAOhmB,KAAKumB,OAAOV,OAAO,GAAG;AAAA,MAACW,OAAO;AAAA,IAAU,CAAA,GACtE,MAAMJ,8BAA8BP,OAAO,GAC3C,MAAM9V,cAAAA,QAAG0W,OAAOZ,OAAO,EAAErE,MAAMxB,cAAAA,OAAI;AAGrC,mBAAeoG,8BAA8BM,UAAkB;AACzDjiB,UAAAA;AACA,UAAA;AACFA,kBAAU4gB,uBAAMrc,MAAM,MAAM+G,sBAAGwV,SAASmB,UAAU,MAAM,CAAC;AAAA,eAClDvmB,KAAK;AACZC,eAAOC,MAAM,yBAAyBF,IAAIuC,OAAO,EAAE;AACnD;AAAA,MAAA;AAGEikB,UAAAA,iBAAAA,QAAQliB,SAASshB,YAAY,GAAG;AAClC3lB,eAAOuC,MAAM,oCAAoC,GACjDvC,OAAOuC,MAAM,sCAAsC;AACnD;AAAA,MAAA;AAGE,UAAA;AACF,cAAMikB,cAAc,MAAMpB,eAAe/gB,SAAS4M,WAAWpK,MAAM;AACnE7G,eAAOuC,MAAM8iB,iBAAiBmB,aAAavV,SAAS,CAAC;AAAA,eAC9ClR,KAAK;AACZC,eAAOC,MAAM,8BAA8BF,IAAIuC,OAAO,EAAE,GACpDvC,IAAIuC,QAAQgW,SAAS,gBAAgB,KACvCtY,OAAOC,MAAM,qDAAqD;AAAA,MAAA;AAAA,IAEtE;AAAA,EACF;AAEJ;AAEA,SAAS6lB,uBAAuBL,SAAiB;AACvCxW,UAAAA,GAAG,UAAU,YAAY;AACzBU,UAAAA,cAAAA,QAAG0W,OAAOZ,OAAO,EAAErE,MAAMxB,cAAAA,OAAI,GAEnCzgB,QAAQ0M,KAAK,GAAG;AAAA,EAAA,CACjB;AACH;AAEA,SAASuZ,eACPqB,WACAxV,WACApK,QACA;AACA,QAAM6f,OAAOC,MAAMC,QAAQH,SAAS,IAAIA,YAAY,CAACA,SAAS;AAC9D,MAAIC,KAAKte,WAAW;AACZ,UAAA,IAAI3J,MAAM,uBAAuB;AAGzC,QAAMooB,YAAYH,KAAKxhB,IAAI,CAACuQ,KAAKzP,UAAoB;AAEnD,QADA
8gB,iBAAiBrR,KAAKzP,OAAO0gB,IAAI,GAC7BzV,cAAc;AACT,aAAA;AAAA,QAACpD,QAAQ4H;AAAAA,MAAG;AAGrB,QAAIxE,cAAc,qBAAqB;AACrC,UAAI8V,2BAA2BtR,GAAG;AACzB,eAAA;AAAA,UAACuR,mBAAmBvR;AAAAA,QAAG;AAGhC,YAAM,IAAIhX,MAAM,sCAAsCwS,SAAS,EAAE;AAAA,IAAA;AAGnE,QAAIA,cAAc,mBAAmB;AACnC,UAAI8V,2BAA2BtR,GAAG;AACzB,eAAA;AAAA,UAAC5L,iBAAiB4L;AAAAA,QAAG;AAG9B,YAAM,IAAIhX,MAAM,sCAAsCwS,SAAS,EAAE;AAAA,IAAA;AAGnE,UAAM,IAAIxS,MAAM,yBAAyBwS,SAAS,EAAE;AAAA,EAAA,CACrD;AAED,SAAOpK,OAAO+C,YAAYid,SAAS,EAAE9c,OAAO;AAC9C;AAEA,SAAS+c,iBAAiBrR,KAAczP,OAAeihB,KAAgB;AAC/DC,QAAAA,WAAWD,IAAI7e,WAAW;AAE5B,MAAA,CAAC+e,+BAAc1R,GAAG;AACpB,UAAM,IAAIhX,MAAM2oB,gBAAgB,qBAAqBphB,OAAOkhB,QAAQ,CAAC;AAGnE,MAAA,CAACG,oBAAoB5R,GAAG;AAC1B,UAAM,IAAIhX,MAAM2oB,gBAAgB,+CAA+CphB,OAAOkhB,QAAQ,CAAC;AAEnG;AAEA,SAASG,oBAAoB5R,KAAsC;AAE/DA,SAAAA,QAAQ,QACR,OAAOA,OAAQ,YACf,WAAWA,OACX,OAAQA,IAAY3L,SAAU;AAElC;AAEA,SAASid,2BAA2BtR,KAAmD;AAC9E4R,SAAAA,oBAAoB5R,GAAG,KAAK,SAASA;AAC9C;AAEA,SAAS2R,gBAAgB9kB,SAAiB0D,OAAekhB,UAA2B;AAClF,SAAOA,WAAW,YAAY5kB,OAAO,KAAK,qBAAqB0D,KAAK,IAAI1D,OAAO;AACjF;AAEA,SAAS+iB,iBACPvd,QACAmJ,WACQ;AACR,QAAMqW,SAAS;AAAA;AACf,MAAIrW,cAAc;AACT,WAAA;AAAA,MAAkBnJ,OAAOyf,QAAQriB,IAAKsiB,CAAAA,QAAQA,IAAIniB,EAAE,EAAE9E,KAAK+mB,MAAM,CAAC;AAG3E,MAAIrW,cAAc;AACT,WAAA;AAAA,MAAiBnJ,OAAOyf,QAAQriB,IAAKsiB,CAAAA,QAAQA,IAAIniB,EAAE,EAAE9E,KAAK+mB,MAAM,CAAC;AAI1E,QAAMG,UAAoB,IACpBC,UAAoB,CAAE;AAC5B,aAAWF,OAAO1f,OAAOyf;AACnBC,QAAIvW,cAAc,WACpByW,QAAQlkB,KAAKgkB,IAAIniB,EAAE,IAEnBoiB,QAAQjkB,KAAKgkB,IAAIniB,EAAE;AAIvB,SAAIoiB,QAAQrf,SAAS,KAAKsf,QAAQtf,SAAS,IAClC,CACL;AAAA,MAAiBqf,QAAQlnB,KAAK+mB,MAAM,CAAC,IACrC,4BAA4BA,MAAM,GAAGI,QAAQnnB,KAAK+mB,MAAM,CAAC,EAAE,EAC3D/mB,KAAK;AAAA;AAAA,CAAM,IACJknB,QAAQrf,SAAS,IACnB;AAAA,MAAiBqf,QAAQlnB,KAAK+mB,MAAM,CAAC,KAGvC;AAAA,MAAkCI,QAAQnnB,KAAK+mB,MAAM,CAAC;AAC/D;AAEA,SAASzB,YAAY;AACnB,QAAM8B,gBAAgB,OAAO7a,KAAK3N,QAAQyoB,QAAQ,IAAI,YAAY,OAG5DhoB,QADST,QAAQC,IAAIyoB,UAAU1oB,QAAQC,IAAI0oB,UAAUH,eACvC3H,MAAM,KAAK;AAExB,SAAA;AAAA,IAACkG,KADItmB,KAAKmoB,MAAAA,KAAW;AAAA,IACfnoB;AAAAA,EAAI;AACnB;ACpQA,MAAM0K,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAwBX0d,yBAA4D;AAAA,EAChExpB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAC7B;AAAA,MAACyF;AAAAA,IAAO,IAAI1F,KAAKQ,YACjB6nB,MAAMroB,KAAKqJ,mBAAmB/D,IAAKiB,CAAAA,QAAQ,GAAGA,GAAG,EAAE;AAEzD,QAAI,CAAC8hB,IAAI7f;AACD,YAAA,IAAI3J,MAAM,+BAA+B;AAGjD,UAAMoI,SAASvB,UAAUsB,UAAYqc,EAAAA,MAAAA,EAAQ/b,OAAO;AAAA,MAAC5B;AAAAA,IAAQ,CAAA,IAAIsB,UAE3DgD,GAAAA,cAAcqe,IAAIrkB,OAAO,CAACskB,KAAK7iB,OAAO6iB,IAAIvI,OAAOta,EAAE,GAAGwB,OAAO+C,aAAa;AAC5E,QAAA;AACI,YAAA;AAAA,QAAC2d;AAAAA,MAAO,IAAI,MAAM3d,YAAYG,OAAO,GACrCoe,UAAUZ,QAAQrR,OAAQsR,CAAAA,QAAQA,IAAIvW,cAAc,QAAQ,EAAE/L,IAAKsiB,CAAQA,QAAAA,IAAIniB,EAAE,GACjF+iB,WAAWH,IAAI/R,OAAQ7Q,CAAAA,OAAO,CAAC8iB,QAAQ7P,SAASjT,EAAE,CAAC;AACrD8iB,cAAQ/f,SAAS,KACnBpI,OAAOuC,MAAM,WAAW4lB,QAAQ/f,MAAM,IAAIigB,2BAAU,YAAYF,QAAQ/f,MAAM,CAAC,EAAE,GAG/EggB,SAAShgB,SAAS,KACpBpI,OAAOC,MACLuC,OAAMwF,IAAI,GAAGqgB,mBAAAA,QAAU,YAAYD,SAAShgB,MAAM,CAAC,eAAeggB,SAAS7nB,KAAK,IAAI,CAAC,EAAE,CACzF;AAAA,aAEKR,KAAK;AACZ,YAAM,IAAItB,MAAM,oBAAoB4pB,2BAAU,YAAYJ,IAAI7f,MAAM,CAAC;AAAA,EAAMrI,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC1F;AAEJ,GC5DMgmB,iBAA4C;AAAA,EAChD9pB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCIMke,WAAYC,CAAwBA,QAAAA;AAE1BC,SAAAA,aAAa3M,OAAgBtZ,QAA2C;AACtF,QAAMkmB,aAA0E;AAAA,IAC9EC,YAAYnmB,OAAMomB;AAAAA,IAClBC,KAAKrmB,OAAMomB;AAAAA,IACXE,QAAQtmB,OAAMgM;AAAAA,IACdua,QAAQvmB,OAAMqW;AAAAA
,IACdmQ,SAASxmB,OAAMoS;AAAAA,IACfqU,YAAYV;AAAAA,EAAAA,GAGRlgB,OAAOrG,KAAKC,UAAU6Z,OAAO,MAAM,CAAC;AAE1C,SAAOoN,kBAAAA,QAAS7gB,IAAI,EACjBnD,IAAI,CAAC6I,OAAOzH,GAAG2gB,QAA4B;AAE1C,UAAMkC,YAAY7iB,MAAM,IAAIyH,QAAQkZ,IAAI3gB,IAAI,CAAC;AAE3CyH,WAAAA,MAAMlG,SAAS,YACfshB,UAAUthB,SAAS,gBACnB,UAAUiF,KAAKqc,UAAUjhB,KAAK,IAEvB;AAAA,MAAC,GAAG6F;AAAAA,MAAOlG,MAAM;AAAA,IAAA,IAGnBkG;AAAAA,EACR,CAAA,EACA7I,IAAK6I,CAAAA,WACc2a,WAAW3a,MAAMlG,IAAI,KAAK0gB,UAC3Bxa,MAAMoS,GAAG,CAC3B,EACA5f,KAAK,EAAE;AACZ;ACxCA,MAAM+J,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAoBX8e,sBAA8D;AAAA,EAClE5qB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAC7B;AAAA,MAACwpB;AAAAA,MAAQ/jB;AAAAA,IAAW1F,IAAAA,KAAKQ,YACzB,CAACklB,KAAK,IAAI1lB,KAAKqJ,mBAAmB/D,IAAKiB,CAAAA,QAAQ,GAAGA,GAAG,EAAE;AAE7D,QAAI,CAACmf;AACG,YAAA,IAAI7mB,MAAM,+BAA+B;AAGjD,UAAMoI,SAASvB,UAAUsB,UAAYqc,EAAAA,MAAAA,EAAQ/b,OAAO;AAAA,MAAC5B;AAAAA,IAAQ,CAAA,IAAIsB,UAAU;AAEvE,QAAA;AACF,YAAM6O,MAAM,MAAM5O,OAAOa,YAAY4d,KAAK;AAC1C,UAAI,CAAC7P;AACH,cAAM,IAAIhX,MAAM,YAAY6mB,KAAK,YAAY;AAGxC/iB,aAAAA,MAAM8mB,SAASZ,aAAahT,KAAKjT,MAAK,IAAIR,KAAKC,UAAUwT,KAAK,MAAM,CAAC,CAAC;AAAA,aACtE1V,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA8BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC7D;AAEJ,GC9CMwJ,oBAAoB,eAEpBxB,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oDAQmCwB,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyBrE,IAAe,wBAAA;AAAA,EACbtN,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,YACkB;AAEZ,UAAA;AAAA,MACJwpB;AAAAA,MACA/jB;AAAAA,MACAgkB;AAAAA,MACAC;AAAAA,MACA,eAAetiB;AAAAA,IAAAA,IACb,MAAMoN,gBAAczU,IAAI,GACtB;AAAA,MAACgH;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,MAAOgnB;AAAAA,IAAa3pB,IAAAA,SACxC,CAACqQ,KAAK,IAAItQ,KAAKqJ;AAErB,QAAI,CAACiH;AACG,YAAA,IAAIzR,MAAM,yBAAyB;AAGtCwI,kBACHjH,OAAO0L,KAAKlJ,OAAMqW,OAAO,wCAAwC/M,iBAAiB,IAAI,CAAC;AAGzF,UAAM2d,iBAAiB,CAACnkB,SAClByB,iBAAiB,CAACuiB,SAClBxiB,cAAc,CAACyiB;AAEjBxiB,QAAAA,kBAAkB,CAACyiB,WAAWE,KAAKnkB;AAC/B,YAAA,IAAI9G,MACR,qFACF;AAGEgrB,QAAAA,kBAAkB,CAACD,WAAWE,KAAKpkB;AAC/B,YAAA,IAAI7G,MACR,qFACF;AAGF,UAAMkrB,aAAa/iB,UAAU;AAAA,MAACG;AAAAA,MAAgBD;AAAAA,IAAAA,CAAY,EAAEmc,MAAAA,GACtD;AAAA,MAAC3d,SAASskB;AAAAA,MAAiBrkB,WAAWskB;AAAAA,QAAqBF,WAAWziB,OAAAA,GAEtEL,SAAS8iB,WAAWziB,OAAO;AAAA,MAC/B3B,WAAW+jB,WAAWO;AAAAA,MACtBvkB,SAASA,WAAWskB;AAAAA,MACpB3iB,YAAYA,cAAc6E;AAAAA,IAAAA,CAC3B;AAEG,QAAA;AACF,YAAM4a,OAAO,MAAM7f,OAAOe,MAAMsI,KAAK;AACrC,UAAI,CAACwW;AACG,cAAA,IAAIjoB,MAAM,2BAA2B;AAGtC8D,aAAAA,MAAM8mB,SAASZ,aAAa/B,MAAMlkB,MAAK,IAAIR,KAAKC,UAAUykB,MAAM,MAAM,CAAC,CAAC;AAAA,aACxE3mB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAyBsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EACxD;AAEJ;AAEA,SAAS+R,gBAAczU,MAAiD;AAEhEkqB,QAAAA,qBAAqB3qB,QAAQC,IAAI2qB;AACvC,SAAOzV,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrD2W,OAAO,UAAU;AAAA,IAAC1T,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAM,EAClD2Q,OAAO,WAAW;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAS,EAClC0T,OAAO,WAAW;AAAA,IAAC1T,MAAM;AAAA,EAAA,CAAS,EAClC0T,OAAO,aAAa;AAAA,IAAC1T,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAM,EACrD2Q,OAAO,eAAe;AAAA,IAAC1T,MAAM;AAAA,IAAU+C,SAASkf;AAAAA,EAAmB,CAAA,EAAE9gB;AAC1E;ACnHA,MAAMqB,gBAAc,iEAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAyBX0f,6BAAiD;AAAA,EACrDxrB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA
;AAAA,WAAA,QAAO,sBAAyC;AAAA,EAAA,CAAA,GAEvD+K,QAAQhL,MAAMC,OAAO;AAEpC,GCtCMyK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAuBJ2f,cAAoC;AAAA,EAC/CzrB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,iBAA+B;AAAA,EAAA,CAAA,GAE7C+K,QAAQhL,MAAMC,OAAO;AAEpC,GC3BMyK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeX4f,0BAAgD;AAAA,EACpD1rB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAkDC,aACnD,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,sBAAuC;AAAA,MAErD+K,QAAQhL,MAAMC,OAAO;AAAA,EAElCyK,UAAAA;AACF,GChCMA,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAgCX6f,0BAAgD;AAAA,EACpD3rB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXK,OAAO;AAAA,EACPH,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAyBC,aAC1B,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,sBAAuC;AAAA,MAErD+K,QAAQhL,MAAMC,OAAO;AAAA,EAElCyK,UAAAA;AACF,GC3CM8f,eAA0C;AAAA,EAC9C5rB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCDMC,aAAW;AAAA;AAAA;AAAA,GAKX+f,yBAA+C;AAAA,EACnD7rB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXK,OAAO;AAAA,EACPH,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,qBAAsC;AAAA,MAEpD+K,QAAQhL,MAAMC,OAAO;AAAA,EAElCyK,UAAAA;AACF,GCtBMggB,oBAA0C;AAAA,EAC9C9rB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtBgH,SAASD,aAET;AAAA,MAACrB;AAAAA,IAAAA,IAAasB,OAAOK,OAAO;AAClC,QAAI,CAAC3B;AACG,YAAA,IAAI9G,MAAM,qBAAqB;AAKvC,UAAM8rB,YAAY,wCAFG,MAAM1jB,OAAO2jB,SAASC,QAAQllB,SAAS,KAAM,CAAA,GAC/BmlB,kBAAkB,UACkB,YAAYnlB,SAAS;AAE5FvF,WAAOuC,MAAM,WAAWgoB,SAAS,EAAE,GACnCI,sBAAKJ,SAAS;AAAA,EAAA;AAElB,GCrBMK,oBAA0C;AAAA,EAC9CpsB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,IAAa/G,IAAAA,SACd,CAACrB,IAAI,IAAIoB,KAAKqJ,oBACdpC,SAASD,UAAAA,GAETikB,SAAS,MAAMC,gBAActsB,MAAMqB,OAAO;AAC5C,QAAA;AACIgH,YAAAA,OACHoc,MAAM,EACN/b,OAAO;AAAA,QAACD,YAAY;AAAA,MAAa,CAAA,EACjCiH,QAAQ;AAAA,QAACC,QAAQ;AAAA,QAAUG,KAAK,UAAUuc,MAAM;AAAA,MAAA,CAAG;AAAA,aAC/C9qB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA0BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EACzD;AAEJ;AAEA,eAAewoB,gBAAcjR,WAA+Bha,SAA4B;AACtF,QAAMkrB,gBAAgBlR,aAAaA,UAAUjN,YAAAA,GACvC;AAAA,IAACtB;AAAAA,IAAQ1E;AAAAA,EAAAA,IAAa/G,SAGtBmrB,QAAQ,MAFCpkB,YAGZqc,QACA/b,OAAO;AAAA,IAACD,YAAY;AAAA,EAAa,CAAA,EACjCiH,QAAgB;AAAA,IAACI,KAAK;AAAA,IAAUjG,MAAM;AAAA,EAAA,CAAK;AAE9C,MAAI0iB,eAAe;AACXvd,UAAAA,WAAWwd,MAAM9U,OAAQ+U,CAASA,SAAAA,KAAKzsB,KAAKoO,YAAY,MAAMme,aAAa,EAAE,CAAC;AACpF,QAAI,CAACvd;AACH,YAAM,IAAI/O,MAAM,mBAAmBob,SAAS,aAAa;AAG3D,WAAOrM,SAASnI;AAAAA,EAAAA;AAGZoI,QAAAA,UAAUud,MAAM9lB,IAAK+lB,CAAU,UAAA;AAAA,IAAC/iB,OAAO+iB,KAAK5lB;AAAAA,IAAI7G,MAAMysB,KAAKzsB;AAAAA,EAAAA,EAAM;AACvE,SAAO8M,OAAOM,OAAO;AAAA,IACnBtJ,SAAS;AAAA,IACTuF,MAAM;AAAA,IACN4F;AAAAA,EAAAA,CACD;AACH;AClDA,MAAMyd,YAAuC;AAAA,EAC3C1sB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCHM8gB,0BAAgD;AAAA,EACpD3sB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtB,CAACurB,SAAS,IAAIxrB,KAAKqJ,oBACnBpC,SAASD,UAAU;AAErBykB,QAAAA;AACA,QAAA;AACQ,gBAAA,MAAMxkB,OAAOqH,QAAyB;AAAA,QAACI,KAAK,mBAAmB8c,SAAS;AAAA,MAAA,CAAG;AAAA,aAC9ErrB,KAAK;AACZ,YAAM,
IAAItB,MAAM;AAAA,EAAmCsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAG5D,UAAA;AAAA,MAACX;AAAAA,MAAW2pB;AAAAA,MAAYC;AAAAA,MAAYC;AAAAA,MAAeC;AAAAA,IAAAA,IAAcJ;AAUvE,QARArrB,OAAOuC,MAAM,SAASZ,SAAS,EAAE,GACjC3B,OAAOuC,MAAM,WAAWmpB,UAAUL,OAAO,CAAC,EAAE,GAC5CrrB,OAAOuC,MAAM,gBAAgB+oB,UAAU,EAAE,GAErCD,QAAQM,aACV3rB,OAAOuC,MAAM,YAAYqpB,cAAcP,OAAO,CAAC,EAAE,GAG/C,CAACI,eAAe,CAACD,iBAAiBA,kBAAkB,SAAS;AAC/D,YAAMlf,OAAOif,aAAa;AAAA;AAAA,EAAUA,UAAU;AAAA;AAAA,IAAY;AACnDhpB,aAAAA,MAAM,kBAAkB+J,IAAI,EAAE;AAAA,IAAA;AAAA,EACvC;AAEJ;AAIO,SAASsf,cACdP,SACAre,UAAmC,IAC3B;AACF,QAAA;AAAA,IAAC6e;AAAAA,MAAe7e,SAChB;AAAA,IAAC3H;AAAAA,IAAImmB;AAAAA,IAAeF;AAAAA,EAAAA,IAAcD,SAClCS,OAAOD,cAAc,8BAA8BxmB,EAAE,oBAAoB;AAC/E,UAAQmmB,eAAa;AAAA,IACnB,KAAK;AACI,aAAA,QAAQF,UAAU,IAAIQ,IAAI;AAAA,IACnC,KAAK;AACI,aAAA;AAAA,IACT,KAAK;AACI,aAAA;AAAA,EAET;AAGK,SAAA;AACT;AAEO,SAASJ,UAAUL,SAAkC;AAC1D,SAAIA,QAAQM,YACH,WAGLN,QAAQI,aACH,gBAGF;AACT;AC5DA,MAAMM,sBAA2D;AAAA,EAC/DvtB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,IAAAA,IAAa/G,SACdM,QAAQP,KAAKQ,YACb,CAAC5B,IAAI,IAAIoB,KAAKqJ,oBACdpC,SAASD,UAAU,GAEnBikB,SAAS,MAAMC,cAActsB,MAAMqB,OAAO;AAChD,QAAImsB,UACAC;AACA,QAAA;AACS,iBAAA,MAAMplB,OAAOqH,QAAuB;AAAA,QAACI,KAAK,UAAUuc,MAAM;AAAA,MAAY,CAAA,GACjFoB,WAAW,MAAMplB,OAAOqH,QAA2B;AAAA,QAACI,KAAK,UAAUuc,MAAM;AAAA,MAAA,CAAY;AAAA,aAC9E9qB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAgCsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAGzD4pB,UAAAA,kBAAkBC,yBAAQF,UAAU,WAAW,GAC/CG,YAAYJ,SAAS9mB,IAAKuG,CAAsD,SAAA;AAAA,MACpF,GAAGA;AAAAA,MACHwgB,UAAUC,gBAAgBzgB,IAAIpG,EAAE;AAAA,IAChC,EAAA,GAEIgnB,gBAAgBL,SAAS5jB,SAAS;AAC9B3B,cAAAA,QAAQ,CAACnE,SAASgE,MAAM;AAChCgmB,mBAAahqB,SAASzC,SAAS;AAAA,QAAC0sB,UAAUpsB,MAAMosB;AAAAA,MAAS,CAAA,GACzDC,eAAe3sB,SAASwsB,kBAAkB/lB,CAAC;AAAA,IAAA,CAC5C;AAAA,EAAA;AAEL;AAIA,eAAewkB,cAAcjR,WAA+Bha,SAA4B;AACtF,QAAMkrB,gBAAgBlR,aAAaA,UAAUjN,YAAAA,GACvC;AAAA,IAACtB;AAAAA,IAAQ1E;AAAAA,EAAAA,IAAa/G,SAGtBmrB,QAAQ,MAFCpkB,YAGZqc,QACA/b,OAAO;AAAA,IAACD,YAAY;AAAA,EAAa,CAAA,EACjCiH,QAAgB;AAAA,IAACI,KAAK;AAAA,IAAUjG,MAAM;AAAA,EAAA,CAAK;AAE9C,MAAI0iB,eAAe;AACXvd,UAAAA,WAAWwd,MAAM9U,OAAQ+U,CAASA,SAAAA,KAAKzsB,KAAKoO,YAAY,MAAMme,aAAa,EAAE,CAAC;AACpF,QAAI,CAACvd;AACH,YAAM,IAAI/O,MAAM,mBAAmBob,SAAS,aAAa;AAG3D,WAAOrM,SAASnI;AAAAA,EAAAA;AAGlB,MAAI2lB,MAAM5iB,WAAW;AACb,UAAA,IAAI3J,MAAM,+BAA+B;AAGjD,MAAIusB,MAAM5iB,WAAW;AACZ4iB,WAAAA,MAAM,CAAC,EAAE3lB;AAGZoI,QAAAA,UAAUud,MAAM9lB,IAAK+lB,CAAU,UAAA;AAAA,IAAC/iB,OAAO+iB,KAAK5lB;AAAAA,IAAI7G,MAAMysB,KAAKzsB;AAAAA,EAAAA,EAAM;AACvE,SAAO8M,OAAOM,OAAO;AAAA,IACnBtJ,SAAS;AAAA,IACTuF,MAAM;AAAA,IACN4F;AAAAA,EAAAA,CACD;AACH;AAEA,SAAS+e,eAAe3sB,SAA4B4sB,MAAe;AAC5DA,UACH5sB,QAAQG,OAAOuC,MAAM;AAAA,CAAO;AAEhC;AAEA,SAAS+pB,aACPhqB,SACAzC,SACAmN,SACA;AACM,QAAA;AAAA,IAACuf;AAAAA,MAAYvf,SACb;AAAA,IAAChN;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C;AAExBG,SAAOuC,MAAM,SAASD,QAAQX,SAAS,EAAE,GACzC3B,OAAOuC,MAAM,WAAWD,QAAQyF,MAAM,EAAE,GACxC/H,OAAOuC,MAAM,gBAAgBD,QAAQgpB,UAAU,EAAE,GAE7ChpB,QAAQoqB,eAAe,KACzB1sB,OAAOuC,MAAM,aAAaD,QAAQoqB,YAAY,EAAE,GAG9CH,aACFvsB,OAAOuC,MAAM,UAAU,GACvBvC,OAAOuC,MAAMoqB,kBAAQ3qB,KAAK4G,MAAMtG,QAAQsqB,OAAO,GAAG;AAAA,IAACC,QAAQ;AAAA,EAAK,CAAA,CAAC,IAG/DN,YAAYjqB,QAAQ2pB,aACtBjsB,OAAOuC,MAAM,WAAW,GACxBD,QAAQ2pB,SAASxlB,QAAS4kB,CAAY,YAAA;AAEpC,UAAMyB,SAAS,MADFzB,QAAQ1pB,UAAUsX,QAAQ,WAAW,GAAG,CAC5B;AAEzB,QAAIoS,QAAQI;AACHlpB,aAAAA,MAAM,GAAGuqB,MAAM,IAAItqB,OAAMqW,OAAO,SAAS,CAAC,EAAE;AAAA,aAC1CwS,QAAQM,WAAW;AACtBoB,YAAAA,UAAUnB,cAAcP,SAAS;AAAA,QAACQ,aAAa;AAAA,MAAA,CAAK;AACnDtpB,aAAAA,MAAM,GAAGuqB,MAAM,IAAItqB,OAAMqW,OAAO,YAAYkU,OAAO,EAAE,CAAC,EAAE;AAAA,IACjE;
AACSxqB,aAAAA,MAAM,GAAGuqB,MAAM,kBAAkBzB,QAAQC,UAAU,KAAKD,QAAQ2B,QAAQ,KAAK;AAAA,EAEvF,CAAA,IAIHhtB,OAAOuC,MAAM,EAAE;AACjB;AC/HA,MAAM0qB,mBAAyC;AAAA,EAC7CzuB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtBgH,SAASD,UAAU;AAErBokB,QAAAA;AACA,QAAA;AACFA,cAAQ,MAAMnkB,OACXoc,MAAM,EACN/b,OAAO;AAAA,QAACD,YAAY;AAAA,MAAa,CAAA,EACjCiH,QAAgB;AAAA,QAACI,KAAK;AAAA,MAAA,CAAS;AAAA,aAC3BvO,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAgCsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAG/D0oB,UAAMvkB,QAASwkB,CAAS,SAAA;AACtBjrB,aAAOuC,MAAM,SAAS0oB,KAAKzsB,IAAI,EAAE,GACjCwB,OAAOuC,MAAM,YAAY0oB,KAAK3lB,OAAO,EAAE,GACvCtF,OAAOuC,MAAM,QAAQ0oB,KAAKvZ,GAAG,EAAE,GAE3BuZ,KAAKpjB,SAAS,eAChB7H,OAAOuC,MAAM,gBAAgB0oB,KAAKiC,UAAU,EAAE,GAE1CjC,KAAK5gB,eACPrK,OAAOuC,MAAM,gBAAgB0oB,KAAK5gB,WAAW,EAAE,IAInDrK,OAAOuC,MAAM,EAAE;AAAA,IAAA,CAChB;AAAA,EAAA;AAEL,GCtCM8H,gBAAc,yEAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcX6iB,yBAA+C;AAAA,EACnD3uB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACF,qBAAAA;AAAAA,IAAAA,IAAuB,MAAM;;QAC9BytB,eAAe,MAAMztB,qBAAoBC,MAAMC,OAAO;AACxDutB,QAAAA;AACIA,YAAAA;AAEDA,WAAAA;AAAAA,EAAAA;AAEX;AChCA,IAAe,gBAAA;AAAA,EACb5uB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACLO,MAAMgjB,uBAAuB,cACvBC,8BAA8B,CAAC,OAAO,MAAM,MAAM,KAAK,GCDvDC,kBAAkBA,CAAC;AAAA,EAC9BC;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAOMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcvoB,IAAKwoB,CAAAA,MAAM1rB,KAAKC,UAAUyrB,CAAC,CAAC,EAAEntB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GCjBKotB,gBAAgBA,CAAC;AAAA,EAC5BH;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA,YAGMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcvoB,IAAKwoB,CAAAA,MAAM1rB,KAAKC,UAAUyrB,CAAC,CAAC,EAAEntB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GCbKqtB,cAAcA,CAAC;AAAA,EAC1BJ;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcvoB,IAAKwoB,CAAAA,MAAM1rB,KAAKC,UAAUyrB,CAAC,CAAC,EAAEntB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GChBKstB,aAAaA,CAAC;AAAA,EACzBL;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcvoB,IAAKwoB,CAAAA,MAAM1rB,KAAKC,UAAUyrB,CAAC,CAAC,EAAEntB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GChBKutB,cAAcA,CAAC;AAAA,EAC1BN;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcvoB,IAAKwoB,CAAAA,MAAM1rB,KAAKC,UAAUyrB,CAAC,CAAC,EAAEntB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GCFF+J,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAYXyjB,YAAY,CAChB;AAAA,EAACvvB,MAAM;AAAA,EAA6CwvB,UAAUL;AAAa,GAC3E;AAAA,EAACnvB,MAAM;AAAA,EAAyBwvB,UAAUH;AAAU,GACpD;AAAA,EAACrvB,MAAM;AAAA,EAAkBwvB,UAAUJ;AAAW,GAC9C;AAAA,EAACpvB,MAAM;AAAA,EAAyCwvB,UAAUF;AAAW,GACrE;AAAA,EACEtvB,MAAM;AAAA,EACNwvB,UAAUT;AACZ,CAAC,GAGGU,yBAAqE;AAAA,EACzEzvB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA
,MAACG;AAAAA,MAAQsL;AAAAA,MAAQpL;AAAAA,MAASsC,OAAAA;AAAAA,IAAAA,IAAS3C;AAErC,QAAA,CAACuX,KAAK,IAAIxX,KAAKqJ;AAEZ,WAAA,CAACmO,OAAO2E,KAAK;AACV,cAAA,MAAMzQ,OAAOM,OAAO;AAAA,QAC1B/D,MAAM;AAAA,QACNqmB,QAAQ;AAAA,QACR5rB,SAAS;AAAA,MAAA,CACV,GACI8U,MAAM2E,UACT/b,OAAOC,MAAMuC,OAAMwF,IAAI,sBAAsB,CAAC;AAG5C+X,UAAAA,SAAQ,MAAMzU,OAAOM,OAAO;AAAA,MAChC/D,MAAM;AAAA,MACNqmB,QAAQ;AAAA,MACR5rB,SAAS;AAAA,IAAA,CACV,GAEK6rB,kBAAkBC,OAAOC,YAAYN,UAAU7oB,IAAKwoB,CAAM,MAAA,CAACA,EAAElvB,MAAMkvB,CAAC,CAAC,CAAC,GACtEM,WAAW,MAAM1iB,OAAOM,OAAO;AAAA,MACnC/D,MAAM;AAAA,MACNvF,SAAS;AAAA,MACTmL,SAASsgB,UAAU7oB,IAAKopB,CAAqB,qBAAA;AAAA,QAC3C9vB,MAAM8vB,gBAAgB9vB;AAAAA,QACtB0J,OAAOomB,gBAAgB9vB;AAAAA,MAAAA,EACvB;AAAA,IAAA,CACH,GAEK+vB,cAAcC,gBAAAA,QAAOpX,MAAMxK,YAAY,CAAC,EAC3CqM,QAAQ,QAAQ,GAAG,EACnBA,QAAQ,eAAe,EAAE,GAEtBwV,UAAU9tB,cAAKJ,QAAAA,KAAKL,SAASmtB,sBAAsBkB,WAAW;AACpE,QAAInY,gBAAWqY,OAAO,KAElB,CAAE,MAAMnjB,OAAOM,OAAO;AAAA,MACpB/D,MAAM;AAAA,MACNvF,SAAS,uBAAuBE,OAAMgE,KAAKioB,OAAO,CAAC;AAAA,MACnD7jB,SAAS;AAAA,IAAA,CACV;AAED;AAGJqK,SAAAA,UAAUwZ,SAAS;AAAA,MAACntB,WAAW;AAAA,IAAA,CAAK;AAEpC,UAAMotB,oBAAoBP,gBAAgBH,QAAQ,EAAEA,YAAYL,eAAe;AAAA,MAC7EH,eAAepW;AAAAA,MACfqW,eAAe1N,OACZC,MAAM,GAAG,EACT9a,IAAKwoB,CAAMA,MAAAA,EAAE3R,MAAM,EACnB7F,OAAOyC,OAAO;AAAA,IAClB,CAAA,GAEKgW,iBAAiBhuB,cAAAA,QAAKJ,KAAKkuB,SAAS,UAAU;AAEpD,UAAM1sB,GAAAA,UAAU4sB,gBAAgBD,gBAAgB,GAEhD1uB,OAAOuC,MAAM,GACbvC,OAAOuC,MAAM,GAAGC,OAAMgM,MAAM,QAAG,CAAC,qBAAqB,GACrDxO,OAAOuC,MAAAA,GACPvC,OAAOuC,MAAM,aAAa,GAC1BvC,OAAOuC,MACL,QAAQC,OAAMoS,KACZ+Z,cACF,CAAC,6DACH,GACA3uB,OAAOuC,MACL;AAAA,IAAkCC,OAAMoS,KACtC,wBAAwB2Z,WAAW,6CACrC,CAAC,IACH,GACAvuB,OAAOuC,MACL;AAAA,KAAiDC,OAAMoS,KACrD,wBAAwB2Z,WAAW,yDACrC,CAAC,IACH,GACAvuB,OAAOuC,SACPvC,OAAOuC,MACL,+DAAwDC,OAAMoS,KAC5D,0DACF,CAAC,EACH;AAAA,EAAA;AAEJ;AC3FgBga,SAAAA,uBACd1uB,SACAstB,eAC2B;AAC3B,SAAO,CAACA,eAAe7sB,cAAAA,QAAKJ,KAAKitB,eAAe,OAAO,CAAC,EAAEqB,QAASC,CAAAA,aACjExB,4BAA4BpoB,IAAKsgB,CAAQ,QAAA;AACvC,UAAMuJ,eAAepuB,cAAAA,QAAKJ,KAAK8sB,sBAAsB,GAAGyB,QAAQ,IAAItJ,GAAG,EAAE,GACnEwJ,eAAeruB,cAAKL,QAAAA,QAAQJ,SAAS6uB,YAAY;AACnDE,QAAAA;AACA,QAAA;AAEFA,YAAMxgB,QAAQugB,YAAY;AAAA,aACnBjvB,KAAK;AACZ,UAAIA,IAAImM,SAAS;AACf,cAAM,IAAIzN,MAAM,UAAUsB,IAAIuC,OAAO,GAAG;AAAA,IAAA;AAGrC,WAAA;AAAA,MAACysB;AAAAA,MAAcC;AAAAA,MAAcC;AAAAA,IAAG;AAAA,EAAA,CACxC,CACH;AACF;AASO,SAASC,0BACdC,QAC6C;AACzC,MAAA,OAAOA,OAAOF,MAAQ,OAAe,CAAC9H,uBAAAA,QAAcgI,OAAOF,IAAIrkB,OAAO;AACjE,WAAA;AAGHqkB,QAAAA,MAAME,OAAOF,IAAIrkB;AACvB,SAAO,OAAOqkB,IAAI7X,SAAU,YAAY6X,IAAIG,YAAYxhB;AAC1D;AClEA,MAAMtD,aAAW,IAEX+kB,uBAA6C;AAAA,EACjD7wB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO6kB,GAAGzvB,YAAY;AACtB,UAAA;AAAA,MAACK;AAAAA,MAASF;AAAAA,MAAQwC,OAAAA;AAAAA,IAAAA,IAAS3C;AAC7B,QAAA;AACI0vB,YAAAA,aAAa,MAAMC,kBAAkBtvB,OAAO;AAE9CqvB,UAAAA,WAAWnnB,WAAW,GAAG;AAC3BpI,eAAOuC,MAAM,yDAAyD,GACtEvC,OAAOuC,MACL;AAAA,MAASC,OAAMgM,MAAM,kCAAoC,CAAC,4BAC5D;AACA;AAAA,MAAA;AAGIyI,YAAAA,QAAQ,IAAIC,0BAAM;AAAA,QACtBE,OAAO,SAASmY,WAAWnnB,MAAM;AAAA,QACjC+O,SAAS,CACP;AAAA,UAAC3Y,MAAM;AAAA,UAAM4Y,OAAO;AAAA,UAAMC,WAAW;AAAA,QAAA,GACrC;AAAA,UAAC7Y,MAAM;AAAA,UAAS4Y,OAAO;AAAA,UAASC,WAAW;AAAA,QAAO,CAAA;AAAA,MAAA,CAErD;AAEDkY,iBAAW9oB,QAASgpB,CAAqB,qBAAA;AACvCxY,cAAMK,OAAO;AAAA,UAACjS,IAAIoqB,iBAAiBpqB;AAAAA,UAAI+R,OAAOqY,iBAAiBC,UAAUtY;AAAAA,QAAAA,CAAM;AAAA,MAAA,CAChF,GACDH,MAAMQ,cACNzX,OAAOuC,MAAM,sDAAsD;AAAA,aAC5DtC,OAAO;AACVA,UAAAA,MAAMiM,SAAS,UAAU;AAC3BlM,eAAOuC,MAAM,2CAA2C,GACxDvC,OAAOuC,MACL;AAAA,MAASC,OAAMgM,MAAM,kCAAoC,CAAC,4BAC5D;AACA;AAAA,MAAA;AAEF,YAAM,IAAI/P,MAAM,+CAA+CwB,MAAMqC,OAAO,EAAE;AAAA,IAAA;AAAA,EAChF;AAEJ;AAmBA,eAAsBktB,kBAAkBtvB
,SAA+C;AACjFyvB,MAAAA;AAEFA,eAAaC,KAAAA,SAAS;AAAA,IACpBvN,QAAQ,OAAOljB,QAAQuC,QAAQkD,MAAM,CAAC,CAAC;AAAA,IACvCirB,WAAW;AAAA,MAAC,kBAAkB;AAAA,IAAA;AAAA,EAC/B,CAAA,EAAEF;AAGCG,QAAAA,gBAAgBnvB,sBAAKJ,KAAKL,SAASmtB,oBAAoB,GACvD0C,mBAAmB,MAAMC,GAAAA,QAAQF,eAAe;AAAA,IAACG,eAAe;AAAA,EAAA,CAAK,GAErEV,aAAkC,CAAC;AACzC,aAAWW,SAASH,kBAAkB;AACpC,UAAMI,YAAYD,MAAMpN,YAAgBoN,IAAAA,MAAM1xB,OAAO4xB,+BAA+BF,MAAM1xB,IAAI,GACxF6xB,aAAazB,uBAAuB1uB,SAASiwB,SAAS,EAAEja,OAAOgZ,yBAAyB;AAE9F,eAAWoB,aAAaD;AACtBd,iBAAW/rB,KAAK;AAAA,QACd6B,IAAI8qB;AAAAA,QACJT,WAAWY,UAAUrB,IAAIrkB;AAAAA,MAAAA,CAC1B;AAAA,EAAA;AAID+kB,SAAAA,cACFA,cAGKJ;AACT;AAEA,SAASa,+BAA+Bze,UAAkB;AAExD,SAAO2b,4BAA4B1pB,OACjC,CAACpF,MAAMgnB,QAAShnB,KAAK+xB,SAAS,IAAI/K,GAAG,EAAE,IAAI7kB,cAAAA,QAAKoR,SAASvT,MAAM,IAAIgnB,GAAG,EAAE,IAAIhnB,MAC5EmT,QACF;AACF;ACjHA,IAAe,iBAAA;AAAA,EACbnT,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACDA,SAASmmB,aAAa7vB,OAAoB;AACpC,MAAA,CAACgmB,MAAMC,QAAQjmB,KAAI;AACf,UAAA,IAAIlC,MAAM,sBAAsB;AAGxC,SAAOkC,MAAKiD,OAAe,CAACye,QAAQoO,SAASnqB,MAAM;AACjD,QAAIoqB,MAAAA,eAAeD,OAAO;AACjB,aAAA,GAAGpO,MAAM,IAAIoO,OAAO;AAGzBE,QAAAA,mBAAaF,OAAO,KAAKA,QAAQG;AACnC,aAAO,GAAGvO,MAAM,WAAWoO,QAAQG,IAAI;AAGrCC,QAAAA,MAAAA,aAAaJ,OAAO,GAAG;AACnB,YAAA,CAACK,MAAMC,EAAE,IAAIN;AACnB,aAAO,GAAGpO,MAAM,IAAIyO,IAAI,IAAIC,EAAE;AAAA,IAAA;AAGhC,QAAI,OAAON,WAAY;AAEd,aAAA,GAAGpO,MAAM,GADE/b,MAAM,IAAI,KAAK,GACL,GAAGmqB,OAAO;AAGxC,UAAM,IAAIhyB,MAAM,8BAA8BuD,KAAKC,UAAUwuB,OAAO,CAAC,IAAI;AAAA,KACxE,EAAE;AACP;AAgBO,MAAMO,eAAeA,CAACC,WAA2C,CAAIC,GAAAA,QAAQ,MAC3E9C,OAAO+C,QAAQF,QAAQ,EAC3B/rB,IAAI,CAAC,CAAC2jB,KAAKuI,KAAK,MACfnrB,KAAKJ,IAAIgjB,IAAIzgB,SAAS8oB,QAAQ,GAAGF,aAAaI,MAAMH,UAAUC,QAAQ,CAAC,CAAC,CAC1E,EACCttB,OAAO,CAACiC,KAAK8X,SAAUA,OAAO9X,MAAM8X,OAAO9X,KAAM,CAAC,GAc1CwrB,aAAaA,CAAwB;AAAA,EAChDC,MAAAA,QAAO,CAAC;AAAA,EACRC;AAAAA,EACAC,QAAAA,UAAS;AAAA,EACTC,UAAUC,YAAYA,CAAC;AAAA,IAACC;AAAAA,EAAAA,MAAWA;AAAAA,EACnCC;AACa,MAAc;AACrBT,QAAAA,UAAU/C,OAAO+C,QAAQG,KAAI;AAEnC,SAAOH,QACJjsB,IAAI,CAAC,CAAC2jB,KAAKuI,KAAK,GAAGprB,UAAU;AAC5B,UAAM6rB,SAAS7rB,UAAUmrB,QAAQ/oB,SAAS,GACpC0pB,aAAa,GAAGN,OAAM,GAAGK,SAAS,OAAO,SAAI,IAC7CE,SAASL,UAAUN,KAAK,GAExBY,SAASX,WAAW;AAAA,MACxBC,MAAMF,MAAMH;AAAAA,MACZM;AAAAA,MACAC,QAAQM;AAAAA,MACRL,UAAUC;AAAAA,MACVE;AAAAA,IAAAA,CACD;AAED,QAAI,CAACG,QAAQ3pB;AAEX,aAAO,CADS,GAAGopB,OAAM,GAAGK,SAAS,WAAM,QAAG,UAAKhJ,GAAG,IACrCmJ,MAAM,EAAE9b,OAAOyC,OAAO,EAAEpY,KAAK;AAAA,CAAI;AAG9C,UAAA,CAAC0xB,OAAO,GAAGC,IAAI,IAAIH,QACnBI,eAAe,IAAIC,OAAOb,gBAAgBC,QAAOppB,SAASygB,IAAIzgB,MAAM,GACpEiqB,QAAQR,SAAS,WAAM,UACvBS,oBAAoB,IAAIF,OAAOb,gBAAgBC,QAAOppB,SAAS,CAAC,GAEhEmqB,eAAe,GAAGf,OAAM,GAAGa,KAAK,UAAKxJ,GAAG,IAAIsJ,YAAY,IAAIP,WAAWK,KAAK,CAAC,IAC7EO,qBAAqBN,KACxBhtB,IAAKutB,CAAAA,WAAW,GAAGX,UAAU,GAAGQ,iBAAiB,IAAIV,WAAWa,MAAM,CAAC,EAAE,EACzElyB,KAAK;AAAA,CAAI;AAGL,WAAA,CADS,CAACgyB,cAAcC,kBAAkB,EAAEtc,OAAOyC,OAAO,EAAEpY,KAAK;AAAA,CAAI,GAC3DyxB,MAAM,EAAE9b,OAAOyC,OAAO,EAAEpY,KAAK;AAAA,CAAI;AAAA,EACnD,CAAA,EACAA,KAAK;AAAA,CAAI;AACd;AAMO,SAASmyB,cAA2Cf,OAA2B;AACpF,QAAMgB,OAAmB,CAAC;AAGjBC,WAAAA,QAAQtB,OAAYuB,OAAmBF,MAAM;AAEhD,QAAA,CAACrB,MAAK3wB,KAAKyH,QAAQ;AAChByqB,WAAKlB,UAAOkB,KAAKlB,QAAQ,CAAA,IAG9BkB,KAAKlB,MAAMnuB,KAAK8tB,KAAI;AACpB;AAAA,IAAA;AAGI,UAAA,CAACvrB,SAAS,GAAGmsB,IAAI,IAAIZ,MAAK3wB,MAC1BkoB,MAAM2H,aAAa,CAACzqB,OAAO,CAAC;AAG7B8sB,SAAK5B,aAAU4B,KAAK5B,WAAW,CAC9BpI,IAAAA,OAAOgK,KAAK5B,aAAW4B,KAAK5B,SAASpI,GAAG,IAAI,KAElD+J,QAAQ;AAAA,MAAC,GAAGtB;AAAAA,MAAM3wB,MAAMuxB;AAAAA,IAAAA,GAAOW,KAAK5B,SAASpI,GAAG,CAAC;AAAA,EAAA;AAGxCyI,aAAAA,SAAQK,MAAOiB,SAAQtB,KAAI;AAC/BqB,SAAAA;AACT;AC/HA,MAAMG,QAAQC,gBAAO,CAAC;AASf,SAASC,aAAa;AAAA,EAC3BxwB,OAAAA;A
AAAA,EACAywB;AAAAA,EACAvD;AAAAA,EACAwD,aAAa;AACwD,GAAW;AAChF,UAAQvM,MAAMC,QAAQqM,OAAO,IAAIA,UAAU,CAACA,OAAO,GAChD/tB,IAAKiuB,CAAAA,iBACAA,aAAatrB,SAAS,gBACjB,CACL,CACEurB,MAAM,eAAe,QAAQ5wB,MAAK,GAClC,OAAO2wB,aAAa9tB,KAAO,MAAc,OAAO7C,OAAMwW,UAAUma,aAAa9tB,EAAE,CAAC,EAE/E6Q,OAAOyC,OAAO,EACdpY,KAAK,GAAG,GACXixB,OACEwB,aAAa;AAAA,IACXxwB,OAAAA;AAAAA,IACAywB,SAASE,aAAatM;AAAAA,IACtB6I;AAAAA,IACAwD;AAAAA,EAAAA,CACD,CACH,CAAC,EACD3yB,KAAK;AAAA;AAAA,CAAM,IAER8yB,qBAAqB;AAAA,IAC1B7wB,OAAAA;AAAAA,IACAywB,SAASE;AAAAA,IACTzD;AAAAA,IACAwD;AAAAA,EAAAA,CACD,CACF,EACA3yB,KAAK;AAAA;AAAA,CAAM;AAChB;AAEA,SAAS+yB,cAAcC,KAAqC;AAC1D,SAAO,OAAOA,OAAQ,WAAWA,MAAMA,IAAI3C;AAC7C;AAEA,SAAS4C,WAAWhxB,QAAcixB,SAAyB;AAClB,SAAA;AAAA,IACrCC,MAAMlxB,OAAMmxB,QAAQC;AAAAA,IACpBC,aAAarxB,OAAMsxB,QAAQF,MAAMhf;AAAAA,IACjCmf,kBAAkBvxB,OAAMwxB,SAASJ,MAAMhf;AAAAA,IACvCqf,aAAazxB,OAAM0xB,MAAMN,MAAMhf;AAAAA,IAGnB6e,OAAO;AACvB;AAEA,SAASL,MAAMe,OAAeV,SAAkBjxB,QAAsB;AAC/DswB,SAAAA,QAIEU,WAAWhxB,QAAOixB,OAAO,EAAE,IAAIU,KAAK,GAAG,IAHrC,IAAIA,KAAK;AAIpB;AAEA,MAAMC,iBAAmD;AAAA,EACvDvmB,QAAQ;AAAA,EACRmZ,mBAAmB;AAAA,EACnBnd,iBAAiB;AAAA,EACjB8V,QAAQ;AAAA,EACR0U,OAAO;AACT;AAEA,SAASC,WAAWC,UAAwC;AAC1D,MAAI,QAAQA;AACV,WAAOA,SAASlvB;AAGlB,MAAI,cAAckvB;AAChB,WAAOA,SAASC,SAASpvB;AAI7B;AAEA,MAAMqvB,gBAAgB,IAAIC,KAAKC,WAAW,SAAS;AAAA,EACjD9sB,MAAM;AACR,CAAC;AAED,SAAS+sB,eAAepyB,QAAc+xB,UAAoB7E,WAA8B;AACtF,QAAMmF,eAAezB,MAAMmB,SAAS1sB,MAAMusB,eAAeG,SAAS1sB,IAAI,GAAGrF,MAAK,GAExEsyB,eACJ,cAAcP,YAAY7E,UAAUjC,gBAChC2F,MACE,cAAcmB,WACVA,SAASC,SAAS1qB,QAClB2qB,cAAcM,OAAOrF,UAAUjC,iBAAiB,CAAE,CAAA,GACtD,QACAjrB,MACF,IACA;AAGN,SAAO,CAACqyB,cAAcC,cAActyB,OAAMwW,UAAUsb,WAAWC,QAAQ,CAAC,CAAC,EACtEre,OAAOyC,OAAO,EACdpY,KAAK,GAAG;AACb;AAEO,SAAS8yB,qBAAqB;AAAA,EACnC7wB,OAAAA;AAAAA,EACAywB;AAAAA,EACAvD;AAAAA,EACAwD,aAAa;AACa,GAAW;AAC/B8B,QAAAA,OACJ,aAAa/B,UAAUzwB,OAAMgE,KAAK,iBAAiBysB,QAAQjmB,SAASioB,UAAU,GAAG,IAAI,IACjFC,SAAS,CAACN,eAAepyB,QAAOywB,SAASvD,SAAS,GAAGsF,IAAI,EAAEz0B,KAAK,GAAG,GACnE40B,UAAU,IAAI/C,OAAOc,UAAU;AAErC,MACED,QAAQprB,SAAS,YACjBorB,QAAQprB,SAAS,uBACjBorB,QAAQprB,SAAS;AAEjB,WAAO,CAACqtB,QAAQ;AAAA,GAAM1D,OAAOxvB,KAAKC,UAAUgxB,QAAQuB,UAAU,MAAM,CAAC,GAAGtB,UAAU,CAAC,EAAE3yB,KAAK,EAAE;AAG1F0yB,MAAAA,QAAQprB,SAAS,SAAS;AAC5B,UAAMgrB,OAAOH,cAAyBO,QAAQmC,QAAQjtB,KAAM,CAAA,GACtDopB,gBAAgBtrB,KAAKJ,IAAImrB,aAAa6B,KAAK5B,QAAQ,IAAI,GAAG,EAAE;AAElE,WAAO,CACLiE,QACA;AAAA,GACA7D,WAAsB;AAAA,MACpBC,MAAMuB,KAAK5B;AAAAA,MACXM;AAAAA,MACAC,QAAQ2D;AAAAA,MACRvD,YAAayC,CAAAA,UAAUgB,oBAAoB7yB,QAAO6xB,KAAK;AAAA,IAAA,CACxD,CAAC,EACF9zB,KAAK,EAAE;AAAA,EAAA;AAGJ20B,SAAAA;AACT;AAEA,SAASG,oBAAoB7yB,QAAc6xB,OAA0B;AAC7D,QAAA;AAAA,IAACiB;AAAAA,EAAAA,IAAMjB,OACPkB,gBAAgB/yB,OAAMoS,KAAK0gB,GAAGztB,IAAI;AACxC,MAAIytB,GAAGztB,SAAS;AACd,WAAO,GAAGrF,OAAMwF,IAAIutB,aAAa,CAAC;AAEpC,MAAID,GAAGztB,SAAS;AACd,WAAO,GAAGrF,OAAMqW,OAAO0c,aAAa,CAAC,IAAID,GAAGptB,KAAK;AAEnD,MAAIotB,GAAGztB,SAAS,SAASytB,GAAGztB,SAAS;AACnC,WAAO,GAAGrF,OAAMqW,OAAO0c,aAAa,CAAC,IAAID,GAAGE,MAAM;AAEpD,MAAIF,GAAGztB,SAAS;AACP,WAAA,GAAGrF,OAAMqW,OAAO0c,aAAa,CAAC,IAAIvzB,KAAKC,UAAUqzB,GAAGptB,KAAK,CAAC;AAEnE,MAAIotB,GAAGztB,SAAS;AACP,WAAA,GAAGrF,OAAMgM,MAAM+mB,aAAa,CAAC,IAAIvzB,KAAKC,UAAUqzB,GAAGptB,KAAK,CAAC;AAElE,MAAIotB,GAAGztB,SAAS;AACd,WAAO,GAAGrF,OAAMgM,MAAM+mB,aAAa,CAAC,IAAID,GAAGG,QAAQ,KAAKnC,cACtDgC,GAAGI,aACL,CAAC,KAAK1zB,KAAKC,UAAUqzB,GAAGK,KAAK,CAAC;AAEhC,MAAIL,GAAGztB,SAAS;AACd,WAAO,GAAGrF,OAAMqW,OAAO0c,aAAa,CAAC,IAAIjC,cAAcgC,GAAGI,aAAa,CAAC,KAAK1zB,KAAKC,UAChFqzB,GAAGK,KACL,CAAC;AAEH,MAAIL,GAAGztB,SAAS;AACP,WAAA,GAAGrF,OAAMwF,IAAIutB,aAAa,CAAC,IAAID,GAAGM,UAAU,KAAKN,GAAGO,QAAQ;AAGrE,QAAM,IAAIp3B,MAAM,2BAA2B62B
,GAAGztB,IAAI,EAAE;AACtD;AAEA,SAAS2pB,OAAOyB,SAAiB/sB,QAAO,GAAW;AAC3CivB,QAAAA,UAAU,IAAI/C,OAAOlsB,KAAI;AAE/B,SAAO+sB,QACJjT,MAAM;AAAA,CAAI,EACV9a,IAAK4wB,CAAAA,SAASX,UAAUW,IAAI,EAC5Bv1B,KAAK;AAAA,CAAI;AACd;ACtLA,MAAM+J,aAAW;AAAA;AAAA;AAAA,oGAGmFyrB,QAAwB,wBAAA,cAAcC,oCAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6BtK,SAAS3hB,cAAczU,MAAyB;AAC9C,SAAO0U,uBAAMC,QAAAA,QAAQ3U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrDoI,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAK,EACnDoC,QAAQ,eAAe;AAAA,IAACnF,MAAM;AAAA,IAAU+C,SAASorB,QAAAA;AAAAA,EAAAA,CAA6B,EAC9EhpB,QAAQ,YAAY;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAK,EACpDoC,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACnCmF,QAAQ,eAAe;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACvCmF,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACnCmF,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAK,CAAA,EAAE5B;AAC1D;AAEA,MAAMitB,sBAAyD;AAAA,EAC7Dz3B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA;AAAA,EAEbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQsL;AAAAA,MAAQ9I,OAAAA;AAAAA,MAAOtC;AAAAA,IAAWL,IAAAA,SAC9C,CAACwF,EAAE,IAAIzF,KAAKqJ,oBACZitB,0BAA0Bv1B,cAAKJ,QAAAA,KAAKL,SAASmtB,oBAAoB,GAEjEltB,QAAQ,MAAMkU,cAAczU,IAAI,GAEhCu2B,aAAah2B,MAAMg2B,YACnBC,MAAMj2B,MAAMk2B,QACZ/wB,UAAUnF,MAAMmF,SAChBgkB,UAAUnpB,MAAMmpB;AAEtB,QAAKhkB,WAAW,CAACgkB,WAAaA,WAAW,CAAChkB;AAClC,YAAA,IAAI7G,MAAM,qEAAqE;AAGvF,QAAI,CAAC4G,IAAI;AACPrF,aAAOC,MAAMuC,OAAMwF,IAAI,sCAAsC,CAAC;AAC9D,YAAMunB,aAAa,MAAMC,kBAAkBtvB,OAAO,GAC5C+W,QAAQ,IAAIC,0BAAM;AAAA,QACtBE,OAAO;AAAA,QACPD,SAAS,CACP;AAAA,UAAC3Y,MAAM;AAAA,UAAM4Y,OAAO;AAAA,UAAMC,WAAW;AAAA,QAAA,GACrC;AAAA,UAAC7Y,MAAM;AAAA,UAAS4Y,OAAO;AAAA,UAASC,WAAW;AAAA,QAAO,CAAA;AAAA,MAAA,CAErD;AAEDkY,iBAAW9oB,QAASgpB,CAAqB,qBAAA;AACvCxY,cAAMK,OAAO;AAAA,UAACjS,IAAIoqB,iBAAiBpqB;AAAAA,UAAI+R,OAAOqY,iBAAiBC,UAAUtY;AAAAA,QAAAA,CAAM;AAAA,MAAA,CAChF,GACDH,MAAMQ,cACNzX,OAAOuC,MAAM,sDAAsD;AAEnE;AAAA,IAAA;AAISqtB,kBAAA;AAAA,MACPvN,QAAQ,OAAOljB,QAAQuC,QAAQkD,MAAM,CAAC,CAAC;AAAA,MACvCirB,WAAW;AAAA,QAAC,kBAAkB;AAAA,MAAA;AAAA,IAAI,CACnC;AAGGQ,UAAAA,aAAazB,uBAAuB1uB,SAASmF,EAAE,GAC/CixB,kBAAkBjG,WAAWna,OAAOgZ,yBAAyB;AAEnE,QAAIoH,gBAAgBluB,SAAS;AAErB,YAAA,IAAI3J,MACR,kCAAkC4G,EAAE,QAAQ7C,OAAMgE,KAAK0vB,uBAAuB,CAAC;AAAA,KAAU7F,WACtFnrB,IAAKorB,CAAAA,cAAc3vB,cAAK41B,QAAAA,SAASL,yBAAyB5F,UAAUtB,YAAY,CAAC,EACjFzuB,KAAK;AAAA,IAAO,CAAC,EAClB;AAGI4uB,UAAAA,SAASmH,gBAAgB,CAAC;AAChC,QAAI,CAACnH;AACG,YAAA,IAAI1wB,MACR,2BAA2B4G,EAAE,QAAQ7C,OAAMgE,KAAKhE,OAAMgE,KAAK0vB,uBAAuB,CAAC,CAAC;AAAA;AAAA;AAAA,KAC1D7F,WAC9BnrB,IAAKorB,CAAAA,cAAc3vB,cAAK41B,QAAAA,SAASL,yBAAyB5F,UAAUtB,YAAY,CAAC,EACjFzuB,KAAK;AAAA,IAAO,CAAC,EACX;AAGF,UAAM0uB,MAAME,OAAOF;AACf,QAAA,QAAQA,OAAO,UAAUA;AAGrB,YAAA,IAAIxwB,MACR,8EACF;AAGF,UAAMixB,YAAuBT,IAAIrkB;AAEjC,QAAIurB,cAAc,CAACC;AACX,YAAA,IAAI33B,MAAM,wDAAwD;AAG1E,UAAMmX,cAAczV,MAAMyV;AAC1B,QAAIA,gBAAgBhI,QAAW;AAC7B,UAAIgI,cAAcmgB,QAAAA;AAChB,cAAM,IAAIt3B,MACR,oDAAoDs3B,QAAAA,wBAAwB,EAC9E;AAGF,UAAIngB,gBAAgB;AAClB,cAAM,IAAInX,MAAM,8CAA8CmX,WAAW,EAAE;AAAA,IAAA;AAI/E,UAAM4gB,gBAAgB5vB,UAAU;AAAA,MAC9BE,aAAa;AAAA,MACbC,gBAAgB;AAAA,IACjB,CAAA,EAAEG,OAAO;AAEN,QAAA,CAACoiB,WAAW,CAACkN,cAAcjxB;AACvB,YAAA,IAAI9G,MACR,6GACF;AAGF,UAAMg4B,YAAY;AAAA,MAChBnxB,SAASA,WAAWkxB,cAAclxB;AAAAA,MAClCC,WAAW+jB,WAAWkN,cAAcjxB;AAAAA,MACpCmxB,SAASF,cAAcE;AAAAA,MACvB3oB,OAAOyoB,cAAczoB;AAAAA,MACrB9G,YAAY;AAAA,IACd;AACA,QAAImvB,KAAK;AACO,oBAAA;AACd;AAAA,IAAA;AAUF,QAPAp2B,OAAOuC,MACL;AAAA,EAAKC,OAAMqW,OAAOrW,OAAMoS,KAAK,qDAAqD,CAAC,CAAC,EACtF,GACA5U,OAAOuC,MACL,wDAAwDC,OAAMgE,KAAK,eAAe,C
AAC;AAAA,CACrF,GAEIrG,MAAMw2B,WAQJ,CAPa,MAAMrrB,OAAOM,OAAgB;AAAA,MAC5CtJ,SAAS,kCAAkCE,OAAMqW,OAC/CrW,OAAMoS,KAAK6hB,UAAUnxB,OAAO,CAC9B,CAAC,eAAe9C,OAAMqW,OAAOrW,OAAMoS,KAAK6hB,UAAUlxB,SAAS,CAAC,CAAC;AAAA,MAC7DsC,MAAM;AAAA,IAAA,CACP,GAEc;AACb2E,cAAM,wBAAwB;AAC9B;AAAA,IAAA;AAIJ,UAAMtL,UAAUlB,OAAOkB,QAAQ,sBAAsBmE,EAAE,GAAG,EAAE9G,MAAM;AAClE,UAAMq4B,YAAI;AAAA,MAAClN,KAAK+M;AAAAA,MAAW7gB;AAAAA,MAAaiL,YAAYgW,eAAe31B,OAAO;AAAA,IAAA,GAAIwuB,SAAS,GACvFxuB,QAAQ41B,KAAK;AAEb,aAASD,eAAehiB,iBAAoD;AAC1E,aAAO,SAAoBnF,WAA6B;AAClD,YAAA,CAACvP,MAAMuP,UAAU;AACnBmF,0BAAgBiiB,KAAK;AACrB;AAAA,QAAA;AAEF,YAAIpnB,UAASqnB,MAAM;AACDjuB,0BAAAA,OAAO,cAAczD,EAAE;AAAA;AAAA,iBAEhC7C,OAAMoS,KAAK6hB,UAAUlxB,SAAS,CAAC;AAAA,iBAC/B/C,OAAMoS,KAAK6hB,UAAUnxB,OAAO,CAAC;AAAA;AAAA,IAE1CoK,UAAS+W,SAAS;AAAA,IAClB/W,UAASmX,SAAS;AAAA,IAClBrkB,OAAMgM,MAAMkB,UAASsnB,sBAAsB5uB,MAAM,CAAC,4BAC5CyM,gBAAgBoiB,eAAe;AAAA,YAACC,QAAQ10B,OAAMgM,MAAM,QAAG;AAAA,UAAA,CAAE;AACzD;AAAA,QAAA;AAGD,SAAC,MAAM,GAAGkB,UAASynB,mBAAmB,EAAE1wB,QAASmD,CAAgB,gBAAA;AAChEiL,0BAAgB/L,OAAO,sBAAsBzD,EAAE,KAAK+wB,MAAM,mBAAmB,KAAK;AAAA;AAAA,oBAExE5zB,OAAMoS,KAAK6hB,UAAUlxB,SAAS,CAAC;AAAA,oBAC/B/C,OAAMoS,KAAK6hB,UAAUnxB,OAAO,CAAC;AAAA,oBAC7B9C,OAAMoS,KAAK8a,UAAUjC,eAAeltB,KAAK,GAAG,CAAC,CAAC;AAAA;AAAA,IAE9DmP,UAAS+W,SAAS;AAAA,IAClB/W,UAASmX,SAAS;AAAA,IAClBrkB,OAAM40B,KAAK1nB,UAAS2nB,OAAO,CAAC;AAAA,IAC5B70B,OAAMgM,MAAMkB,UAASsnB,sBAAsB5uB,MAAM,CAAC;AAAA;AAAA,IAGlDwB,eAAe,CAAC8F,UAASqnB,OACrB,QAAK/D,aAAa;AAAA,YAACxwB,OAAAA;AAAAA,YAAOywB,SAASrpB;AAAAA,YAAa8lB;AAAAA,YAAWwD,YAAY;AAAA,UAAA,CAAE,CAAC,KAC1E,EAAC;AAAA,QAAA,CAEA;AAAA,MACH;AAAA,IAAA;AAGF,mBAAeoE,gBAAgB;AAC7Bt3B,aAAOuC,MAAM,sBAAsB8C,EAAE,eAAe,GAEhD8wB,cACFn2B,OAAOuC,MAAM,gBAAgBC,OAAMgE,KAAK2vB,UAAU,CAAC,EAAE,GAGvDn2B,OAAOuC,MAAM,GACbvC,OAAOuC,MAAM,gBAAgBC,OAAMoS,KAAK6hB,UAAUlxB,SAAS,CAAC,EAAE,GAC9DvF,OAAOuC,MAAM,gBAAgBC,OAAMoS,KAAK6hB,UAAUnxB,OAAO,CAAC,EAAE;AAE5D,uBAAiBivB,YAAY8B,eAAO;AAAA,QAAC3M,KAAK+M;AAAAA,QAAWc,YAAYpB;AAAAA,SAAazG,SAAS;AAChF6E,qBACLv0B,OAAOuC,MAAAA,GACPvC,OAAOuC,MACLywB,aAAa;AAAA,UACXxwB,OAAAA;AAAAA,UACAywB,SAASsB;AAAAA,UACT7E;AAAAA,QACD,CAAA,CACH;AAAA,IAAA;AAAA,EAEJ;AAEJ,GC7QMplB,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXktB,iBAAuC;AAAA,EAC3Ch5B,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,aAEsB,MAAM2L,mBAAiB,GAExB5L,MAAMC,OAAO;AAAA,EAEpCyK,UAAAA;AACF;AAEA,eAAekB,qBAAmB;AAUpB,UAAA,MAAM;mBAAO,oBAAqC;AAAA,EAAA,CAAA,GAEnDZ;AACb;AC9CA,MAAMP,gBAAc,gCAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAgBXmtB,sBAAsB;AAAA,EAC1Bj5B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,yBAAyC;AAAA,EAAA,CAAA,GAEvD+K,QAAQhL,MAA2DC,OAAO;AAEzF,GC/BMwK,gBAAc,8EAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXotB,uBAA6C;AAAA,EACjDl5B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,oBAAoC;AAAA,EAAA,CAAA,GAElD+K,QAAQhL,MAAMC,OAAO;AAEpC;AC7BA,IAAe,cAAA;AAAA,EACbrB,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACDA,MAAMA,gBAAc,6CAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAgBXqtB,qBAAqB;AAAA,EACzBn5B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,aACP,MAAM;;MAEP+K,QAAQhL,MAAyDC,OAAO;AAEvF,GC7BMwK,gBAAc,0CAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAiBXstB,qBAAqB;AAAA,EA
CzBp5B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,YAAY;AAC/B,UAAMovB,MAAM,MAAM;;QAEZ4I,eAAe;AAAA,MACnB,GAAGj4B;AAAAA,MACHQ,YAAY;AAAA,QACV,GAAGR,KAAKQ;AAAAA,QACR,mBAAmB;AAAA,MAAA;AAAA,IAEvB;AAEO6uB,WAAAA,IAAIrkB,QACTitB,cACAh4B,OACF;AAAA,EAAA;AAEJ,GC3CMwK,cAAc,wDAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAiBX0f,2BAAiD;AAAA,EACrDxrB,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO7K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,qBAAqC;AAAA,EAAA,CAAA,GAEnD+K,QAAQhL,MAAMC,OAAO;AAEpC,GCtBMyK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXwtB,eAAqC;AAAA,EACzCt5B,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN7K,MACAC,YACG;AACG,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,MAAO8I;AAAAA,IAAM,IAAIzL,SAC1B0L,gBAAgB,MAAMC,oBAEtBE,OAAQD,CAAAA,QAAgBzL,OAAO0L,KAAKlJ,OAAMqW,OAAOlN,QAAQF,GAAG,CAAC,GAC7DxL,QAASwL,CAAgBzL,QAAAA,OAAO0L,KAAKlJ,OAAMwF,IAAI2D,QAAQF,GAAG,CAAC;AACjEC,SAAK,gXAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,oEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,oEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,gXAA+D,GACpEA,KAAK,EAAE;AAEH,QAAA;AACIH,YAAAA,cAAc3L,MAAMC,OAAO;AAAA,aAC1BE,KAAK;AACZ,UAAIA,IAAIvB,SAAS;AACTuB,cAAAA;AAGFA,YAAAA,IAAIuC,OAAO,GACjBrC,MAAM;AAAA,CAAI,GAGR+K,iBACC,MAAMM,OAAOM,OAAO;AAAA,QACnBtJ,SAAS;AAAA,QACTuF,MAAM;AAAA,MAAA,CACP,IAID,OADkB,MAAMkD,gBACRnL,MAAMC,OAAO,IAI7BV,QAAQ0M,KAAK,CAAC;AAAA,IAAA;AAAA,EAGpB;AAAA,EACAvB,UAAAA;AACF;AAEA,eAAekB,mBAAmB;AAUpB,UAAA,MAAM;mBAAO,oBAAqC;AAAA,EAAA,CAAA,GAEnDZ;AACb;AC5FO,SAASmtB,mBAAmBz1B,SAAiB;AAClD,SAAQvC,CAAsE,QAAA;AAC5E,UAAIA,IAAIoM,eAAe,QACrBpM,IAAIuC,UAAUA,UACRvC;AAAAA,EAIV;AACF;ACJA,MAAMuK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAmBX0tB,oBAAuD;AAAA,EAC3Dx5B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQsL;AAAAA,IAAUzL,IAAAA,SAC9B,CAACo4B,aAAa,IAAIr4B,KAAKqJ,oBACvB9I,QAAQP,KAAKQ,YAEbyG,SAASD,UAAYqc,EAAAA,MAAAA,EAAQ/b,OAAO;AAAA,MAACgxB,oBAAoB;AAAA,MAAOjxB,YAAY;AAAA,IAAA,CAAa,GACzF;AAAA,MAAC1B;AAAAA,IAAAA,IAAasB,OAAOK,UACrBixB,SAAS,MAAMtxB,OAAOqH,QAAgB;AAAA,MAACI,KAAK,aAAa/I,SAAS;AAAA,IAAA,CAAS,GAAG2Q,OACjFkiB,CAAAA,UAASA,MAAKC,cACjB,GACMC,QAAQL,iBAAkB,MAAMM,eAAejtB,MAAM,GACrDktB,eAAer4B,MAAMi4B,QAAS,MAAMK,cAAcntB,QAAQ6sB,KAAK,GAC/DC,OAAOD,MAAMluB,KAAK,CAAC;AAAA,MAACzL;AAAAA,UAAUA,KAAKoO,YAAAA,MAAkB4rB,aAAa5rB,aAAa;AACrF,QAAI,CAACwrB;AACH,YAAM,IAAI35B,MAAM,cAAc+5B,YAAY,aAAa;AAGnD3xB,UAAAA,OACHoc,MAAM,EACN/U,QAAQ;AAAA,MACPC,QAAQ;AAAA,MACRG,KAAK,wBAAwB/I,SAAS;AAAA,MACtC+G,MAAM;AAAA,QAACgsB;AAAAA,QAAOF,MAAMA,KAAK55B;AAAAA,MAAI;AAAA,MAC7Bk6B,cAAc;AAAA,MACdxmB,cAAc;AAAA,IACf,CAAA,EACAkP,MACC2W,mBACE,yGACF,CACF,GAEF/3B,OAAOuC,MAAM,sBAAsB+1B,KAAK,EAAE;AAAA,EAAA;AAE9C;AAIA,SAASC,eAAejtB,QAAsC;AAC5D,SAAOA,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACT4T,QAASyiB,CAAQA,QAAAA,IAAI5c,KAAK;AAAA,IAC1B9O,UAAWzO,UACL,CAACA,QAAQ,CAACA,KAAK8Z,SAAS,GAAG,IACtB,kBAGF;AAAA,EAAA,CAEV;AACH;AAEA,SAASmgB,cAAcntB,QAAqB6sB,OAAgC;AAC1E,SAAO7sB,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNvF,SAAS;AAAA,IACTmL,SAAS0qB,MAAMjzB,IAAKkzB,CAAU,UAAA;AAAA,MAC5BlwB,OAAOkwB,KAAK55B;AAAAA,MACZA,MAAM,GAAG45B,KAAKhhB,KAAK,KAAKghB,KAAK/tB,WAAW;AAAA,IAAA,EACxC;AAAA,EAAA,CACH;AACH;ACvFA,MAAMuuB,aAAa,CAAC,MAAM,QAAQ,QAAQ,MAAM,GAE1CtuB,WAAW;AAAA;AAAA;AAAA;AAAA,mDAIkCsuB,WAAWr4B,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA,GAclEs4B,mBAAyC;AAAA,EAC7Cr6B,MAAM;AAAA,EACNgM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO7K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAC7B;AAAA,MAACi5B;AAAAA,MAAMC;AAAAA,MAAOC;AAAAA,MAAQC;AAAAA,IAAAA,IAAe;AAAA,MACzCH,MAAM;AAAA,MACNC,OAAO;AAAA,MACPC,QAAQ;AAAA,MACRC,aAAa;AAAA,MACb,GAAGr5B,KAAKQ;AAAAA,IACV;AAEI,QAAA,CAACw4B,WAAWtgB,SAASwgB,IAAI;AACrB,YAAA,IAAIr6B,MAAM,wBAAwBq6B,IAAI,qBAAqBF,WAAWr4B,KAAK,IAAI,CAAC,EAAE;AAGtFw4B,QAAAA,UAAU,SAASA,UAAU;AAC/B,YAAM,IAAIt6B,MAAM,uBAAuBs6B,KAAK,mCAAmC;AAGjF,UAAMlyB,SAASD,aACTsyB,eAAeryB,OAAOoc,QAAQ/b,OAAO;AAAA,MAACgxB,oBAAoB;AAAA,IAAA,CAAM,GAChE;AAAA,MAAC3yB;AAAAA,QAAasB,OAAOK,OAAAA,GAErBwxB,eAAe,IACf,CAACS,oBAAoB7P,OAAO,IAAI,MAAMnmB,QAAQY,IAAI,CACtDk1B,cACIC,aACGhrB,QAAkB;AAAA,MAACI,KAAK,wBAAwB/I,SAAS;AAAA,MAAImzB;AAAAA,IAAAA,CAAa,EAC1E1d,KAAKoe,qBAAqB,IAC7B,CAAA,GACJF,aAAahrB,QAAgC;AAAA,MAACI,KAAK,aAAa/I,SAAS;AAAA,MAAImzB;AAAAA,IAAa,CAAA,CAAC,CAC5F,GAEKW,YAAY/P,QAAQgQ,QAAQp0B,IAAKq0B,CAAAA,WAAWA,OAAOl0B,EAAE,GACrDm0B,QAAQ,MAAMN,aACjBhrB,QAAuB;AAAA,MAACI,KAAK,UAAU+qB,UAAU94B,KAAK,GAAG,CAAC;AAAA,MAAIm4B;AAAAA,IAAAA,CAAa,EAC3E1d,KAAMye,UAAU9S,MAAMC,QAAQ6S,IAAI,IAAIA,OAAO,CAACA,IAAI,CAAE,GASjDH,UAAU,CAAC,GAPMhQ,QAAQgQ,QAC5Bp0B,IAAKq0B,CAAY,YAAA;AAAA,MAChB,GAAGA;AAAAA,MACH,GAAGG,aAAaF,MAAMvvB,KAAMqmB,eAAcA,UAAUjrB,OAAOk0B,OAAOl0B,EAAE,CAAC;AAAA,IAAA,EACrE,EACD6Q,OAAQqjB,CAAW,WAAA,CAACA,OAAOI,WAAWX,MAAM,GAEX,GAAGG,kBAAkB,GAEnDn0B,UAAUC,gBACdq0B,QAAAA,QAAQp0B,IAAI,CAAC;AAAA,MAACG;AAAAA,MAAI7G;AAAAA,MAAM45B;AAAAA,MAAM1gB;AAAAA,IAAAA,MAAU,CAACrS,IAAI7G,MAAM45B,MAAM1gB,IAAI,CAAC,GAC9D,CAACkhB,WAAWrX,QAAQuX,IAAI,CAAC,CAC3B,GAEMpzB,OAAOqzB,UAAU,QAAQ/zB,UAAUA,QAAQW,QAAQ,GAEnDC,YAAYF,KAAK9B,OACrB,CAACiC,KAAKC,QAAQA,IAAIZ,IAAI,CAACa,SAASC,UAAUC,KAAKJ,IAAIK,cAAAA,QAAKH,OAAO,GAAGF,IAAIG,KAAK,CAAC,CAAC,GAC7E4yB,WAAW1zB,IAAKiB,CAAQD,QAAAA,cAAAA,QAAKC,GAAG,CAAC,CACnC,GAEMC,WAAYN,CAAkB,QAAA;AAC5B8zB,YAAAA,WAAW9zB,IAAI,CAAC,MAAM,aACtB+zB,UAAU/zB,IAAIZ,IAAI,CAACmB,KAAKC,MAAM,GAAGD,GAAG,GAAGE,OAAOX,UAAUU,CAAC,CAAC,CAAC,EAAE/F,KAAK,KAAK;AAC7E,aAAOq5B,WAAWp3B,OAAMs3B,IAAID,OAAO,IAAIA;AAAAA,IACzC;AAEA75B,WAAOuC,MAAMC,OAAMgE,KAAKJ,SAASwyB,UAAU,CAAC,CAAC,GAC7ClzB,KAAKe,QAASX,SAAQ9F,OAAOuC,MAAM6D,SAASN,GAAG,CAAC,CAAC;AAAA,EAAA;AAErD;AAEA,SAAS4zB,aAAaD,MAAwB;AACtC,QAAA;AAAA,IAACM,aAAav7B;AAAAA,IAAMmD,WAAW+V;AAAAA,EAAI,IAAI+hB,QAAQ,CAAC;AAC/C,SAAA;AAAA,IAACj7B,MAAMA,QAAQ;AAAA,IAAIkZ,MAAMA,QAAQ;AAAA,EAAE;AAC5C;AAEA,SAAS0hB,sBAAsBH,aAAuB;AACpD,SAAOA,YACJ/iB,OAAQ8jB,CAAW,WAAA,CAACA,OAAOC,cAAc,CAACD,OAAOE,aAAa,CAACF,OAAOG,gBAAgB,EACtFj1B,IAAK80B,CAAY,YAAA;AAAA,IAChB30B,IAAI;AAAA,IACJ7G,MAAMw7B,OAAO1B;AAAAA,IACbF,MAAM4B,OAAO5B;AAAAA,IACb1gB,MAAMsiB,OAAOr4B;AAAAA,EAAAA,EACb;AACN;AChHO,MAAMy4B,aAAwC;AAAA,EACnD57B,MAAM;AAAA,EACN2L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GC2DMgwB,eAAqE,CACzEnwB,UACAW,kBACAC,eACAP,iBACAc,iBACAwM,cACAyiB,cACA1V,eACAC,iBACAF,qBACA1F,sBACAM,0BACAgB,sBACA2B,sBACAxC,sBACAlB,oBACA+b,cACAC,oBACAC,0BACAjmB,uBACAkmB,6BACAC,4BACAjhB,WACAM,wBACAP,sBACAE,yBACAygB,YACApC,mBACAa,kBACA3N,WACA+B,kBACA3C,mBACAsQ,gBACA3M,wBACAgI,qBACA4E,sBACAjQ,mBACAmB,qBACAZ,yBACA7C,gBACAc,qBACA0R,uBACA9S,wBACAlD,wBACAkF,4BACAI,cACAC,wBACAF,yBACAD,yBACA9e,YACA0sB,cACAiD,aACAC,0BACAtD,sBACAF,gBACAvN,aACAgR,eACA9N,sBAAsB,GAIlB+N,yBAAyB,CAACvD,oBAAoBC,oBAAoBH,mBAAmB,GAGrF0D,WAAiE,CACrE,GAAGd,cACH,GAAI1zB,uBAAuBu0B,yBAAyB,EAAG,GAO5CE,qBAAqB;AAAA,EAChCC,yBAAyB;AAAA,EACzBF;AACF;;;;;;;;;;;;;;"}
1
+ {"version":3,"file":"_internal.js","sources":["../../src/_internal/cli/util/timing.ts","../../src/_internal/cli/actions/manifest/extractManifestAction.ts","../../src/_internal/cli/actions/schema/schemaListAction.ts","../../src/_internal/cli/actions/schema/storeSchemasAction.ts","../../src/_internal/cli/commands/app/appGroup.ts","../../src/_internal/cli/commands/app/buildCommand.ts","../../src/_internal/cli/commands/app/deployCommand.ts","../../src/_internal/cli/commands/app/devCommand.ts","../../src/_internal/cli/util/isInteractive.ts","../../src/_internal/cli/commands/dev/devCommand.ts","../../src/_internal/cli/commands/app/startCommand.ts","../../src/_internal/cli/commands/backup/backupGroup.ts","../../src/_internal/cli/actions/backup/parseApiErr.ts","../../src/_internal/cli/debug.ts","../../src/_internal/cli/actions/dataset/validateDatasetName.ts","../../src/_internal/cli/actions/dataset/datasetNamePrompt.ts","../../src/_internal/cli/actions/dataset/chooseDatasetPrompt.ts","../../src/_internal/cli/actions/backup/resolveApiClient.ts","../../src/_internal/cli/commands/backup/disableBackupCommand.ts","../../src/_internal/cli/actions/backup/debug.ts","../../src/_internal/cli/actions/backup/archiveDir.ts","../../src/_internal/cli/actions/backup/chooseBackupIdPrompt.ts","../../src/_internal/cli/actions/backup/cleanupTmpDir.ts","../../src/_internal/cli/actions/backup/withRetry.ts","../../src/_internal/cli/actions/backup/downloadAsset.ts","../../src/_internal/cli/actions/backup/downloadDocument.ts","../../src/_internal/cli/actions/backup/fetchNextBackupPage.ts","../../src/_internal/cli/actions/backup/progressSpinner.ts","../../src/_internal/cli/util/humanFileSize.ts","../../src/_internal/cli/util/isPathDirName.ts","../../src/_internal/cli/commands/backup/downloadBackupCommand.ts","../../src/_internal/cli/commands/backup/enableBackupCommand.ts","../../src/_internal/cli/commands/backup/listBackupCommand.ts","../../src/_internal/cli/commands/build/buildCommand.ts","../../src/_internal/cli/actions/cors/addCorsOrigin.ts","../../src/_internal/cli/commands/cors/addCorsOriginCommand.ts","../../src/_internal/cli/commands/cors/corsGroup.ts","../../src/_internal/cli/commands/cors/deleteCorsOriginCommand.ts","../../src/_internal/cli/commands/cors/listCorsOriginsCommand.ts","../../src/_internal/cli/actions/dataset/alias/validateDatasetAliasName.ts","../../src/_internal/cli/actions/dataset/alias/promptForDatasetAliasName.ts","../../src/_internal/cli/commands/dataset/alias/datasetAliasesClient.ts","../../src/_internal/cli/commands/dataset/alias/createAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/deleteAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/linkAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/unlinkAliasHandler.ts","../../src/_internal/cli/commands/dataset/alias/aliasCommands.ts","../../src/_internal/cli/actions/dataset/listDatasetCopyJobs.ts","../../src/_internal/cli/util/getClientUrl.ts","../../src/_internal/cli/commands/dataset/copyDatasetCommand.ts","../../src/_internal/cli/commands/dataset/createDatasetCommand.ts","../../src/_internal/cli/commands/dataset/datasetGroup.ts","../../src/_internal/cli/commands/dataset/datasetVisibilityCommand.ts","../../src/_internal/cli/commands/dataset/deleteDatasetCommand.ts","../../src/_internal/cli/commands/dataset/exportDatasetCommand.ts","../../src/_internal/cli/commands/dataset/importDatasetCommand.ts","../../src/_internal/cli/commands/dataset/alias/listAliasesHandler.ts","../../src/_internal/cli/commands/da
taset/listDatasetsCommand.ts","../../src/_internal/cli/commands/deploy/deployCommand.ts","../../src/_internal/cli/commands/deploy/undeployCommand.ts","../../src/_internal/cli/commands/documents/createDocumentsCommand.ts","../../src/_internal/cli/commands/documents/deleteDocumentsCommand.ts","../../src/_internal/cli/commands/documents/documentsGroup.ts","../../src/_internal/cli/util/colorizeJson.ts","../../src/_internal/cli/commands/documents/getDocumentsCommand.ts","../../src/_internal/cli/commands/documents/queryDocumentsCommand.ts","../../src/_internal/cli/commands/documents/validateDocumentsCommand.ts","../../src/_internal/cli/commands/exec/execCommand.ts","../../src/_internal/cli/commands/graphql/deleteGraphQLAPICommand.ts","../../src/_internal/cli/commands/graphql/deployGraphQLAPICommand.ts","../../src/_internal/cli/commands/graphql/graphqlGroup.ts","../../src/_internal/cli/commands/graphql/listGraphQLAPIsCommand.ts","../../src/_internal/cli/commands/hook/createHookCommand.ts","../../src/_internal/cli/commands/hook/deleteHookCommand.ts","../../src/_internal/cli/commands/hook/hookGroup.ts","../../src/_internal/cli/commands/hook/printHookAttemptCommand.ts","../../src/_internal/cli/commands/hook/listHookLogsCommand.ts","../../src/_internal/cli/commands/hook/listHooksCommand.ts","../../src/_internal/cli/commands/manifest/extractManifestCommand.ts","../../src/_internal/cli/commands/manifest/manifestGroup.ts","../../src/_internal/cli/commands/migration/constants.ts","../../src/_internal/cli/commands/migration/templates/minimalAdvanced.ts","../../src/_internal/cli/commands/migration/templates/minimalSimple.ts","../../src/_internal/cli/commands/migration/templates/renameField.ts","../../src/_internal/cli/commands/migration/templates/renameType.ts","../../src/_internal/cli/commands/migration/templates/stringToPTE.ts","../../src/_internal/cli/commands/migration/createMigrationCommand.ts","../../src/_internal/cli/commands/migration/utils/resolveMigrationScript.ts","../../src/_internal/cli/commands/migration/listMigrationsCommand.ts","../../src/_internal/cli/commands/migration/migrationGroup.ts","../../src/_internal/cli/util/tree.ts","../../src/_internal/cli/commands/migration/prettyMutationFormatter.ts","../../src/_internal/cli/commands/migration/runMigrationCommand.ts","../../src/_internal/cli/commands/preview/previewCommand.ts","../../src/_internal/cli/commands/schema/deleteSchemaCommand.ts","../../src/_internal/cli/commands/schema/extractSchemaCommand.ts","../../src/_internal/cli/commands/schema/schemaGroup.ts","../../src/_internal/cli/commands/schema/schemaListCommand.ts","../../src/_internal/cli/commands/schema/storeSchemaCommand.ts","../../src/_internal/cli/commands/schema/validateSchemaCommand.ts","../../src/_internal/cli/commands/start/startCommand.ts","../../src/_internal/cli/util/prettifyQuotaError.ts","../../src/_internal/cli/commands/users/inviteUserCommand.ts","../../src/_internal/cli/commands/users/listUsersCommand.ts","../../src/_internal/cli/commands/users/usersGroup.ts","../../src/_internal/cli/commands/index.ts"],"sourcesContent":["import {performance} from 'node:perf_hooks'\n\nexport interface TimeMeasurer {\n start: (name: string) => void\n end: (name: string) => number\n getTimings: () => Record<string, number>\n}\n\nexport function getTimer(): TimeMeasurer {\n const timings: Record<string, number> = {}\n const startTimes: Record<string, number> = {}\n\n function start(name: string): void {\n if (typeof startTimes[name] !== 'undefined') {\n throw new Error(`Timer \"${name}\" 
already started, cannot overwrite`)\n }\n\n startTimes[name] = performance.now()\n }\n\n function end(name: string): number {\n if (typeof startTimes[name] === 'undefined') {\n throw new Error(`Timer \"${name}\" never started, cannot end`)\n }\n\n timings[name] = performance.now() - startTimes[name]\n return timings[name]\n }\n\n return {start, end, getTimings: () => timings}\n}\n","import {createHash} from 'node:crypto'\nimport {mkdir, writeFile} from 'node:fs/promises'\nimport {dirname, join, resolve} from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {type CliCommandArguments, type CliCommandContext} from '@sanity/cli'\nimport chalk from 'chalk'\nimport {minutesToMilliseconds} from 'date-fns'\nimport readPkgUp from 'read-pkg-up'\n\nimport {\n type CreateManifest,\n type CreateWorkspaceManifest,\n type ManifestWorkspaceFile,\n} from '../../../manifest/manifestTypes'\nimport {type ExtractManifestWorkerData} from '../../threads/extractManifest'\nimport {getTimer} from '../../util/timing'\n\nexport const MANIFEST_FILENAME = 'create-manifest.json'\nconst SCHEMA_FILENAME_SUFFIX = '.create-schema.json'\nconst TOOLS_FILENAME_SUFFIX = '.create-tools.json'\n\n/** Escape-hatch env flags to change action behavior */\nconst FEATURE_ENABLED_ENV_NAME = 'SANITY_CLI_EXTRACT_MANIFEST_ENABLED'\nconst EXTRACT_MANIFEST_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME] !== 'false'\nconst EXTRACT_MANIFEST_LOG_ERRORS = process.env.SANITY_CLI_EXTRACT_MANIFEST_LOG_ERRORS === 'true'\n\nconst CREATE_TIMER = 'create-manifest'\n\nconst EXTRACT_TASK_TIMEOUT_MS = minutesToMilliseconds(2)\n\nconst EXTRACT_FAILURE_MESSAGE =\n \"↳ Couldn't extract manifest file. Sanity Create will not be available for the studio.\\n\" +\n ` Disable this message with ${FEATURE_ENABLED_ENV_NAME}=false`\n\nexport interface ExtractManifestFlags {\n path?: string\n}\n\n/**\n * This function will never throw.\n * @returns `undefined` if extract succeeded - caught error if it failed\n */\nexport async function extractManifestSafe(\n args: CliCommandArguments<ExtractManifestFlags>,\n context: CliCommandContext,\n): Promise<Error | undefined> {\n if (!EXTRACT_MANIFEST_ENABLED) {\n return undefined\n }\n\n try {\n await extractManifest(args, context)\n return undefined\n } catch (err) {\n if (EXTRACT_MANIFEST_LOG_ERRORS) {\n context.output.error(err)\n }\n return err\n }\n}\n\nasync function extractManifest(\n args: CliCommandArguments<ExtractManifestFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const {output, workDir} = context\n\n const flags = args.extOptions\n const defaultOutputDir = resolve(join(workDir, 'dist'))\n\n const outputDir = resolve(defaultOutputDir)\n const defaultStaticPath = join(outputDir, 'static')\n\n const staticPath = flags.path ?? 
defaultStaticPath\n\n const path = join(staticPath, MANIFEST_FILENAME)\n\n const rootPkgPath = readPkgUp.sync({cwd: __dirname})?.path\n if (!rootPkgPath) {\n throw new Error('Could not find root directory for `sanity` package')\n }\n\n const timer = getTimer()\n timer.start(CREATE_TIMER)\n const spinner = output.spinner({}).start('Extracting manifest')\n\n try {\n const workspaceManifests = await getWorkspaceManifests({rootPkgPath, workDir})\n await mkdir(staticPath, {recursive: true})\n\n const workspaceFiles = await writeWorkspaceFiles(workspaceManifests, staticPath)\n\n const manifest: CreateManifest = {\n /**\n * Version history:\n * 1: Initial release.\n * 2: Added tools file.\n */\n version: 2,\n createdAt: new Date().toISOString(),\n workspaces: workspaceFiles,\n }\n\n await writeFile(path, JSON.stringify(manifest, null, 2))\n const manifestDuration = timer.end(CREATE_TIMER)\n\n spinner.succeed(`Extracted manifest (${manifestDuration.toFixed()}ms)`)\n } catch (err) {\n spinner.fail(err.message)\n output.print(chalk.gray(EXTRACT_FAILURE_MESSAGE))\n throw err\n }\n}\n\nasync function getWorkspaceManifests({\n rootPkgPath,\n workDir,\n}: {\n rootPkgPath: string\n workDir: string\n}): Promise<CreateWorkspaceManifest[]> {\n const workerPath = join(\n dirname(rootPkgPath),\n 'lib',\n '_internal',\n 'cli',\n 'threads',\n 'extractManifest.js',\n )\n\n const worker = new Worker(workerPath, {\n workerData: {workDir} satisfies ExtractManifestWorkerData,\n // eslint-disable-next-line no-process-env\n env: process.env,\n })\n\n let timeout = false\n const timeoutId = setTimeout(() => {\n timeout = true\n worker.terminate()\n }, EXTRACT_TASK_TIMEOUT_MS)\n\n try {\n return await new Promise<CreateWorkspaceManifest[]>((resolveWorkspaces, reject) => {\n const buffer: CreateWorkspaceManifest[] = []\n worker.addListener('message', (message) => buffer.push(message))\n worker.addListener('exit', (exitCode) => {\n if (exitCode === 0) {\n resolveWorkspaces(buffer)\n } else if (timeout) {\n reject(new Error(`Extract manifest was aborted after ${EXTRACT_TASK_TIMEOUT_MS}ms`))\n }\n })\n worker.addListener('error', reject)\n })\n } finally {\n clearTimeout(timeoutId)\n }\n}\n\nfunction writeWorkspaceFiles(\n manifestWorkspaces: CreateWorkspaceManifest[],\n staticPath: string,\n): Promise<ManifestWorkspaceFile[]> {\n const output = manifestWorkspaces.reduce<Promise<ManifestWorkspaceFile>[]>(\n (workspaces, workspace) => {\n return [...workspaces, writeWorkspaceFile(workspace, staticPath)]\n },\n [],\n )\n return Promise.all(output)\n}\n\nasync function writeWorkspaceFile(\n workspace: CreateWorkspaceManifest,\n staticPath: string,\n): Promise<ManifestWorkspaceFile> {\n const [schemaFilename, toolsFilename] = await Promise.all([\n createFile(staticPath, workspace.schema, SCHEMA_FILENAME_SUFFIX),\n createFile(staticPath, workspace.tools, TOOLS_FILENAME_SUFFIX),\n ])\n\n return {\n ...workspace,\n schema: schemaFilename,\n tools: toolsFilename,\n }\n}\n\nconst createFile = async (path: string, content: any, filenameSuffix: string) => {\n const stringifiedContent = JSON.stringify(content, null, 2)\n const hash = createHash('sha1').update(stringifiedContent).digest('hex')\n const filename = `${hash.slice(0, 8)}${filenameSuffix}`\n\n // workspaces with identical data will overwrite each others file. 
This is ok, since they are identical and can be shared\n await writeFile(join(path, filename), stringifiedContent)\n\n return filename\n}\n","import {type CliCommandArguments, type CliCommandContext, type CliOutputter} from '@sanity/cli'\nimport {type SanityDocument} from '@sanity/client'\nimport chalk from 'chalk'\nimport {size, sortBy, uniqBy} from 'lodash'\n\nimport {type ManifestWorkspaceFile} from '../../../manifest/manifestTypes'\nimport {\n getManifestPath,\n readManifest,\n SCHEMA_STORE_ENABLED,\n throwIfProjectIdMismatch,\n} from './storeSchemasAction'\n\nexport interface SchemaListFlags {\n 'json': boolean\n 'id': string\n 'manifest-dir': string\n}\n\ntype PrintSchemaListArgs = {\n schemas: SanityDocument[]\n output: CliOutputter\n dataset: string\n projectId: string\n path: string\n}\n\nexport const SANITY_WORKSPACE_SCHEMA_TYPE = 'sanity.workspace.schema'\n\nconst printSchemaList = ({\n schemas,\n output,\n}: Omit<PrintSchemaListArgs, 'path' | 'dataset' | 'projectId'>) => {\n const ordered = sortBy(\n schemas.map(({_createdAt: createdAt, _id: id, workspace}) => {\n return [id, workspace.name, workspace.dataset, workspace.projectId, createdAt].map(String)\n }),\n ['createdAt'],\n )\n const headings = ['Id', 'Workspace', 'Dataset', 'ProjectId', 'CreatedAt']\n const rows = ordered.reverse()\n\n const maxWidths = rows.reduce(\n (max, row) => row.map((current, index) => Math.max(size(current), max[index])),\n headings.map((str) => size(str)),\n )\n\n const printRow = (row: string[]) => row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join(' ')\n\n output.print(chalk.cyan(printRow(headings)))\n rows.forEach((row) => output.print(printRow(row)))\n}\n\nexport default async function schemaListAction(\n args: CliCommandArguments<SchemaListFlags>,\n context: CliCommandContext,\n): Promise<void> {\n if (!SCHEMA_STORE_ENABLED) {\n return\n }\n\n const flags = args.extOptions\n if (typeof flags.id === 'boolean') throw new Error('Schema ID is empty')\n if (typeof flags['manifest-dir'] === 'boolean') throw new Error('Manifest directory is empty')\n\n const {apiClient, output} = context\n const client = apiClient({\n requireUser: true,\n requireProject: true,\n }).withConfig({apiVersion: 'v2024-08-01'})\n\n const projectId = client.config().projectId\n const dataset = client.config().dataset\n\n if (!projectId || !dataset) {\n output.error('Project ID and dataset must be defined.')\n return\n }\n\n const manifestDir = flags['manifest-dir']\n const manifestPath = getManifestPath(context, manifestDir)\n const manifest = await readManifest(manifestPath, context)\n\n // Gather all schemas\n const results = await Promise.allSettled(\n uniqBy<ManifestWorkspaceFile>(manifest.workspaces, 'dataset').map(async (workspace) => {\n throwIfProjectIdMismatch(workspace, projectId)\n if (flags.id) {\n // Fetch a specific schema by id\n const schemaRes = await client\n .withConfig({\n dataset: workspace.dataset,\n projectId: workspace.projectId,\n })\n .getDocument(flags.id)\n if (!schemaRes) {\n throw new Error(`Schema \"${flags.id}\" not found in dataset \"${workspace.dataset}\"`)\n }\n return schemaRes\n }\n // Fetch all schemas\n return await client\n .withConfig({\n dataset: workspace.dataset,\n projectId: workspace.projectId,\n useCdn: false,\n })\n .fetch<SanityDocument[]>(`*[_type == $type]`, {\n type: SANITY_WORKSPACE_SCHEMA_TYPE,\n })\n }),\n )\n\n // Log errors and collect successful results\n const schemas = results\n .map((result, index) => {\n if (result.status === 'rejected') {\n const workspace 
= manifest.workspaces[index]\n output.error(\n chalk.red(\n `Failed to fetch schemas for workspace '${workspace.name}': ${result.reason.message}`,\n ),\n )\n return []\n }\n return result.value\n })\n .flat()\n\n if (schemas.length === 0) {\n output.error(`No schemas found`)\n return\n }\n\n if (flags.json) {\n output.print(`${JSON.stringify(flags.id ? schemas[0] : schemas, null, 2)}`)\n } else {\n printSchemaList({schemas: schemas as SanityDocument[], output})\n }\n}\n","import {readFileSync, statSync} from 'node:fs'\nimport path, {join, resolve} from 'node:path'\n\nimport {type CliCommandArguments, type CliCommandContext} from '@sanity/cli'\nimport chalk from 'chalk'\n\nimport {type ManifestSchemaType, type ManifestWorkspaceFile} from '../../../manifest/manifestTypes'\nimport {\n type ExtractManifestFlags,\n extractManifestSafe,\n MANIFEST_FILENAME,\n} from '../manifest/extractManifestAction'\nimport {SANITY_WORKSPACE_SCHEMA_TYPE} from './schemaListAction'\n\nconst FEATURE_ENABLED_ENV_NAME = 'SANITY_CLI_SCHEMA_STORE_ENABLED'\nexport const SCHEMA_STORE_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME] === 'true'\n\nexport interface StoreManifestSchemasFlags {\n 'manifest-dir'?: string\n 'workspace'?: string\n 'id-prefix'?: string\n 'schema-required'?: boolean\n 'verbose'?: boolean\n}\n\nexport const getManifestPath = (context: CliCommandContext, customPath?: string) => {\n const defaultOutputDir = resolve(join(context.workDir, 'dist'))\n\n const outputDir = resolve(defaultOutputDir)\n const defaultStaticPath = join(outputDir, 'static')\n\n const staticPath = customPath ?? defaultStaticPath\n const manifestPath = path.resolve(process.cwd(), staticPath)\n return manifestPath\n}\n\n/**\n * Helper function to read and parse a manifest file with logging\n */\nconst readAndParseManifest = (manifestPath: string, context: CliCommandContext) => {\n const content = readFileSync(manifestPath, 'utf-8')\n const stats = statSync(manifestPath)\n const lastModified = stats.mtime.toISOString()\n context.output.print(\n chalk.gray(`↳ Read manifest from ${manifestPath} (last modified: ${lastModified})`),\n )\n return JSON.parse(content)\n}\n\nexport const readManifest = async (readPath: string, context: CliCommandContext) => {\n const manifestPath = `${readPath}/${MANIFEST_FILENAME}`\n\n try {\n return readAndParseManifest(manifestPath, context)\n } catch (error) {\n await extractManifestSafe(\n {\n extOptions: {path: readPath},\n groupOrCommand: 'extract',\n argv: [],\n argsWithoutOptions: [],\n extraArguments: [],\n } as CliCommandArguments<ExtractManifestFlags>,\n context,\n )\n\n // Try reading the manifest again after extraction\n try {\n return readAndParseManifest(manifestPath, context)\n } catch (retryError) {\n const errorMessage = `Failed to read manifest at ${manifestPath}`\n // We should log the error too for consistency\n context.output.error(errorMessage)\n throw retryError\n }\n }\n}\n\n// At the moment schema store deos not support studios where workspaces have multiple projects\nexport const throwIfProjectIdMismatch = (\n workspace: ManifestWorkspaceFile,\n projectId: string,\n): void => {\n if (workspace.projectId !== projectId) {\n throw new Error(\n `↳ No permissions to store schema for workspace ${workspace.name} with projectId: ${workspace.projectId}`,\n )\n }\n}\n\nexport default async function storeSchemasAction(\n args: CliCommandArguments<StoreManifestSchemasFlags>,\n context: CliCommandContext,\n): Promise<Error | undefined> {\n if (!SCHEMA_STORE_ENABLED) {\n return undefined\n 
}\n\n const flags = args.extOptions\n\n const schemaRequired = flags['schema-required']\n const workspaceName = flags.workspace\n const idPrefix = flags['id-prefix']\n const verbose = flags.verbose\n const manifestDir = flags['manifest-dir']\n\n if (typeof manifestDir === 'boolean') throw new Error('Manifest directory is empty')\n if (typeof idPrefix === 'boolean') throw new Error('Id prefix is empty')\n if (typeof workspaceName === 'boolean') throw new Error('Workspace is empty')\n\n const {output, apiClient} = context\n\n const manifestPath = getManifestPath(context, manifestDir)\n\n try {\n const client = apiClient({\n requireUser: true,\n requireProject: true,\n }).withConfig({apiVersion: 'v2024-08-01'})\n\n const projectId = client.config().projectId\n if (!projectId) throw new Error('Project ID is not defined')\n\n const manifest = await readManifest(manifestPath, context)\n\n let storedCount = 0\n\n let error: Error | undefined\n\n const saveSchema = async (workspace: ManifestWorkspaceFile) => {\n const id = `${idPrefix ? `${idPrefix}.` : ''}${SANITY_WORKSPACE_SCHEMA_TYPE}.${workspace.name}`\n try {\n throwIfProjectIdMismatch(workspace, projectId)\n const schema = JSON.parse(\n readFileSync(`${manifestPath}/${workspace.schema}`, 'utf-8'),\n ) as ManifestSchemaType\n await client\n .withConfig({\n dataset: workspace.dataset,\n projectId: workspace.projectId,\n })\n .transaction()\n .createOrReplace({_type: SANITY_WORKSPACE_SCHEMA_TYPE, _id: id, workspace, schema})\n .commit()\n storedCount++\n } catch (err) {\n error = err\n output.error(\n `Error storing schema for workspace '${workspace.name}':\\n${chalk.red(`${err.message}`)}`,\n )\n if (schemaRequired) throw err\n } finally {\n if (verbose) {\n output.print(\n chalk.gray(`↳ schemaId: ${id}, projectId: ${projectId}, dataset: ${workspace.dataset}`),\n )\n }\n }\n }\n\n // If a workspace name is provided, only save the schema for that workspace\n if (workspaceName) {\n const workspaceToSave = manifest.workspaces.find(\n (workspace: ManifestWorkspaceFile) => workspace.name === workspaceName,\n )\n if (!workspaceToSave) {\n output.error(`Workspace ${workspaceName} not found in manifest`)\n throw new Error(`Workspace ${workspaceName} not found in manifest: projectID: ${projectId}`)\n }\n await saveSchema(workspaceToSave as ManifestWorkspaceFile)\n output.success(`Stored 1 schemas`)\n } else {\n await Promise.all(\n manifest.workspaces.map(async (workspace: ManifestWorkspaceFile): Promise<void> => {\n await saveSchema(workspace)\n }),\n )\n output.success(`Stored ${storedCount}/${manifest.workspaces.length} schemas`)\n }\n\n if (error) throw error\n return undefined\n } catch (err) {\n // if this flag is set, throw the error and exit without deploying otherwise just log the error\n if (schemaRequired) throw err\n return err\n } finally {\n output.print(`${chalk.gray('↳ List stored schemas with:')} ${chalk.cyan('sanity schema list')}`)\n }\n}\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst appGroup: CliCommandGroupDefinition = {\n name: 'app',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages non-studio applications',\n}\n\nexport default appGroup\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type BuildSanityStudioCommandFlags} from '../../actions/build/buildAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --no-minify Skip minifying built 
JavaScript (speeds up build, increases size of bundle)\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n\nExamples\n sanity app build\n sanity app build --no-minify --source-maps\n`\n\nconst appBuildCommand: CliCommandDefinition = {\n name: 'build',\n group: 'app',\n signature: '[OUTPUT_DIR]',\n description: 'Builds the Sanity application configuration into a static bundle',\n action: async (\n args: CliCommandArguments<BuildSanityStudioCommandFlags>,\n context: CliCommandContext,\n overrides?: {basePath?: string},\n ) => {\n const buildAction = await getBuildAction()\n\n return buildAction(args, context, overrides)\n },\n helpText,\n}\n\nasync function getBuildAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/build/buildAction') = require('../../actions/build/buildAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/build/buildAction')\n\n return mod.default\n}\n\nexport default appBuildCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type DeployStudioActionFlags} from '../../actions/deploy/deployAction'\nimport {SCHEMA_STORE_ENABLED} from '../../actions/schema/storeSchemasAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n --no-build Don't build the application prior to deploy, instead deploying the version currently in \\`dist/\\`\n ${SCHEMA_STORE_ENABLED ? '--verbose Enable verbose logging for the schema store' : ''}\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n\nExamples\n sanity deploy\n sanity deploy --no-minify --source-maps\n`\n\nconst appDeployCommand: CliCommandDefinition = {\n name: 'deploy',\n group: 'app',\n signature: '[SOURCE_DIR] [--no-build] [--source-maps] [--no-minify]',\n description: 'Builds and deploys Sanity application to Sanity hosting',\n action: async (\n args: CliCommandArguments<DeployStudioActionFlags>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/deploy/deployAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default appDeployCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartDevServerCommandFlags} from '../../actions/dev/devAction'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new entry to the CORS-origins allow list.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. 
[default: \"127.0.0.1\"]\n\nExamples\n sanity app dev --host=0.0.0.0\n sanity app dev --port=1942\n`\n\nconst appDevCommand: CliCommandDefinition = {\n name: 'dev',\n group: 'app',\n signature: '[--port <port>] [--host <host>]',\n description: 'Starts a local dev server for your Sanity application with live reloading',\n action: async (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const devAction = await getDevAction()\n\n return devAction(args, context)\n },\n helpText,\n}\n\nexport async function getDevAction(): Promise<\n (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => Promise<void>\n> {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/dev/devAction') = require('../../actions/dev/devAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/dev/devAction')\n\n return mod.default\n}\n\nexport default appDevCommand\n","/* eslint-disable no-process-env */\nexport const isInteractive =\n process.stdout.isTTY && process.env.TERM !== 'dumb' && !('CI' in process.env)\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartDevServerCommandFlags} from '../../actions/dev/devAction'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new entry to the CORS-origins allow list.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. [default: \"127.0.0.1\"]\n\nExamples\n sanity dev --host=0.0.0.0\n sanity dev --port=1942\n`\n\nconst devCommand: CliCommandDefinition = {\n name: 'dev',\n signature: '[--port <port>] [--host <host>]',\n description: 'Starts a local dev server for Sanity Studio with live reloading',\n action: async (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const devAction = await getDevAction()\n\n return devAction(args, context)\n },\n helpText,\n}\n\nexport async function getDevAction(): Promise<\n (\n args: CliCommandArguments<StartDevServerCommandFlags>,\n context: CliCommandContext,\n ) => Promise<void>\n> {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/dev/devAction') = require('../../actions/dev/devAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/dev/devAction')\n\n return mod.default\n}\n\nexport default devCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartPreviewServerCommandFlags} from '../../actions/preview/previewAction'\nimport {isInteractive} from '../../util/isInteractive'\nimport {getDevAction} from '../dev/devCommand'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new CORS-entry to be added.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. 
[default: \"127.0.0.1\"]\n\nExamples\n sanity app start --host=0.0.0.0\n sanity app start --port=1942\n sanity app start some/build-output-dir\n`\n\nconst appStartCommand: CliCommandDefinition = {\n name: 'start',\n group: 'app',\n signature: '[BUILD_OUTPUT_DIR] [--port <port>] [--host <host>]',\n description: 'Previews a built Sanity application',\n action: async (\n args: CliCommandArguments<StartPreviewServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const {output, chalk, prompt} = context\n const previewAction = await getPreviewAction()\n\n const error = (msg: string) => output.warn(chalk.red.bgBlack(msg))\n\n try {\n await previewAction(args, context)\n } catch (err) {\n if (err.name !== 'BUILD_NOT_FOUND') {\n throw err\n }\n\n error(err.message)\n error('\\n')\n\n const shouldRunDevServer =\n isInteractive &&\n (await prompt.single({\n message: 'Do you want to start a development server instead?',\n type: 'confirm',\n }))\n\n if (shouldRunDevServer) {\n const devAction = await getDevAction()\n await devAction(args, context)\n } else {\n // Indicate that this isn't an expected exit\n // eslint-disable-next-line no-process-exit\n process.exit(1)\n }\n }\n },\n helpText,\n}\n\nasync function getPreviewAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/preview/previewAction') = require('../../actions/preview/previewAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/preview/previewAction')\n\n return mod.default\n}\n\nexport default appStartCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\n// defaultApiVersion is the backend API version used for dataset backup.\nexport const defaultApiVersion = 'v2024-02-21'\n\nconst datasetBackupGroup: CliCommandGroupDefinition = {\n name: 'backup',\n signature: '[COMMAND]',\n description: 'Manage backups.',\n isGroupRoot: true,\n}\n\nexport default datasetBackupGroup\n","// apiErr is a type that represents an error returned by the API\ninterface ApiErr {\n statusCode: number\n message: string\n}\n\n// parseApiErr is a function that attempts with the best effort to parse\n// an error returned by the API since different API endpoint may end up\n// returning different error structures.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any,@typescript-eslint/explicit-module-boundary-types\nfunction parseApiErr(err: any): ApiErr {\n const apiErr = {} as ApiErr\n if (err.code) {\n apiErr.statusCode = err.code\n } else if (err.statusCode) {\n apiErr.statusCode = err.statusCode\n }\n\n if (err.message) {\n apiErr.message = err.message\n } else if (err.statusMessage) {\n apiErr.message = err.statusMessage\n } else if (err?.response?.body?.message) {\n apiErr.message = err.response.body.message\n } else if (err?.response?.data?.message) {\n apiErr.message = err.response.data.message\n } else {\n // If no message can be extracted, print the whole error.\n apiErr.message = JSON.stringify(err)\n }\n\n return apiErr\n}\n\nexport default parseApiErr\n","import debugIt from 'debug'\n\nexport const debug = debugIt('sanity:core')\n","const MAX_DATASET_NAME_LENGTH = 64\n\nexport function validateDatasetName(datasetName: string): false | string {\n if (!datasetName) {\n return 'Dataset name is missing'\n }\n\n const name = 
`${datasetName}`\n\n if (name.toLowerCase() !== name) {\n return 'Dataset name must be all lowercase characters'\n }\n\n if (name.length < 2) {\n return 'Dataset name must be at least two characters long'\n }\n\n if (name.length > MAX_DATASET_NAME_LENGTH) {\n return `Dataset name must be at most ${MAX_DATASET_NAME_LENGTH} characters`\n }\n\n if (!/^[a-z0-9]/.test(name)) {\n return 'Dataset name must start with a letter or a number'\n }\n\n if (!/^[a-z0-9][-_a-z0-9]+$/.test(name)) {\n return 'Dataset name must only contain letters, numbers, dashes and underscores'\n }\n\n if (/[-_]$/.test(name)) {\n return 'Dataset name must not end with a dash or an underscore'\n }\n\n return false\n}\n","import {type CliPrompter} from '@sanity/cli'\n\nimport {validateDatasetName} from './validateDatasetName'\n\nexport function promptForDatasetName(\n prompt: CliPrompter,\n options: {message?: string; default?: string} = {},\n): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Dataset name:',\n validate: (name) => {\n const err = validateDatasetName(name)\n if (err) {\n return err\n }\n\n return true\n },\n ...options,\n })\n}\n","import {type CliCommandContext} from '@sanity/cli'\n\nimport {debug} from '../../debug'\nimport {promptForDatasetName} from './datasetNamePrompt'\n\nexport async function chooseDatasetPrompt(\n context: CliCommandContext,\n options: {message?: string; allowCreation?: boolean} = {},\n): Promise<string> {\n const {apiClient, prompt} = context\n const {message, allowCreation} = options\n const client = apiClient()\n\n const datasets = await client.datasets.list()\n const hasProduction = datasets.find((dataset) => dataset.name === 'production')\n const datasetChoices = datasets.map((dataset) => ({value: dataset.name}))\n const selected = await prompt.single({\n message: message || 'Select dataset to use',\n type: 'list',\n choices: allowCreation\n ? [{value: 'new', name: 'Create new dataset'}, new prompt.Separator(), ...datasetChoices]\n : datasetChoices,\n })\n\n if (selected === 'new') {\n debug('User wants to create a new dataset, prompting for name')\n const newDatasetName = await promptForDatasetName(prompt, {\n message: 'Name your dataset:',\n default: hasProduction ? undefined : 'production',\n })\n await client.datasets.create(newDatasetName)\n return newDatasetName\n }\n\n return selected\n}\n","import {type CliCommandContext} from '@sanity/cli'\nimport {type SanityClient} from '@sanity/client'\n\nimport {chooseDatasetPrompt} from '../dataset/chooseDatasetPrompt'\n\ntype ResolvedApiClient = {\n projectId: string\n datasetName: string\n token?: string\n client: SanityClient\n}\n\nasync function resolveApiClient(\n context: CliCommandContext,\n datasetName: string,\n apiVersion: string,\n): Promise<ResolvedApiClient> {\n const {apiClient} = context\n\n let client = apiClient()\n const {projectId, token} = client.config()\n\n if (!projectId) {\n throw new Error('Project ID not defined')\n }\n\n // If no dataset provided, explicitly ask for dataset instead of using dataset\n // configured in Sanity config. 
Aligns with `sanity dataset export` behavior.\n let selectedDataset: string = datasetName\n if (!selectedDataset) {\n selectedDataset = await chooseDatasetPrompt(context, {\n message: 'Select the dataset name:',\n })\n }\n\n client = client.withConfig({dataset: datasetName, apiVersion})\n\n return {\n projectId,\n datasetName: selectedDataset,\n token,\n client,\n }\n}\n\nexport default resolveApiClient\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport {defaultApiVersion} from './backupGroup'\n\nconst helpText = `\nExamples\n sanity backup disable DATASET_NAME\n`\n\nconst disableDatasetBackupCommand: CliCommandDefinition = {\n name: 'disable',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'Disable backup for a dataset.',\n helpText,\n action: async (args, context) => {\n const {output, chalk} = context\n const [dataset] = args.argsWithoutOptions\n const {projectId, datasetName, token, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n try {\n await client.request({\n method: 'PUT',\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`,\n body: {\n enabled: false,\n },\n })\n output.print(`${chalk.green(`Disabled daily backups for dataset ${datasetName}\\n`)}`)\n } catch (error) {\n const {message} = parseApiErr(error)\n output.print(`${chalk.red(`Disabling dataset backup failed: ${message}`)}\\n`)\n }\n },\n}\n\nexport default disableDatasetBackupCommand\n","export default require('debug')('sanity:backup')\n","import {createWriteStream} from 'node:fs'\nimport zlib from 'node:zlib'\n\nimport {type ProgressData} from 'archiver'\n\nimport debug from './debug'\n\nconst archiver = require('archiver')\n\n// ProgressCb is a callback that is called with the number of bytes processed so far.\ntype ProgressCb = (processedBytes: number) => void\n\n// archiveDir creates a tarball of the given directory and writes it to the given file path.\nfunction archiveDir(tmpOutDir: string, outFilePath: string, progressCb: ProgressCb): Promise<void> {\n return new Promise((resolve, reject) => {\n const archiveDestination = createWriteStream(outFilePath)\n archiveDestination.on('error', (err: Error) => {\n reject(err)\n })\n\n archiveDestination.on('close', () => {\n resolve()\n })\n\n const archive = archiver('tar', {\n gzip: true,\n gzipOptions: {level: zlib.constants.Z_DEFAULT_COMPRESSION},\n })\n\n archive.on('error', (err: Error) => {\n debug('Archiving errored!\\n%s', err.stack)\n reject(err)\n })\n\n // Catch warnings for non-blocking errors (stat failures and others)\n archive.on('warning', (err: Error) => {\n debug('Archive warning: %s', err.message)\n })\n\n archive.on('progress', (progress: ProgressData) => {\n progressCb(progress.fs.processedBytes)\n })\n\n // Pipe archive data to the file\n archive.pipe(archiveDestination)\n archive.directory(tmpOutDir, false)\n archive.finalize()\n })\n}\n\nexport default archiveDir\n","import {type CliCommandContext} from '@sanity/cli'\n\nimport {defaultApiVersion} from '../../commands/backup/backupGroup'\nimport resolveApiClient from './resolveApiClient'\n\n// maxBackupIdsShown is the maximum number of backup IDs to show in the prompt.\n// Higher numbers will cause the prompt to be slow.\nconst maxBackupIdsShown = 100\n\nasync function chooseBackupIdPrompt(\n context: CliCommandContext,\n datasetName: 
string,\n): Promise<string> {\n const {prompt} = context\n\n const {projectId, token, client} = await resolveApiClient(context, datasetName, defaultApiVersion)\n\n try {\n // Fetch last $maxBackupIdsShown backups for this dataset.\n // We expect here that API returns backups sorted by creation date in descending order.\n const response = await client.request({\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/backups`,\n query: {limit: maxBackupIdsShown.toString()},\n })\n\n if (response?.backups?.length > 0) {\n const backupIdChoices = response.backups.map((backup: {id: string}) => ({\n value: backup.id,\n }))\n const selected = await prompt.single({\n message: `Select backup ID to use (only last ${maxBackupIdsShown} shown)`,\n type: 'list',\n choices: backupIdChoices,\n })\n\n return selected\n }\n } catch (err) {\n throw new Error(`Failed to fetch backups for dataset ${datasetName}: ${err.message}`)\n }\n\n throw new Error('No backups found')\n}\n\nexport default chooseBackupIdPrompt\n","import {rimraf} from 'rimraf'\n\nimport debug from './debug'\n\nasync function cleanupTmpDir(tmpDir: string): Promise<void> {\n try {\n await rimraf(tmpDir)\n } catch (err) {\n debug(`Error cleaning up temporary files: ${err.message}`)\n }\n}\n\nexport default cleanupTmpDir\n","import debug from './debug'\n\nconst MAX_RETRIES = 5\nconst BACKOFF_DELAY_BASE = 200\n\nconst exponentialBackoff = (retryCount: number) => Math.pow(2, retryCount) * BACKOFF_DELAY_BASE\n\nasync function withRetry<T>(\n operation: () => Promise<T>,\n maxRetries: number = MAX_RETRIES,\n): Promise<T> {\n for (let retryCount = 0; retryCount < maxRetries; retryCount++) {\n try {\n return await operation()\n } catch (err) {\n // Immediately rethrow if the error is not server-related.\n if (err.response && err.response.statusCode && err.response.statusCode < 500) {\n throw err\n }\n\n const retryDelay = exponentialBackoff(retryCount)\n debug(`Error encountered, retrying after ${retryDelay}ms: %s`, err.message)\n await new Promise((resolve) => setTimeout(resolve, retryDelay))\n }\n }\n\n throw new Error('Operation failed after all retries')\n}\n\nexport default withRetry\n","import {createWriteStream} from 'node:fs'\nimport path from 'node:path'\nimport {pipeline} from 'node:stream/promises'\n\nimport {getIt} from 'get-it'\n// eslint-disable-next-line import/extensions\nimport {keepAlive, promise} from 'get-it/middleware'\n\nimport debug from './debug'\nimport withRetry from './withRetry'\n\nconst CONNECTION_TIMEOUT = 15 * 1000 // 15 seconds\nconst READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes\n\nconst request = getIt([keepAlive(), promise()])\n\nasync function downloadAsset(\n url: string,\n fileName: string,\n fileType: string,\n outDir: string,\n): Promise<void> {\n // File names that contain a path to file (e.g. sanity-storage/assets/file-name.tar.gz) fail when archive is\n // created due to missing parent dir (e.g. 
sanity-storage/assets), so we want to handle them by taking\n // the base name as file name.\n const normalizedFileName = path.basename(fileName)\n\n const assetFilePath = getAssetFilePath(normalizedFileName, fileType, outDir)\n await withRetry(async () => {\n const response = await request({\n url: url,\n maxRedirects: 5,\n timeout: {connect: CONNECTION_TIMEOUT, socket: READ_TIMEOUT},\n stream: true,\n })\n\n debug('Received asset %s with status code %d', normalizedFileName, response?.statusCode)\n\n await pipeline(response.body, createWriteStream(assetFilePath))\n })\n}\n\nfunction getAssetFilePath(fileName: string, fileType: string, outDir: string): string {\n // Set assetFilePath if we are downloading an asset file.\n // If it's a JSON document, assetFilePath will be an empty string.\n let assetFilePath = ''\n if (fileType === 'image') {\n assetFilePath = path.join(outDir, 'images', fileName)\n } else if (fileType === 'file') {\n assetFilePath = path.join(outDir, 'files', fileName)\n }\n\n return assetFilePath\n}\n\nexport default downloadAsset\n","import {getIt, type MiddlewareResponse} from 'get-it'\n// eslint-disable-next-line import/extensions\nimport {keepAlive, promise} from 'get-it/middleware'\n\nimport debug from './debug'\nimport withRetry from './withRetry'\n\nconst CONNECTION_TIMEOUT = 15 * 1000 // 15 seconds\nconst READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes\n\nconst request = getIt([keepAlive(), promise()])\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nasync function downloadDocument(url: string): Promise<any> {\n const response = await withRetry<MiddlewareResponse>(() =>\n request({\n url,\n maxRedirects: 5,\n timeout: {connect: CONNECTION_TIMEOUT, socket: READ_TIMEOUT},\n }),\n )\n\n debug('Received document from %s with status code %d', url, response?.statusCode)\n\n return response.body\n}\n\nexport default downloadDocument\n","import {Readable} from 'node:stream'\n\nimport {type QueryParams, type SanityClient} from '@sanity/client'\n\ntype File = {\n name: string\n url: string\n type: string\n}\n\ntype GetBackupResponse = {\n createdAt: string\n totalFiles: number\n files: File[]\n nextCursor?: string\n}\n\nclass PaginatedGetBackupStream extends Readable {\n private cursor = ''\n private readonly client: SanityClient\n private readonly projectId: string\n private readonly datasetName: string\n private readonly backupId: string\n private readonly token: string\n public totalFiles = 0\n\n constructor(\n client: SanityClient,\n projectId: string,\n datasetName: string,\n backupId: string,\n token: string,\n ) {\n super({objectMode: true})\n this.client = client\n this.projectId = projectId\n this.datasetName = datasetName\n this.backupId = backupId\n this.token = token\n }\n\n async _read(): Promise<void> {\n try {\n const data = await this.fetchNextBackupPage()\n\n // Set totalFiles when it's fetched for the first time\n if (this.totalFiles === 0) {\n this.totalFiles = data.totalFiles\n }\n\n data.files.forEach((file: File) => this.push(file))\n\n if (typeof data.nextCursor === 'string' && data.nextCursor !== '') {\n this.cursor = data.nextCursor\n } else {\n // No more pages left to fetch.\n this.push(null)\n }\n } catch (err) {\n this.destroy(err as Error)\n }\n }\n\n // fetchNextBackupPage fetches the next page of backed up files from the backup API.\n async fetchNextBackupPage(): Promise<GetBackupResponse> {\n const query: QueryParams = this.cursor === '' ? 
{} : {nextCursor: this.cursor}\n\n try {\n return await this.client.request({\n headers: {Authorization: `Bearer ${this.token}`},\n uri: `/projects/${this.projectId}/datasets/${this.datasetName}/backups/${this.backupId}`,\n query,\n })\n } catch (error) {\n // It can be clearer to pull this logic out in a common error handling function for re-usability.\n let msg = error.statusCode ? error.response.body.message : error.message\n\n // If no message can be extracted, print the whole error.\n if (msg === undefined) {\n msg = String(error)\n }\n throw new Error(`Downloading dataset backup failed: ${msg}`)\n }\n }\n}\n\nexport {PaginatedGetBackupStream}\nexport type {File, GetBackupResponse}\n","import {type CliOutputter} from '@sanity/cli'\nimport prettyMs from 'pretty-ms'\n\ntype ProgressEvent = {\n step: string\n update?: boolean\n current?: number\n total?: number\n}\n\ninterface ProgressSpinner {\n set: (progress: ProgressEvent) => void\n update: (progress: ProgressEvent) => void\n succeed: () => void\n fail: () => void\n}\n\nconst newProgress = (output: CliOutputter, startStep: string): ProgressSpinner => {\n let spinner = output.spinner(startStep).start()\n let lastProgress: ProgressEvent = {step: startStep}\n let start = Date.now()\n\n const print = (progress: ProgressEvent) => {\n const elapsed = prettyMs(Date.now() - start)\n if (progress.current && progress.current > 0 && progress.total && progress.total > 0) {\n spinner.text = `${progress.step} (${progress.current}/${progress.total}) [${elapsed}]`\n } else {\n spinner.text = `${progress.step} [${elapsed}]`\n }\n }\n\n return {\n set: (progress: ProgressEvent) => {\n if (progress.step !== lastProgress.step) {\n print(lastProgress) // Print the last progress before moving on\n spinner.succeed()\n spinner = output.spinner(progress.step).start()\n start = Date.now()\n } else if (progress.step === lastProgress.step && progress.update) {\n print(progress)\n }\n lastProgress = progress\n },\n update: (progress: ProgressEvent) => {\n print(progress)\n lastProgress = progress\n },\n succeed: () => {\n spinner.succeed()\n start = Date.now()\n },\n fail: () => {\n spinner.fail()\n start = Date.now()\n },\n }\n}\n\nexport default newProgress\n","function humanFileSize(size: number): string {\n const i = size == 0 ? 
0 : Math.floor(Math.log(size) / Math.log(1024))\n return `${(size / Math.pow(1024, i)).toFixed(2)} ${['B', 'kB', 'MB', 'GB', 'TB'][i]}`\n}\n\nexport default humanFileSize\n","function isPathDirName(filepath: string): boolean {\n // Check if the path has an extension, commonly indicating a file\n return !/\\.\\w+$/.test(filepath)\n}\n\nexport default isPathDirName\n","import {createWriteStream, existsSync, mkdirSync} from 'node:fs'\nimport {mkdtemp} from 'node:fs/promises'\nimport {tmpdir} from 'node:os'\nimport path from 'node:path'\nimport {finished} from 'node:stream/promises'\n\nimport {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n type SanityClient,\n} from '@sanity/cli'\nimport {absolutify} from '@sanity/util/fs'\nimport {Mutex} from 'async-mutex'\nimport createDebug from 'debug'\nimport {isString} from 'lodash'\nimport prettyMs from 'pretty-ms'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport archiveDir from '../../actions/backup/archiveDir'\nimport chooseBackupIdPrompt from '../../actions/backup/chooseBackupIdPrompt'\nimport cleanupTmpDir from '../../actions/backup/cleanupTmpDir'\nimport downloadAsset from '../../actions/backup/downloadAsset'\nimport downloadDocument from '../../actions/backup/downloadDocument'\nimport {type File, PaginatedGetBackupStream} from '../../actions/backup/fetchNextBackupPage'\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport newProgress from '../../actions/backup/progressSpinner'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport humanFileSize from '../../util/humanFileSize'\nimport isPathDirName from '../../util/isPathDirName'\nimport {defaultApiVersion} from './backupGroup'\n\nconst debug = createDebug('sanity:backup')\n\nconst DEFAULT_DOWNLOAD_CONCURRENCY = 10\nconst MAX_DOWNLOAD_CONCURRENCY = 24\n\ninterface DownloadBackupOptions {\n projectId: string\n datasetName: string\n token: string\n backupId: string\n outDir: string\n outFileName: string\n overwrite: boolean\n concurrency: number\n}\n\nconst helpText = `\nOptions\n --backup-id <string> The backup ID to download. (required)\n --out <string> The file or directory path the backup should download to.\n --overwrite Allows overwriting of existing backup file.\n --concurrency <num> Concurrent number of backup item downloads. 
(max: 24)\n\nExamples\n sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-1\n sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-2 --out /path/to/file\n sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-3 --out /path/to/file --overwrite\n`\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .options('backup-id', {type: 'string'})\n .options('out', {type: 'string'})\n .options('concurrency', {type: 'number', default: DEFAULT_DOWNLOAD_CONCURRENCY})\n .options('overwrite', {type: 'boolean', default: false}).argv\n}\n\nconst downloadBackupCommand: CliCommandDefinition = {\n name: 'download',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'Download a dataset backup to a local file.',\n helpText,\n // eslint-disable-next-line max-statements\n action: async (args, context) => {\n const {output, chalk} = context\n const [client, opts] = await prepareBackupOptions(context, args)\n const {projectId, datasetName, backupId, outDir, outFileName} = opts\n\n // If any of the output path or file name is empty, cancel the operation.\n if (outDir === '' || outFileName === '') {\n output.print('Operation cancelled.')\n return\n }\n const outFilePath = path.join(outDir, outFileName)\n\n output.print('╭───────────────────────────────────────────────────────────╮')\n output.print('│ │')\n output.print('│ Downloading backup for: │')\n output.print(`│ ${chalk.bold('projectId')}: ${chalk.cyan(projectId).padEnd(56)} │`)\n output.print(`│ ${chalk.bold('dataset')}: ${chalk.cyan(datasetName).padEnd(58)} │`)\n output.print(`│ ${chalk.bold('backupId')}: ${chalk.cyan(backupId).padEnd(56)} │`)\n output.print('│ │')\n output.print('╰───────────────────────────────────────────────────────────╯')\n output.print('')\n output.print(`Downloading backup to \"${chalk.cyan(outFilePath)}\"`)\n\n const start = Date.now()\n const progressSpinner = newProgress(output, 'Setting up backup environment...')\n\n // Create a unique temporary directory to store files before bundling them into the archive at outputPath.\n // Temporary directories are normally deleted at the end of backup process, any unexpected exit may leave them\n // behind, hence it is important to create a unique directory for each attempt.\n const tmpOutDir = await mkdtemp(path.join(tmpdir(), `sanity-backup-`))\n\n // Create required directories if they don't exist.\n for (const dir of [outDir, path.join(tmpOutDir, 'images'), path.join(tmpOutDir, 'files')]) {\n mkdirSync(dir, {recursive: true})\n }\n\n debug('Writing to temporary directory %s', tmpOutDir)\n const tmpOutDocumentsFile = path.join(tmpOutDir, 'data.ndjson')\n\n // Handle concurrent writes to the same file using mutex.\n const docOutStream = createWriteStream(tmpOutDocumentsFile)\n const docWriteMutex = new Mutex()\n\n try {\n const backupFileStream = new PaginatedGetBackupStream(\n client,\n opts.projectId,\n opts.datasetName,\n opts.backupId,\n opts.token,\n )\n\n const files: File[] = []\n let i = 0\n for await (const file of backupFileStream) {\n files.push(file)\n i++\n progressSpinner.set({\n step: `Reading backup files...`,\n update: true,\n current: i,\n total: backupFileStream.totalFiles,\n })\n }\n\n let totalItemsDownloaded = 0\n // This is dynamically imported because this module is ESM only and this file gets compiled to CJS at this time.\n const {default: pMap} = await import('p-map')\n await pMap(\n files,\n async (file: File) => {\n if (file.type === 'file' || 
file.type === 'image') {\n await downloadAsset(file.url, file.name, file.type, tmpOutDir)\n } else {\n const doc = await downloadDocument(file.url)\n await docWriteMutex.runExclusive(() => {\n docOutStream.write(`${doc}\\n`)\n })\n }\n\n totalItemsDownloaded += 1\n progressSpinner.set({\n step: `Downloading documents and assets...`,\n update: true,\n current: totalItemsDownloaded,\n total: backupFileStream.totalFiles,\n })\n },\n {concurrency: opts.concurrency},\n )\n } catch (error) {\n progressSpinner.fail()\n const {message} = parseApiErr(error)\n throw new Error(`Downloading dataset backup failed: ${message}`)\n }\n\n docOutStream.end()\n await finished(docOutStream)\n\n progressSpinner.set({step: `Archiving files into a tarball...`, update: true})\n try {\n await archiveDir(tmpOutDir, outFilePath, (processedBytes: number) => {\n progressSpinner.update({\n step: `Archiving files into a tarball, ${humanFileSize(processedBytes)} bytes written...`,\n })\n })\n } catch (err) {\n progressSpinner.fail()\n throw new Error(`Archiving backup failed: ${err.message}`)\n }\n\n progressSpinner.set({\n step: `Cleaning up temporary files at ${chalk.cyan(`${tmpOutDir}`)}`,\n })\n await cleanupTmpDir(tmpOutDir)\n\n progressSpinner.set({\n step: `Backup download complete [${prettyMs(Date.now() - start)}]`,\n })\n progressSpinner.succeed()\n },\n}\n\n// prepareBackupOptions validates backup options from CLI and prepares Client and DownloadBackupOptions.\nasync function prepareBackupOptions(\n context: CliCommandContext,\n args: CliCommandArguments,\n): Promise<[SanityClient, DownloadBackupOptions]> {\n const flags = await parseCliFlags(args)\n const [dataset] = args.argsWithoutOptions\n const {prompt, workDir} = context\n const {projectId, datasetName, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n const {token} = client.config()\n if (!isString(token) || token.length < 1) {\n throw new Error(`token is missing`)\n }\n\n if (!isString(datasetName) || datasetName.length < 1) {\n throw new Error(`dataset ${datasetName} must be a valid dataset name`)\n }\n\n const backupId = String(flags['backup-id'] || (await chooseBackupIdPrompt(context, datasetName)))\n if (backupId.length < 1) {\n throw new Error(`backup-id ${flags['backup-id']} should be a valid string`)\n }\n\n if ('concurrency' in flags) {\n if (flags.concurrency < 1 || flags.concurrency > MAX_DOWNLOAD_CONCURRENCY) {\n throw new Error(`concurrency should be in 1 to ${MAX_DOWNLOAD_CONCURRENCY} range`)\n }\n }\n\n const defaultOutFileName = `${datasetName}-backup-${backupId}.tar.gz`\n let out = await (async (): Promise<string> => {\n if (flags.out !== undefined) {\n // Rewrite the output path to an absolute path, if it is not already.\n return absolutify(flags.out)\n }\n\n const input = await prompt.single({\n type: 'input',\n message: 'Output path:',\n default: path.join(workDir, defaultOutFileName),\n filter: absolutify,\n })\n return input\n })()\n\n // If path is a directory name, then add a default file name to the path.\n if (isPathDirName(out)) {\n out = path.join(out, defaultOutFileName)\n }\n\n // If the file already exists, ask for confirmation if it should be overwritten.\n if (!flags.overwrite && existsSync(out)) {\n const shouldOverwrite = await prompt.single({\n type: 'confirm',\n message: `File \"${out}\" already exists, would you like to overwrite it?`,\n default: false,\n })\n\n // If the user does not want to overwrite the file, set the output path to an empty string.\n // This should be handled by 
the caller of this function as cancel operation.\n if (!shouldOverwrite) {\n out = ''\n }\n }\n\n return [\n client,\n {\n projectId,\n datasetName,\n backupId,\n token,\n outDir: path.dirname(out),\n outFileName: path.basename(out),\n overwrite: flags.overwrite,\n concurrency: flags.concurrency || DEFAULT_DOWNLOAD_CONCURRENCY,\n },\n ]\n}\n\nexport default downloadBackupCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport {defaultApiVersion} from './backupGroup'\n\nconst helpText = `\nExamples\n sanity backup enable DATASET_NAME\n`\n\nconst enableDatasetBackupCommand: CliCommandDefinition = {\n name: 'enable',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'Enable backup for a dataset.',\n helpText,\n action: async (args, context) => {\n const {output, chalk} = context\n const [dataset] = args.argsWithoutOptions\n const {projectId, datasetName, token, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n try {\n await client.request({\n method: 'PUT',\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`,\n body: {\n enabled: true,\n },\n })\n\n output.print(\n `${chalk.green(\n `Enabled backups for dataset ${datasetName}.\\nPlease note that it may take up to 24 hours before the first backup is created.\\n`,\n )}`,\n )\n\n output.print(\n `${chalk.bold(`Retention policies may apply depending on your plan and agreement.\\n`)}`,\n )\n } catch (error) {\n const {message} = parseApiErr(error)\n output.print(`${chalk.red(`Enabling dataset backup failed: ${message}`)}\\n`)\n }\n },\n}\nexport default enableDatasetBackupCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport {Table} from 'console-table-printer'\nimport {isAfter, isValid, lightFormat, parse} from 'date-fns'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport parseApiErr from '../../actions/backup/parseApiErr'\nimport resolveApiClient from '../../actions/backup/resolveApiClient'\nimport {defaultApiVersion} from './backupGroup'\n\nconst DEFAULT_LIST_BACKUP_LIMIT = 30\n\ninterface ListDatasetBackupFlags {\n before?: string\n after?: string\n limit?: string\n}\n\ntype ListBackupRequestQueryParams = {\n before?: string\n after?: string\n limit: string\n}\n\ntype ListBackupResponse = {\n backups: ListBackupResponseItem[]\n}\n\ntype ListBackupResponseItem = {\n id: string\n createdAt: string\n}\n\nconst helpText = `\nOptions\n --limit <int> Maximum number of backups returned. Default 30.\n --after <string> Only return backups after this date (inclusive)\n --before <string> Only return backups before this date (exclusive). 
Cannot be younger than <after> if specified.\n\nExamples\n sanity backup list DATASET_NAME\n sanity backup list DATASET_NAME --limit 50\n sanity backup list DATASET_NAME --after 2024-01-31 --limit 10\n sanity backup list DATASET_NAME --after 2024-01-31 --before 2024-01-10\n`\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .options('after', {type: 'string'})\n .options('before', {type: 'string'})\n .options('limit', {type: 'number', default: DEFAULT_LIST_BACKUP_LIMIT, alias: 'l'}).argv\n}\n\nconst listDatasetBackupCommand: CliCommandDefinition<ListDatasetBackupFlags> = {\n name: 'list',\n group: 'backup',\n signature: '[DATASET_NAME]',\n description: 'List available backups for a dataset.',\n helpText,\n action: async (args, context) => {\n const {output, chalk} = context\n const flags = await parseCliFlags(args)\n const [dataset] = args.argsWithoutOptions\n\n const {projectId, datasetName, token, client} = await resolveApiClient(\n context,\n dataset,\n defaultApiVersion,\n )\n\n const query: ListBackupRequestQueryParams = {limit: DEFAULT_LIST_BACKUP_LIMIT.toString()}\n if (flags.limit) {\n // We allow limit up to Number.MAX_SAFE_INTEGER to leave it for server-side validation,\n // while still sending sensible value in limit string.\n if (flags.limit < 1 || flags.limit > Number.MAX_SAFE_INTEGER) {\n throw new Error(\n `Parsing --limit: must be an integer between 1 and ${Number.MAX_SAFE_INTEGER}`,\n )\n }\n query.limit = flags.limit.toString()\n }\n\n if (flags.before || flags.after) {\n try {\n const parsedBefore = processDateFlags(flags.before)\n const parsedAfter = processDateFlags(flags.after)\n\n if (parsedAfter && parsedBefore && isAfter(parsedAfter, parsedBefore)) {\n throw new Error('--after date must be before --before')\n }\n\n query.before = flags.before\n query.after = flags.after\n } catch (err) {\n throw new Error(`Parsing date flags: ${err}`)\n }\n }\n\n let response\n try {\n response = await client.request<ListBackupResponse>({\n headers: {Authorization: `Bearer ${token}`},\n uri: `/projects/${projectId}/datasets/${datasetName}/backups`,\n query: {...query},\n })\n } catch (error) {\n const {message} = parseApiErr(error)\n output.error(`${chalk.red(`List dataset backup failed: ${message}`)}\\n`)\n }\n\n if (response && response.backups) {\n if (response.backups.length === 0) {\n output.print('No backups found.')\n return\n }\n\n const table = new Table({\n columns: [\n {name: 'resource', title: 'RESOURCE', alignment: 'left'},\n {name: 'createdAt', title: 'CREATED AT', alignment: 'left'},\n {name: 'backupId', title: 'BACKUP ID', alignment: 'left'},\n ],\n })\n\n response.backups.forEach((backup: ListBackupResponseItem) => {\n const {id, createdAt} = backup\n table.addRow({\n resource: 'Dataset',\n createdAt: lightFormat(Date.parse(createdAt), 'yyyy-MM-dd HH:mm:ss'),\n backupId: id,\n })\n })\n\n table.printTable()\n }\n },\n}\n\nfunction processDateFlags(date: string | undefined): Date | undefined {\n if (!date) return undefined\n const parsedDate = parse(date, 'yyyy-MM-dd', new Date())\n if (isValid(parsedDate)) {\n return parsedDate\n }\n\n throw new Error(`Invalid ${date} date format. 
Use YYYY-MM-DD`)\n}\n\nexport default listDatasetBackupCommand\n","import type {CliCommandArguments, CliCommandContext, CliCommandDefinition} from '@sanity/cli'\nimport {BuildSanityStudioCommandFlags} from '../../actions/build/buildAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --auto-updates / --no-auto-updates Enable/disable auto updates of studio versions\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n --schema-path If you are storing your schemas in a different path than the default one, you need to specify it here.\n\nExamples\n sanity build\n sanity build --no-minify --source-maps\n`\n\nconst buildCommand: CliCommandDefinition = {\n name: 'build',\n signature: '[OUTPUT_DIR]',\n description: 'Builds the Sanity Studio configuration into a static bundle',\n action: async (\n args: CliCommandArguments<BuildSanityStudioCommandFlags>,\n context: CliCommandContext,\n overrides?: {basePath?: string},\n ) => {\n const buildAction = await getBuildAction()\n\n return buildAction(args, context, overrides)\n },\n helpText,\n}\n\nasync function getBuildAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions\n const mod: typeof import('../../actions/build/buildAction') = require('../../actions/build/buildAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/build/buildAction')\n\n return mod.default\n}\n\nexport default buildCommand\n","import url from 'node:url'\n\nimport {type CliCommandContext, type CliPrompter} from '@sanity/cli'\nimport logSymbols from 'log-symbols'\nimport oneline from 'oneline'\n\nconst wildcardReplacement = 'a-wild-card-r3pl4c3m3n7-a'\nconst portReplacement = ':7777777'\n\ninterface AddCorsOriginFlags {\n credentials?: boolean\n}\n\nexport async function addCorsOrigin(\n givenOrigin: string,\n flags: AddCorsOriginFlags,\n context: CliCommandContext,\n): Promise<boolean> {\n const {apiClient, prompt, output} = context\n const origin = await (givenOrigin\n ? filterAndValidateOrigin(givenOrigin)\n : promptForOrigin(prompt))\n\n const hasWildcard = origin.includes('*')\n if (hasWildcard && !(await promptForWildcardConfirmation(origin, context))) {\n return false\n }\n const allowCredentials =\n typeof flags.credentials === 'undefined'\n ? await promptForCredentials(hasWildcard, context)\n : Boolean(flags.credentials)\n\n if (givenOrigin !== origin) {\n output.print(`Normalized origin to ${origin}`)\n }\n\n const client = apiClient({\n requireUser: true,\n requireProject: true,\n })\n\n await client.request({\n method: 'POST',\n url: '/cors',\n body: {origin, allowCredentials},\n maxRedirects: 0,\n })\n\n return true\n}\n\nfunction promptForCredentials(hasWildcard: boolean, context: CliCommandContext): Promise<string> {\n const {prompt, output, chalk} = context\n\n output.print('')\n if (hasWildcard) {\n output.print(oneline`\n ${chalk.yellow(`${logSymbols.warning} Warning:`)}\n We ${chalk.red(chalk.underline('HIGHLY'))} recommend NOT allowing credentials\n on origins containing wildcards. If you are logged in to a studio, people will\n be able to send requests ${chalk.underline('on your behalf')} to read and modify\n data, from any matching origin. 
Please tread carefully!\n `)\n } else {\n output.print(oneline`\n ${chalk.yellow(`${logSymbols.warning} Warning:`)}\n Should this origin be allowed to send requests using authentication tokens or\n session cookies? Be aware that any script on this origin will be able to send\n requests ${chalk.underline('on your behalf')} to read and modify data if you\n are logged in to a Sanity studio. If this origin hosts a studio, you will need\n this, otherwise you should probably answer \"No\" (n).\n `)\n }\n\n output.print('')\n\n return prompt.single({\n type: 'confirm',\n message: oneline`\n Allow credentials to be sent from this origin? Please read the warning above.\n `,\n default: false,\n })\n}\n\nfunction promptForWildcardConfirmation(\n origin: string,\n context: CliCommandContext,\n): Promise<boolean> {\n const {prompt, output, chalk} = context\n\n output.print('')\n output.print(chalk.yellow(`${logSymbols.warning} Warning: Examples of allowed origins:`))\n\n if (origin === '*') {\n output.print('- http://www.some-malicious.site')\n output.print('- https://not.what-you-were-expecting.com')\n output.print('- https://high-traffic-site.com')\n output.print('- http://192.168.1.1:8080')\n } else {\n output.print(`- ${origin.replace(/:\\*/, ':1234').replace(/\\*/g, 'foo')}`)\n output.print(`- ${origin.replace(/:\\*/, ':3030').replace(/\\*/g, 'foo.bar')}`)\n }\n\n output.print('')\n\n return prompt.single({\n type: 'confirm',\n message: oneline`\n Using wildcards can be ${chalk.red('risky')}.\n Are you ${chalk.underline('absolutely sure')} you want to allow this origin?`,\n default: false,\n })\n}\n\nfunction promptForOrigin(prompt: CliPrompter): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Origin (including protocol):',\n filter: filterOrigin,\n validate: (origin) => validateOrigin(origin, origin),\n })\n}\n\nfunction filterOrigin(origin: string): string | null {\n if (origin === '*' || origin === 'file:///*' || origin === 'null') {\n return origin\n }\n\n try {\n const example = origin\n .replace(/([^:])\\*/g, `$1${wildcardReplacement}`)\n .replace(/:\\*/, portReplacement)\n\n const parsed = url.parse(example)\n let host = parsed.host || ''\n if (/^https?:$/.test(parsed.protocol || '')) {\n host = host.replace(/:(80|443)$/, '')\n }\n\n host = host.replace(portReplacement, ':*').replace(new RegExp(wildcardReplacement, 'g'), '*')\n\n return `${parsed.protocol}//${host}`\n } catch (err) {\n return null\n }\n}\n\nfunction validateOrigin(origin: string | null, givenOrigin: string): true | string {\n if (origin === '*' || origin === 'file:///*' || origin === 'null') {\n return true\n }\n\n try {\n url.parse(origin || (0 as any as string)) // Use 0 to trigger error for unset values\n return true\n } catch (err) {\n // Fall-through to error\n }\n\n if (/^file:\\/\\//.test(givenOrigin)) {\n return `Only a local file wildcard is currently allowed: file:///*`\n }\n\n return `Invalid origin \"${givenOrigin}\", must include protocol (https://some.host)`\n}\n\nfunction filterAndValidateOrigin(givenOrigin: string): string {\n const origin = filterOrigin(givenOrigin)\n const result = validateOrigin(origin, givenOrigin)\n if (result !== true) {\n throw new Error(result)\n }\n\n if (!origin) {\n throw new Error('Invalid origin')\n }\n\n return origin\n}\n","import fs from 'node:fs'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\n\nimport {addCorsOrigin} from '../../actions/cors/addCorsOrigin'\n\nconst helpText = `\nOptions\n --credentials Allow 
credentials (token/cookie) to be sent from this origin\n --no-credentials Disallow credentials (token/cookie) to be sent from this origin\n\nExamples\n sanity cors add\n sanity cors add http://localhost:3000 --no-credentials\n`\n\nconst addCorsOriginCommand: CliCommandDefinition = {\n name: 'add',\n group: 'cors',\n signature: '[ORIGIN]',\n helpText,\n description: 'Allow a new origin to use your project API through CORS',\n action: async (args, context) => {\n const {output} = context\n const [origin] = args.argsWithoutOptions\n\n if (!origin) {\n throw new Error('No origin specified, use `sanity cors add <origin-url>`')\n }\n\n const flags = args.extOptions\n\n // eslint-disable-next-line no-sync\n const isFile = fs.existsSync(path.join(process.cwd(), origin))\n if (isFile) {\n output.warn(`Origin \"${origin}?\" Remember to quote values (sanity cors add \"*\")`)\n }\n\n const success = await addCorsOrigin(origin, flags, context)\n if (success) {\n output.print('CORS origin added successfully')\n }\n },\n}\n\nexport default addCorsOriginCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst corsGroup: CliCommandGroupDefinition = {\n name: 'cors',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Configures CORS settings for Sanity projects',\n}\n\nexport default corsGroup\n","import {type CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type CorsOrigin} from './types'\n\nconst helpText = `\nExamples\n sanity cors delete\n sanity cors delete http://localhost:3000\n`\n\nconst deleteCorsOriginCommand: CliCommandDefinition = {\n name: 'delete',\n group: 'cors',\n signature: '[ORIGIN]',\n helpText,\n description: 'Delete an existing CORS-origin from your project',\n action: async (args, context) => {\n const {output, apiClient} = context\n const [origin] = args.argsWithoutOptions\n const client = apiClient({requireUser: true, requireProject: true})\n const originId = await promptForOrigin(origin, context)\n try {\n await client.request({method: 'DELETE', uri: `/cors/${originId}`})\n output.print('Origin deleted')\n } catch (err) {\n throw new Error(`Origin deletion failed:\\n${err.message}`)\n }\n },\n}\n\nexport default deleteCorsOriginCommand\n\nasync function promptForOrigin(specified: string | undefined, context: CliCommandContext) {\n const specifiedOrigin = specified && specified.toLowerCase()\n const {prompt, apiClient} = context\n const client = apiClient({requireUser: true, requireProject: true})\n\n const origins = await client.request<CorsOrigin[]>({url: '/cors'})\n if (specifiedOrigin) {\n const selected = origins.filter((origin) => origin.origin.toLowerCase() === specifiedOrigin)[0]\n if (!selected) {\n throw new Error(`Origin \"${specified} not found\"`)\n }\n\n return selected.id\n }\n\n const choices = origins.map((origin) => ({value: origin.id, name: origin.origin}))\n return prompt.single({\n message: 'Select origin to delete',\n type: 'list',\n choices,\n })\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {type CorsOrigin} from './types'\n\nconst helpText = `\nExamples\n sanity cors list\n`\n\nconst listCorsOriginsCommand: CliCommandDefinition = {\n name: 'list',\n group: 'cors',\n signature: '',\n helpText,\n description: 'List all origins allowed to access the API for this project',\n action: async (args, context) => {\n const {output} = context\n const {apiClient} = context\n const client = apiClient({requireUser: true, requireProject: true})\n const origins = await 
client.request<CorsOrigin[]>({url: '/cors'})\n output.print(origins.map((origin) => origin.origin).join('\\n'))\n },\n}\n\nexport default listCorsOriginsCommand\n","const MAX_DATASET_NAME_LENGTH = 64\n\nexport function validateDatasetAliasName(datasetName: string): false | string {\n if (!datasetName) {\n return 'Alias name is missing'\n }\n\n const name = `${datasetName}`\n\n if (name.toLowerCase() !== name) {\n return 'Alias name must be all lowercase characters'\n }\n\n if (name.length < 2) {\n return 'Alias name must be at least two characters long'\n }\n\n if (name.length > MAX_DATASET_NAME_LENGTH) {\n return `Alias name must be at most ${MAX_DATASET_NAME_LENGTH} characters`\n }\n\n if (!/^[a-z0-9~]/.test(name)) {\n return 'Alias name must start with a letter or a number'\n }\n\n if (!/^[a-z0-9~][-_a-z0-9]+$/.test(name)) {\n return 'Alias name must only contain letters, numbers, dashes and underscores'\n }\n\n if (/[-_]$/.test(name)) {\n return 'Alias name must not end with a dash or an underscore'\n }\n\n return false\n}\n","import {type CliPrompter} from '@sanity/cli'\n\nimport {validateDatasetAliasName} from './validateDatasetAliasName'\n\nexport function promptForDatasetAliasName(\n prompt: CliPrompter,\n options: {message?: string; default?: string} = {},\n): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Alias name:',\n validate: (name) => {\n const err = validateDatasetAliasName(name)\n if (err) {\n return err\n }\n\n return true\n },\n ...options,\n })\n}\n","import {type SanityClient} from '@sanity/client'\n\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport {type DatasetAliasDefinition, type DatasetModificationResponse} from './types'\n\nexport const ALIAS_PREFIX = '~'\n\nexport function listAliases(client: SanityClient): Promise<DatasetAliasDefinition[]> {\n return client.request<DatasetAliasDefinition[]>({uri: '/aliases'})\n}\n\nexport function createAlias(\n client: SanityClient,\n aliasName: string,\n datasetName: string | null,\n): Promise<DatasetModificationResponse> {\n return modify(client, 'PUT', aliasName, datasetName ? {datasetName} : undefined)\n}\n\nexport function updateAlias(\n client: SanityClient,\n aliasName: string,\n datasetName: string | null,\n): Promise<DatasetModificationResponse> {\n return modify(client, 'PATCH', aliasName, datasetName ? 
{datasetName} : undefined)\n}\n\nexport function unlinkAlias(\n client: SanityClient,\n aliasName: string,\n): Promise<DatasetModificationResponse> {\n validateDatasetAliasName(aliasName)\n return modify(client, 'PATCH', `${aliasName}/unlink`, {})\n}\n\nexport function removeAlias(client: SanityClient, aliasName: string): Promise<{deleted: boolean}> {\n return modify(client, 'DELETE', aliasName)\n}\n\nfunction modify(\n client: SanityClient,\n method: string,\n aliasName: string,\n body?: {datasetName?: string},\n) {\n return client.request({method, uri: `/aliases/${aliasName}`, body})\n}\n","import {type CliCommandAction} from '@sanity/cli'\n\nimport {promptForDatasetAliasName} from '../../../actions/dataset/alias/promptForDatasetAliasName'\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport {promptForDatasetName} from '../../../actions/dataset/datasetNamePrompt'\nimport {validateDatasetName} from '../../../actions/dataset/validateDatasetName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nexport const createAliasHandler: CliCommandAction = async (args, context) => {\n const {apiClient, output, prompt} = context\n const [, alias, targetDataset] = args.argsWithoutOptions\n const client = apiClient()\n\n const nameError = alias && validateDatasetAliasName(alias)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const [datasets, aliases, projectFeatures] = await Promise.all([\n client.datasets.list().then((sets) => sets.map((ds) => ds.name)),\n aliasClient.listAliases(client).then((sets) => sets.map((ds) => ds.name)),\n client.request({uri: '/features'}),\n ])\n\n let aliasName = await (alias || promptForDatasetAliasName(prompt))\n let aliasOutputName = aliasName\n\n if (aliasName.startsWith(ALIAS_PREFIX)) {\n aliasName = aliasName.slice(1)\n } else {\n aliasOutputName = `${ALIAS_PREFIX}${aliasName}`\n }\n\n if (aliases.includes(aliasName)) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" already exists`)\n }\n\n if (targetDataset) {\n const datasetErr = validateDatasetName(targetDataset)\n if (datasetErr) {\n throw new Error(datasetErr)\n }\n }\n\n const datasetName = await (targetDataset || promptForDatasetName(prompt))\n if (datasetName && !datasets.includes(datasetName)) {\n throw new Error(`Dataset \"${datasetName}\" does not exist `)\n }\n\n const canCreateAlias = projectFeatures.includes('advancedDatasetManagement')\n if (!canCreateAlias) {\n throw new Error(`This project cannot create a dataset alias`)\n }\n\n try {\n await aliasClient.createAlias(client, aliasName, datasetName)\n output.print(\n `Dataset alias ${aliasOutputName} created ${\n datasetName && `and linked to ${datasetName}`\n } successfully`,\n )\n } catch (err) {\n throw new Error(`Dataset alias creation failed:\\n${err.message}`)\n }\n}\n","import {type CliCommandAction} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2)).option('force', {type: 'boolean'}).argv\n}\n\ninterface DeleteAliasFlags {\n force?: boolean\n}\n\nexport const deleteAliasHandler: CliCommandAction<DeleteAliasFlags> = async (args, context) => {\n const {apiClient, 
prompt, output} = context\n const [, ds] = args.argsWithoutOptions\n const {force} = await parseCliFlags(args)\n const client = apiClient()\n if (!ds) {\n throw new Error('Dataset alias name must be provided')\n }\n\n let aliasName = `${ds}`\n const dsError = validateDatasetAliasName(aliasName)\n if (dsError) {\n throw dsError\n }\n aliasName = aliasName.startsWith(ALIAS_PREFIX) ? aliasName.slice(1) : aliasName\n\n const [fetchedAliases] = await Promise.all([aliasClient.listAliases(client)])\n const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName)\n const message =\n linkedAlias && linkedAlias.datasetName\n ? `This dataset alias is linked to ${linkedAlias.datasetName}. `\n : ''\n\n if (force) {\n output.warn(`'--force' used: skipping confirmation, deleting alias \"${aliasName}\"`)\n } else {\n await prompt.single({\n type: 'input',\n message: `${message}Are you ABSOLUTELY sure you want to delete this dataset alias?\\n Type the name of the dataset alias to confirm delete: `,\n filter: (input) => `${input}`.trim(),\n validate: (input) => {\n return input === aliasName || 'Incorrect dataset alias name. Ctrl + C to cancel delete.'\n },\n })\n }\n\n return aliasClient.removeAlias(client, aliasName).then(() => {\n output.print('Dataset alias deleted successfully')\n })\n}\n","import {type CliCommandAction} from '@sanity/cli'\n\nimport {promptForDatasetAliasName} from '../../../actions/dataset/alias/promptForDatasetAliasName'\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport {promptForDatasetName} from '../../../actions/dataset/datasetNamePrompt'\nimport {validateDatasetName} from '../../../actions/dataset/validateDatasetName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nexport const linkAliasHandler: CliCommandAction = async (args, context) => {\n const {apiClient, output, prompt} = context\n const [, alias, targetDataset] = args.argsWithoutOptions\n const flags = args.extOptions\n const client = apiClient()\n\n const nameError = alias && validateDatasetAliasName(alias)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const [datasets, fetchedAliases] = await Promise.all([\n client.datasets.list().then((sets) => sets.map((ds) => ds.name)),\n aliasClient.listAliases(client),\n ])\n const aliases = fetchedAliases.map((da) => da.name)\n\n let aliasName = await (alias || promptForDatasetAliasName(prompt))\n let aliasOutputName = aliasName\n\n if (aliasName.startsWith(ALIAS_PREFIX)) {\n aliasName = aliasName.slice(1)\n } else {\n aliasOutputName = `${ALIAS_PREFIX}${aliasName}`\n }\n\n if (!aliases.includes(aliasName)) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" does not exist `)\n }\n\n const datasetName = await (targetDataset || promptForDatasetName(prompt))\n const datasetErr = validateDatasetName(datasetName)\n if (datasetErr) {\n throw new Error(datasetErr)\n }\n\n if (!datasets.includes(datasetName)) {\n throw new Error(`Dataset \"${datasetName}\" does not exist `)\n }\n\n const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName)\n\n if (linkedAlias && linkedAlias.datasetName) {\n if (linkedAlias.datasetName === datasetName) {\n throw new Error(`Dataset alias ${aliasOutputName} already linked to ${datasetName}`)\n }\n\n if (!flags.force) {\n await prompt.single({\n type: 'input',\n message: `This alias is linked to dataset <${linkedAlias.datasetName}>. 
Are you ABSOLUTELY sure you want to link this dataset alias to this dataset?\n \\n Type YES/NO: `,\n filter: (input) => `${input}`.toLowerCase(),\n validate: (input) => {\n return input === 'yes' || 'Ctrl + C to cancel dataset alias link.'\n },\n })\n }\n }\n\n try {\n await aliasClient.updateAlias(client, aliasName, datasetName)\n output.print(`Dataset alias ${aliasOutputName} linked to ${datasetName} successfully`)\n } catch (err) {\n throw new Error(`Dataset alias link failed:\\n${err.message}`)\n }\n}\n","import {type CliCommandAction} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {promptForDatasetAliasName} from '../../../actions/dataset/alias/promptForDatasetAliasName'\nimport {validateDatasetAliasName} from '../../../actions/dataset/alias/validateDatasetAliasName'\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\ninterface UnlinkFlags {\n force?: boolean\n}\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2)).option('force', {type: 'boolean'}).argv\n}\n\nexport const unlinkAliasHandler: CliCommandAction<UnlinkFlags> = async (args, context) => {\n const {apiClient, output, prompt} = context\n const [, alias] = args.argsWithoutOptions\n const {force} = await parseCliFlags(args)\n const client = apiClient()\n\n const nameError = alias && validateDatasetAliasName(alias)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const fetchedAliases = await aliasClient.listAliases(client)\n\n let aliasName = await (alias || promptForDatasetAliasName(prompt))\n let aliasOutputName = aliasName\n\n if (aliasName.startsWith(ALIAS_PREFIX)) {\n aliasName = aliasName.slice(1)\n } else {\n aliasOutputName = `${ALIAS_PREFIX}${aliasName}`\n }\n\n // get the current alias from the remote alias list\n const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName)\n if (!linkedAlias) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" does not exist`)\n }\n\n if (!linkedAlias.datasetName) {\n throw new Error(`Dataset alias \"${aliasOutputName}\" is not linked to a dataset`)\n }\n\n if (force) {\n output.warn(`'--force' used: skipping confirmation, unlinking alias \"${aliasOutputName}\"`)\n } else {\n await prompt.single({\n type: 'input',\n message: `Are you ABSOLUTELY sure you want to unlink this alias from the \"${linkedAlias.datasetName}\" dataset?\n \\n Type YES/NO: `,\n filter: (input) => `${input}`.toLowerCase(),\n validate: (input) => {\n return input === 'yes' || 'Ctrl + C to cancel dataset alias unlink.'\n },\n })\n }\n\n try {\n const result = await aliasClient.unlinkAlias(client, aliasName)\n output.print(\n `Dataset alias ${aliasOutputName} unlinked from ${result.datasetName} successfully`,\n )\n } catch (err) {\n throw new Error(`Dataset alias unlink failed:\\n${err.message}`)\n }\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport oneline from 'oneline'\n\nimport {createAliasHandler} from './createAliasHandler'\nimport {deleteAliasHandler} from './deleteAliasHandler'\nimport {linkAliasHandler} from './linkAliasHandler'\nimport {unlinkAliasHandler} from './unlinkAliasHandler'\n\nconst helpText = `\nBelow are examples of the alias subcommand\n\nCreate Alias\n sanity dataset alias create\n sanity dataset alias create <alias-name>\n sanity dataset alias create <alias-name> <target-dataset>\n\nDelete Alias\n Options\n --force Skips security prompt and forces link command\n\n Usage\n 
sanity dataset alias delete <alias-name>\n sanity dataset alias delete <alias-name> --force\n\nLink Alias\n Options\n --force Skips security prompt and forces link command\n\n Usage\n sanity dataset alias link\n sanity dataset alias link <alias-name>\n sanity dataset alias link <alias-name> <target-dataset>\n sanity dataset alias link <alias-name> <target-dataset> --force\n\nUn-link Alias\n Options\n --force Skips security prompt and forces link command\n\n Usage\n sanity dataset alias unlink\n sanity dataset alias unlink <alias-name>\n sanity dataset alias unlink <alias-name> --force\n`\n\nconst aliasCommand: CliCommandDefinition = {\n name: 'alias',\n group: 'dataset',\n signature: 'SUBCOMMAND [ALIAS_NAME, TARGET_DATASET]',\n helpText,\n description: 'You can manage your dataset alias using this command.',\n action: async (args, context) => {\n const [verb] = args.argsWithoutOptions\n switch (verb) {\n case 'create':\n await createAliasHandler(args, context)\n break\n case 'delete':\n await deleteAliasHandler(args, context)\n break\n case 'unlink':\n await unlinkAliasHandler(args, context)\n break\n case 'link':\n await linkAliasHandler(args, context)\n break\n default:\n throw new Error(oneline`\n Invalid command provided. Available commands are: create, delete, link and unlink.\n For more guide run the help command 'sanity dataset alias --help'\n `)\n }\n },\n}\n\nexport default aliasCommand\n","import {type CliCommandContext} from '@sanity/cli'\nimport {Table} from 'console-table-printer'\nimport {formatDistance, formatDistanceToNow, parseISO} from 'date-fns'\n\ninterface ListFlags {\n offset?: number\n limit?: number\n}\n\ntype CopyDatasetListResponse = {\n id: string\n state: string\n createdAt: string\n updatedAt: string\n sourceDataset: string\n targetDataset: string\n withHistory: boolean\n}[]\n\nexport async function listDatasetCopyJobs(\n flags: ListFlags,\n context: CliCommandContext,\n): Promise<void> {\n const {apiClient, output, chalk} = context\n const client = apiClient()\n const projectId = client.config().projectId\n const query: {offset?: string; limit?: string} = {}\n let response\n\n if (flags.offset && flags.offset >= 0) {\n query.offset = `${flags.offset}`\n }\n if (flags.limit && flags.limit > 0) {\n query.limit = `${flags.limit}`\n }\n\n try {\n response = await client.request<CopyDatasetListResponse>({\n method: 'GET',\n uri: `/projects/${projectId}/datasets/copy`,\n query,\n })\n } catch (error) {\n if (error.statusCode) {\n output.error(`${chalk.red(`Dataset copy list failed:\\n${error.response.body.message}`)}\\n`)\n } else {\n output.error(`${chalk.red(`Dataset copy list failed:\\n${error.message}`)}\\n`)\n }\n }\n\n if (response && response.length > 0) {\n const table = new Table({\n title: 'Dataset copy jobs for this project in descending order',\n columns: [\n {name: 'id', title: 'Job ID', alignment: 'left'},\n {name: 'sourceDataset', title: 'Source Dataset', alignment: 'left'},\n {name: 'targetDataset', title: 'Target Dataset', alignment: 'left'},\n {name: 'state', title: 'State', alignment: 'left'},\n {name: 'withHistory', title: 'With history', alignment: 'left'},\n {name: 'timeStarted', title: 'Time started', alignment: 'left'},\n {name: 'timeTaken', title: 'Time taken', alignment: 'left'},\n ],\n })\n\n response.forEach((job) => {\n const {id, state, createdAt, updatedAt, sourceDataset, targetDataset, withHistory} = job\n\n let timeStarted = ''\n if (createdAt !== '') {\n timeStarted = formatDistanceToNow(parseISO(createdAt))\n }\n\n let timeTaken = 
''\n if (updatedAt !== '') {\n timeTaken = formatDistance(parseISO(updatedAt), parseISO(createdAt))\n }\n\n let color\n switch (state) {\n case 'completed':\n color = 'green'\n break\n case 'failed':\n color = 'red'\n break\n case 'pending':\n color = 'yellow'\n break\n default:\n color = ''\n }\n\n table.addRow(\n {\n id,\n state,\n withHistory,\n timeStarted: `${timeStarted} ago`,\n timeTaken,\n sourceDataset,\n targetDataset,\n },\n {color},\n )\n })\n\n table.printTable()\n } else {\n output.print(\"This project doesn't have any dataset copy jobs\")\n }\n}\n","import {type SanityClient} from '@sanity/client'\n\nexport const getClientUrl = (client: SanityClient, uri: string, useCdn = false): string => {\n const config = client.config()\n const base = useCdn ? config.cdnUrl : config.url\n return `${base}/${uri.replace(/^\\//, '')}`\n}\n","import {type CliCommandDefinition, type CliOutputter} from '@sanity/cli'\nimport {type SanityClient} from '@sanity/client'\nimport EventSource from '@sanity/eventsource'\nimport {Observable} from 'rxjs'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {promptForDatasetName} from '../../actions/dataset/datasetNamePrompt'\nimport {listDatasetCopyJobs} from '../../actions/dataset/listDatasetCopyJobs'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\nimport {debug} from '../../debug'\nimport {getClientUrl} from '../../util/getClientUrl'\n\nconst helpText = `\nOptions\n --detach Start the copy without waiting for it to finish\n --attach <job-id> Attach to the running copy process to show progress\n --skip-history Don't preserve document history on copy\n --list Lists all dataset copy jobs corresponding to a certain criteria.\n --offset Start position in the list of jobs. Default 0. With --list.\n --limit Maximum number of jobs returned. Default 10. Maximum 1000. 
With --list.\n\nExamples\n sanity dataset copy\n sanity dataset copy <source-dataset>\n sanity dataset copy <source-dataset> <target-dataset>\n sanity dataset copy --skip-history <source-dataset> <target-dataset>\n sanity dataset copy --detach <source-dataset> <target-dataset>\n sanity dataset copy --attach <job-id>\n sanity dataset copy --list\n sanity dataset copy --list --offset=2\n sanity dataset copy --list --offset=2 --limit=10\n`\n\ninterface CopyProgressStreamEvent {\n type: 'reconnect' | string\n progress?: number\n}\n\ninterface CopyDatasetFlags {\n 'list'?: boolean\n 'attach'?: string\n 'detach'?: boolean\n 'offset'?: number\n 'limit'?: number\n 'skip-history'?: boolean\n}\n\ninterface CopyDatasetResponse {\n jobId: string\n}\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .option('attach', {type: 'string'})\n .option('list', {type: 'boolean'})\n .option('limit', {type: 'number'})\n .option('offset', {type: 'number'})\n .option('skip-history', {type: 'boolean'})\n .option('detach', {type: 'boolean'}).argv\n}\n\nconst progress = (url: string) => {\n return new Observable<CopyProgressStreamEvent>((observer) => {\n let progressSource = new EventSource(url)\n let stopped = false\n\n function onError(error: unknown) {\n if (progressSource) {\n progressSource.close()\n }\n\n debug(`Error received: ${error}`)\n if (stopped) {\n return\n }\n observer.next({type: 'reconnect'})\n progressSource = new EventSource(url)\n }\n\n function onChannelError(error: MessageEvent) {\n stopped = true\n progressSource.close()\n observer.error(error)\n }\n\n function onMessage(event: MessageEvent) {\n const data = JSON.parse(event.data)\n if (data.state === 'failed') {\n debug('Job failed. Data: %o', event)\n observer.error(event)\n } else if (data.state === 'completed') {\n debug('Job succeeded. Data: %o', event)\n onComplete()\n } else {\n debug(`Job progressed. 
Data: %o`, event)\n observer.next(data)\n }\n }\n\n function onComplete() {\n progressSource.removeEventListener('error', onError)\n progressSource.removeEventListener('channel_error', onChannelError)\n progressSource.removeEventListener('job', onMessage)\n progressSource.removeEventListener('done', onComplete)\n progressSource.close()\n observer.complete()\n }\n\n progressSource.addEventListener('error', onError)\n progressSource.addEventListener('channel_error', onChannelError)\n progressSource.addEventListener('job', onMessage)\n progressSource.addEventListener('done', onComplete)\n })\n}\n\nconst followProgress = (\n jobId: string,\n client: SanityClient,\n output: CliOutputter,\n): Promise<void> => {\n let currentProgress = 0\n\n const spinner = output.spinner({}).start()\n const listenUrl = getClientUrl(client, `jobs/${jobId}/listen`)\n\n debug(`Listening to ${listenUrl}`)\n\n return new Promise((resolve, reject) => {\n progress(listenUrl).subscribe({\n next: (event) => {\n if (typeof event.progress === 'number') {\n currentProgress = event.progress\n }\n\n spinner.text = `Copy in progress: ${currentProgress}%`\n },\n error: (err) => {\n spinner.fail()\n reject(new Error(`${err.data}`))\n },\n complete: () => {\n spinner.succeed('Copy finished.')\n resolve()\n },\n })\n })\n}\n\nconst copyDatasetCommand: CliCommandDefinition<CopyDatasetFlags> = {\n name: 'copy',\n group: 'dataset',\n signature: '[SOURCE_DATASET] [TARGET_DATASET]',\n helpText,\n description:\n 'Manages dataset copying, including starting a new copy job, listing copy jobs and following the progress of a running copy job',\n action: async (args, context) => {\n const {apiClient, output, prompt, chalk} = context\n // Reparsing CLI flags for better control of binary flags\n const flags: CopyDatasetFlags = await parseCliFlags(args)\n const client = apiClient()\n\n if (flags.list) {\n await listDatasetCopyJobs(flags, context)\n return\n }\n\n if (flags.attach) {\n const jobId = flags.attach\n\n if (!jobId) {\n throw new Error('Please supply a jobId')\n }\n\n await followProgress(jobId, client, output)\n return\n }\n\n const [sourceDataset, targetDataset] = args.argsWithoutOptions\n const shouldSkipHistory = Boolean(flags['skip-history'])\n\n const nameError = sourceDataset && validateDatasetName(sourceDataset)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const existingDatasets = await client.datasets\n .list()\n .then((datasets) => datasets.map((ds) => ds.name))\n\n const sourceDatasetName = await (sourceDataset ||\n promptForDatasetName(prompt, {message: 'Source dataset name:'}))\n if (!existingDatasets.includes(sourceDatasetName)) {\n throw new Error(`Source dataset \"${sourceDatasetName}\" doesn't exist`)\n }\n\n const targetDatasetName = await (targetDataset ||\n promptForDatasetName(prompt, {message: 'Target dataset name:'}))\n if (existingDatasets.includes(targetDatasetName)) {\n throw new Error(`Target dataset \"${targetDatasetName}\" already exists`)\n }\n\n const err = validateDatasetName(targetDatasetName)\n if (err) {\n throw new Error(err)\n }\n\n try {\n const response = await client.request<CopyDatasetResponse>({\n method: 'PUT',\n uri: `/datasets/${sourceDatasetName}/copy`,\n body: {\n targetDataset: targetDatasetName,\n skipHistory: shouldSkipHistory,\n },\n })\n\n output.print(\n `Copying dataset ${chalk.green(sourceDatasetName)} to ${chalk.green(targetDatasetName)}...`,\n )\n\n if (!shouldSkipHistory) {\n output.print(\n `Note: You can run this command with flag '--skip-history'. 
The flag will reduce copy time in larger datasets.`,\n )\n }\n\n output.print(`Job ${chalk.green(response.jobId)} started`)\n\n if (flags.detach) {\n return\n }\n\n await followProgress(response.jobId, client, output)\n output.print(`Job ${chalk.green(response.jobId)} completed`)\n } catch (error) {\n if (error.statusCode) {\n output.print(`${chalk.red(`Dataset copying failed:\\n${error.response.body.message}`)}\\n`)\n } else {\n output.print(`${chalk.red(`Dataset copying failed:\\n${error.message}`)}\\n`)\n }\n }\n },\n}\n\nexport default copyDatasetCommand\n","import {type CliCommandDefinition, type CliOutputter, type CliPrompter} from '@sanity/cli'\n\nimport {promptForDatasetName} from '../../actions/dataset/datasetNamePrompt'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\nimport {debug} from '../../debug'\n\nconst helpText = `\nOptions\n --visibility <mode> Set visibility for this dataset (public/private)\n\nExamples\n sanity dataset create\n sanity dataset create <name>\n sanity dataset create <name> --visibility private\n`\n\nconst allowedModes = ['private', 'public', 'custom']\n\ninterface CreateFlags {\n visibility?: 'private' | 'public' | 'custom'\n}\n\nconst createDatasetCommand: CliCommandDefinition<CreateFlags> = {\n name: 'create',\n group: 'dataset',\n signature: '[NAME]',\n helpText,\n description: 'Create a new dataset within your project',\n action: async (args, context) => {\n const {apiClient, output, prompt} = context\n const flags = args.extOptions\n const [dataset] = args.argsWithoutOptions\n const client = apiClient()\n\n const nameError = dataset && validateDatasetName(dataset)\n if (nameError) {\n throw new Error(nameError)\n }\n\n const [datasets, projectFeatures] = await Promise.all([\n client.datasets.list().then((sets) => sets.map((ds) => ds.name)),\n client.request({uri: '/features'}),\n ])\n\n if (flags.visibility && !allowedModes.includes(flags.visibility)) {\n throw new Error(`Visibility mode \"${flags.visibility}\" not allowed`)\n }\n\n const datasetName = await (dataset || promptForDatasetName(prompt))\n if (datasets.includes(datasetName)) {\n throw new Error(`Dataset \"${datasetName}\" already exists`)\n }\n\n const canCreatePrivate = projectFeatures.includes('privateDataset')\n debug('%s create private datasets', canCreatePrivate ? 'Can' : 'Cannot')\n\n const defaultAclMode = canCreatePrivate ? 
flags.visibility : 'public'\n const aclMode = await (defaultAclMode || promptForDatasetVisibility(prompt, output))\n\n try {\n await client.datasets.create(datasetName, {aclMode})\n output.print('Dataset created successfully')\n } catch (err) {\n throw new Error(`Dataset creation failed:\\n${err.message}`)\n }\n },\n}\n\nasync function promptForDatasetVisibility(prompt: CliPrompter, output: CliOutputter) {\n const mode = await prompt.single<'public' | 'private'>({\n type: 'list',\n message: 'Dataset visibility',\n choices: [\n {\n value: 'public',\n name: 'Public (world readable)',\n },\n {\n value: 'private',\n name: 'Private (Authenticated user or token needed)',\n },\n ],\n })\n\n if (mode === 'private') {\n output.print(\n 'Please note that while documents are private, assets (files and images) are still public\\n',\n )\n }\n\n return mode\n}\n\nexport default createDatasetCommand\n","export default {\n name: 'dataset',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages datasets, like create or delete, within projects',\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\n\nconst datasetVisibilityCommand: CliCommandDefinition = {\n name: 'visibility',\n group: 'dataset',\n helpText: '',\n signature: 'get/set [dataset] [mode]',\n description: 'Set visibility of a dataset',\n action: async (args, context) => {\n const {apiClient, output} = context\n const [action, ds, aclMode] = args.argsWithoutOptions\n const client = apiClient()\n\n if (!client.datasets.edit) {\n throw new Error('@sanity/cli must be upgraded first:\\n npm install -g @sanity/cli')\n }\n\n if (!action) {\n throw new Error('Action must be provided (get/set)')\n }\n\n if (!['set', 'get'].includes(action)) {\n throw new Error('Invalid action (only get/set allowed)')\n }\n\n if (!ds) {\n throw new Error('Dataset name must be provided')\n }\n\n if (action === 'set' && !aclMode) {\n throw new Error('Please provide a visibility mode (public/private)')\n }\n\n const dataset = `${ds}`\n const dsError = validateDatasetName(dataset)\n if (dsError) {\n throw new Error(dsError)\n }\n\n const current = (await client.datasets.list()).find((curr) => curr.name === dataset)\n\n if (!current) {\n throw new Error('Dataset not found')\n }\n\n if (action === 'get') {\n output.print(current.aclMode)\n return\n }\n\n if (current.aclMode === aclMode) {\n output.print(`Dataset already in \"${aclMode}\"-mode`)\n return\n }\n\n if (aclMode === 'private') {\n output.print(\n 'Please note that while documents are private, assets (files and images) are still public\\n',\n )\n }\n\n await client.datasets.edit(dataset, {aclMode: aclMode as 'public' | 'private'})\n output.print('Dataset visibility changed')\n },\n}\n\nexport default datasetVisibilityCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\n\nconst helpText = `\nOptions\n --force Do not prompt for delete confirmation - forcefully delete\n\nExamples\n sanity dataset delete\n sanity dataset delete my-dataset\n sanity dataset delete my-dataset --force\n`\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2)).option('force', {type: 'boolean'}).argv\n}\n\ninterface DeleteDatasetFlags {\n force?: boolean\n}\n\nconst deleteDatasetCommand: 
CliCommandDefinition<DeleteDatasetFlags> = {\n name: 'delete',\n group: 'dataset',\n helpText,\n signature: '[datasetName]',\n description: 'Delete a dataset within your project',\n action: async (args, context) => {\n const {apiClient, prompt, output} = context\n const {force} = await parseCliFlags(args)\n const [ds] = args.argsWithoutOptions\n if (!ds) {\n throw new Error('Dataset name must be provided')\n }\n\n const dataset = `${ds}`\n const dsError = validateDatasetName(dataset)\n if (dsError) {\n throw dsError\n }\n\n if (force) {\n output.warn(`'--force' used: skipping confirmation, deleting dataset \"${dataset}\"`)\n } else {\n await prompt.single({\n type: 'input',\n message:\n 'Are you ABSOLUTELY sure you want to delete this dataset?\\n Type the name of the dataset to confirm delete:',\n filter: (input) => `${input}`.trim(),\n validate: (input) => {\n return input === dataset || 'Incorrect dataset name. Ctrl + C to cancel delete.'\n },\n })\n }\n\n await apiClient().datasets.delete(dataset)\n output.print('Dataset deleted successfully')\n },\n}\n\nexport default deleteDatasetCommand\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition, type CliPrompter} from '@sanity/cli'\nimport exportDataset from '@sanity/export'\nimport {absolutify} from '@sanity/util/fs'\nimport prettyMs from 'pretty-ms'\n\nimport {chooseDatasetPrompt} from '../../actions/dataset/chooseDatasetPrompt'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\n\nconst noop = () => null\n\nconst helpText = `\nOptions\n --raw Extract only documents, without rewriting asset references\n --no-assets Export only non-asset documents and remove references to image assets\n --no-drafts Export only published versions of documents\n --no-compress Skips compressing tarball entries (still generates a gzip file)\n --types Defines which document types to export\n --overwrite Overwrite any file with the same name\n --asset-concurrency <num> Concurrent number of asset downloads\n --mode <stream|cursor> Uses a cursor when exporting, this might be more performant for larger datasets, but might not be as accurate if the dataset is being modified during export. 
Defaults to stream\n\nExamples\n sanity dataset export moviedb localPath.tar.gz\n sanity dataset export moviedb assetless.tar.gz --no-assets\n sanity dataset export staging staging.tar.gz --raw\n sanity dataset export staging staging.tar.gz --types products,shops\n`\n\ninterface ExportFlags {\n 'raw'?: boolean\n 'assets'?: boolean\n 'drafts'?: boolean\n 'compress'?: boolean\n 'overwrite'?: boolean\n 'types'?: string\n 'asset-concurrency'?: string\n 'mode'?: string\n}\n\ninterface ParsedExportFlags {\n raw?: boolean\n assets?: boolean\n drafts?: boolean\n compress?: boolean\n overwrite?: boolean\n types?: string[]\n assetConcurrency?: number\n mode?: string\n}\n\nfunction parseFlags(rawFlags: ExportFlags): ParsedExportFlags {\n const flags: ParsedExportFlags = {}\n if (rawFlags.types) {\n flags.types = `${rawFlags.types}`.split(',')\n }\n\n if (rawFlags['asset-concurrency']) {\n flags.assetConcurrency = parseInt(rawFlags['asset-concurrency'], 10)\n }\n\n if (typeof rawFlags.raw !== 'undefined') {\n flags.raw = Boolean(rawFlags.raw)\n }\n\n if (typeof rawFlags.assets !== 'undefined') {\n flags.assets = Boolean(rawFlags.assets)\n }\n\n if (typeof rawFlags.drafts !== 'undefined') {\n flags.drafts = Boolean(rawFlags.drafts)\n }\n\n if (typeof rawFlags.compress !== 'undefined') {\n flags.compress = Boolean(rawFlags.compress)\n }\n\n if (typeof rawFlags.overwrite !== 'undefined') {\n flags.overwrite = Boolean(rawFlags.overwrite)\n }\n\n if (typeof rawFlags.mode !== 'undefined') {\n flags.mode = rawFlags.mode\n }\n\n return flags\n}\n\ninterface ProgressEvent {\n step: string\n update?: boolean\n current: number\n total: number\n}\n\nconst exportDatasetCommand: CliCommandDefinition<ExportFlags> = {\n name: 'export',\n group: 'dataset',\n signature: '[NAME] [DESTINATION]',\n description: 'Export dataset to local filesystem as a gzipped tarball',\n helpText,\n action: async (args, context) => {\n const {apiClient, output, chalk, workDir, prompt} = context\n const client = apiClient()\n const [targetDataset, targetDestination] = args.argsWithoutOptions\n const flags = parseFlags(args.extOptions)\n\n let dataset = targetDataset ? 
`${targetDataset}` : null\n if (!dataset) {\n dataset = await chooseDatasetPrompt(context, {message: 'Select dataset to export'})\n }\n\n const dsError = validateDatasetName(dataset)\n if (dsError) {\n throw dsError\n }\n\n // Verify existence of dataset before trying to export from it\n const datasets = await client.datasets.list()\n if (!datasets.find((set) => set.name === dataset)) {\n throw new Error(`Dataset with name \"${dataset}\" not found`)\n }\n\n // Print information about what projectId and dataset it is being exported from\n const {projectId} = client.config()\n\n output.print('╭───────────────────────────────────────────────╮')\n output.print('│ │')\n output.print('│ Exporting from: │')\n output.print(`│ ${chalk.bold('projectId')}: ${chalk.cyan(projectId).padEnd(44)} │`)\n output.print(`│ ${chalk.bold('dataset')}: ${chalk.cyan(dataset).padEnd(46)} │`)\n output.print('│ │')\n output.print('╰───────────────────────────────────────────────╯')\n output.print('')\n\n let destinationPath = targetDestination\n if (!destinationPath) {\n destinationPath = await prompt.single({\n type: 'input',\n message: 'Output path:',\n default: path.join(workDir, `${dataset}.tar.gz`),\n filter: absolutify,\n })\n }\n\n const outputPath = await getOutputPath(destinationPath, dataset, prompt, flags)\n if (!outputPath) {\n output.print('Cancelled')\n return\n }\n\n // If we are dumping to a file, let the user know where it's at\n if (outputPath !== '-') {\n output.print(`Exporting dataset \"${chalk.cyan(dataset)}\" to \"${chalk.cyan(outputPath)}\"`)\n }\n\n let currentStep = 'Exporting documents...'\n let spinner = output.spinner(currentStep).start()\n const onProgress = (progress: ProgressEvent) => {\n if (progress.step !== currentStep) {\n spinner.succeed()\n spinner = output.spinner(progress.step).start()\n } else if (progress.step === currentStep && progress.update) {\n spinner.text = `${progress.step} (${progress.current}/${progress.total})`\n }\n\n currentStep = progress.step\n }\n\n const start = Date.now()\n try {\n await exportDataset({\n client,\n dataset,\n outputPath,\n onProgress,\n ...flags,\n })\n spinner.succeed()\n } catch (err) {\n spinner.fail()\n throw err\n }\n\n output.print(`Export finished (${prettyMs(Date.now() - start)})`)\n },\n}\n\n// eslint-disable-next-line complexity\nasync function getOutputPath(\n destination: string,\n dataset: string,\n prompt: CliPrompter,\n flags: ParsedExportFlags,\n) {\n if (destination === '-') {\n return '-'\n }\n\n const dstPath = path.isAbsolute(destination)\n ? destination\n : path.resolve(process.cwd(), destination)\n\n let dstStats = await fs.stat(dstPath).catch(noop)\n const looksLikeFile = dstStats ? dstStats.isFile() : path.basename(dstPath).indexOf('.') !== -1\n\n if (!dstStats) {\n const createPath = looksLikeFile ? path.dirname(dstPath) : dstPath\n\n await fs.mkdir(createPath, {recursive: true})\n }\n\n const finalPath = looksLikeFile ? 
dstPath : path.join(dstPath, `${dataset}.tar.gz`)\n dstStats = await fs.stat(finalPath).catch(noop)\n\n if (!flags.overwrite && dstStats && dstStats.isFile()) {\n const shouldOverwrite = await prompt.single({\n type: 'confirm',\n message: `File \"${finalPath}\" already exists, would you like to overwrite it?`,\n default: false,\n })\n\n if (!shouldOverwrite) {\n return false\n }\n }\n\n return finalPath\n}\n\nexport default exportDatasetCommand\n","import {createReadStream} from 'node:fs'\nimport fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandContext, type CliCommandDefinition, type CliOutputter} from '@sanity/cli'\nimport sanityImport from '@sanity/import'\nimport {getIt} from 'get-it'\n// eslint-disable-next-line import/extensions\nimport {promise} from 'get-it/middleware'\nimport {padStart} from 'lodash'\nimport prettyMs from 'pretty-ms'\n\nimport {chooseDatasetPrompt} from '../../actions/dataset/chooseDatasetPrompt'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName'\nimport {debug} from '../../debug'\n\nconst yellow = (str: string) => `\\u001b[33m${str}\\u001b[39m`\n\nconst helpText = `\nOptions\n --missing On duplicate document IDs, skip importing document in question\n --replace On duplicate document IDs, replace existing document with imported document\n --allow-failing-assets Skip assets that cannot be fetched/uploaded\n --replace-assets Skip reuse of existing assets\n --skip-cross-dataset-references Skips references to other datasets\n\nRarely used options (should generally not be used)\n --allow-assets-in-different-dataset Allow asset documents to reference different project/dataset\n --allow-system-documents Allow system documents like dataset permissions and custom retention to be imported\n\nExamples\n # Import \"moviedb.ndjson\" from the current directory to the dataset called \"moviedb\"\n sanity dataset import moviedb.ndjson moviedb\n\n # Import \"moviedb.tar.gz\" from the current directory to the dataset called \"moviedb\",\n # replacing any documents encountered that have the same document IDs\n sanity dataset import moviedb.tar.gz moviedb --replace\n\n # Import from a folder containing an ndjson file, such as an extracted tarball\n # retrieved through \"sanity dataset export\".\n sanity dataset import ~/some/folder moviedb\n\n # Import from a remote URL. Will download and extract the tarball to a temporary\n # location before importing it.\n sanity dataset import https://some.url/moviedb.tar.gz moviedb --replace\n`\n\ninterface ImportFlags {\n 'allow-assets-in-different-dataset'?: boolean\n 'allow-failing-assets'?: boolean\n 'asset-concurrency'?: boolean\n 'replace-assets'?: boolean\n 'skip-cross-dataset-references'?: boolean\n 'allow-system-documents'?: boolean\n 'replace'?: boolean\n 'missing'?: boolean\n}\n\ninterface ParsedImportFlags {\n allowAssetsInDifferentDataset?: boolean\n allowFailingAssets?: boolean\n assetConcurrency?: boolean\n skipCrossDatasetReferences?: boolean\n allowSystemDocuments?: boolean\n replaceAssets?: boolean\n replace?: boolean\n missing?: boolean\n}\n\ninterface ProgressEvent {\n step: string\n total?: number\n current?: number\n}\n\ninterface ImportWarning {\n type?: string\n url?: string\n}\n\nfunction toBoolIfSet(flag: unknown): boolean | undefined {\n return typeof flag === 'undefined' ? 
undefined : Boolean(flag)\n}\n\nfunction parseFlags(rawFlags: ImportFlags): ParsedImportFlags {\n const allowAssetsInDifferentDataset = toBoolIfSet(rawFlags['allow-assets-in-different-dataset'])\n const allowFailingAssets = toBoolIfSet(rawFlags['allow-failing-assets'])\n const assetConcurrency = toBoolIfSet(rawFlags['asset-concurrency'])\n const replaceAssets = toBoolIfSet(rawFlags['replace-assets'])\n const skipCrossDatasetReferences = toBoolIfSet(rawFlags['skip-cross-dataset-references'])\n const allowSystemDocuments = toBoolIfSet(rawFlags['allow-system-documents'])\n const replace = toBoolIfSet(rawFlags.replace)\n const missing = toBoolIfSet(rawFlags.missing)\n return {\n allowAssetsInDifferentDataset,\n allowFailingAssets,\n assetConcurrency,\n skipCrossDatasetReferences,\n allowSystemDocuments,\n replaceAssets,\n replace,\n missing,\n }\n}\n\nconst importDatasetCommand: CliCommandDefinition = {\n name: 'import',\n group: 'dataset',\n signature: '[FILE | FOLDER | URL] [TARGET_DATASET]',\n description: 'Import documents to given dataset from either an ndjson file or a gzipped tarball',\n helpText,\n // eslint-disable-next-line max-statements\n action: async (args, context) => {\n const {apiClient, output, chalk, fromInitCommand} = context\n const flags = parseFlags(args.extOptions)\n const {\n allowAssetsInDifferentDataset,\n allowFailingAssets,\n assetConcurrency,\n skipCrossDatasetReferences,\n allowSystemDocuments,\n replaceAssets,\n } = flags\n\n const operation = getMutationOperation(args.extOptions)\n const client = apiClient()\n\n const [file, target] = args.argsWithoutOptions\n if (!file) {\n throw new Error(\n `Source file name and target dataset must be specified (\"sanity dataset import ${chalk.bold(\n '[file]',\n )} [dataset]\")`,\n )\n }\n\n const targetDataset = await determineTargetDataset(target, context)\n debug(`Target dataset has been set to \"${targetDataset}\"`)\n\n const isUrl = /^https?:\\/\\//i.test(file)\n let inputStream\n let assetsBase\n let sourceIsFolder = false\n\n if (isUrl) {\n debug('Input is a URL, streaming from source URL')\n inputStream = await getUrlStream(file)\n } else {\n const sourceFile = path.resolve(process.cwd(), file)\n const fileStats = await fs.stat(sourceFile).catch(() => null)\n if (!fileStats) {\n throw new Error(`${sourceFile} does not exist or is not readable`)\n }\n\n sourceIsFolder = fileStats.isDirectory()\n if (sourceIsFolder) {\n inputStream = sourceFile\n } else {\n assetsBase = path.dirname(sourceFile)\n inputStream = await createReadStream(sourceFile)\n }\n }\n\n const importClient = client.clone().config({dataset: targetDataset})\n\n // Print information about what projectId and dataset it is being imported to\n const {projectId, dataset} = importClient.config()\n\n output.print('╭───────────────────────────────────────────────╮')\n output.print('│ │')\n output.print('│ Importing to: │')\n output.print(`│ ${chalk.bold('projectId')}: ${chalk.cyan(projectId).padEnd(44)} │`)\n output.print(`│ ${chalk.bold('dataset')}: ${chalk.cyan(dataset).padEnd(46)} │`)\n output.print('│ │')\n output.print('╰───────────────────────────────────────────────╯')\n output.print('')\n\n let currentStep: string | undefined\n let currentProgress: ReturnType<CliOutputter['spinner']> | undefined\n let stepStart: number | undefined\n let spinInterval: ReturnType<typeof setInterval> | null = null\n let percent: string | undefined\n\n function onProgress(opts: ProgressEvent) {\n const lengthComputable = opts.total\n const sameStep = opts.step == 
currentStep\n percent = getPercentage(opts)\n\n if (lengthComputable && opts.total === opts.current) {\n if (spinInterval) {\n clearInterval(spinInterval)\n }\n spinInterval = null\n }\n\n if (sameStep) {\n return\n }\n\n // Moved to a new step\n const prevStep = currentStep\n const prevStepStart = stepStart || Date.now()\n stepStart = Date.now()\n currentStep = opts.step\n\n if (currentProgress && currentProgress.succeed) {\n const timeSpent = prettyMs(Date.now() - prevStepStart, {\n secondsDecimalDigits: 2,\n })\n currentProgress.text = `[100%] ${prevStep} (${timeSpent})`\n currentProgress.succeed()\n }\n\n currentProgress = output.spinner(`[0%] ${opts.step} (0.00s)`).start()\n\n if (spinInterval) {\n clearInterval(spinInterval)\n spinInterval = null\n }\n\n spinInterval = setInterval(() => {\n const timeSpent = prettyMs(Date.now() - prevStepStart, {\n secondsDecimalDigits: 2,\n })\n\n if (currentProgress) {\n currentProgress.text = `${percent}${opts.step} (${timeSpent})`\n }\n }, 60)\n }\n\n function endTask({success}: {success: boolean}) {\n if (spinInterval) {\n clearInterval(spinInterval)\n }\n\n spinInterval = null\n\n if (success && stepStart && currentProgress) {\n const timeSpent = prettyMs(Date.now() - stepStart, {\n secondsDecimalDigits: 2,\n })\n currentProgress.text = `[100%] ${currentStep} (${timeSpent})`\n currentProgress.succeed()\n } else if (currentProgress) {\n currentProgress.fail()\n }\n }\n\n // Start the import!\n try {\n const {numDocs, warnings} = await sanityImport(inputStream, {\n client: importClient,\n assetsBase,\n operation,\n onProgress,\n allowFailingAssets,\n allowAssetsInDifferentDataset,\n skipCrossDatasetReferences,\n allowSystemDocuments,\n assetConcurrency,\n replaceAssets,\n })\n\n endTask({success: true})\n\n output.print('Done! Imported %d documents to dataset \"%s\"\\n', numDocs, targetDataset)\n printWarnings(warnings, output)\n } catch (err) {\n endTask({success: false})\n\n const isNonRefConflict =\n !fromInitCommand &&\n err.response &&\n err.response.statusCode === 409 &&\n err.step !== 'strengthen-references'\n\n if (!isNonRefConflict) {\n throw err\n }\n\n const message = [\n err.message,\n '',\n 'You probably want either:',\n ' --replace (replace existing documents with same IDs)',\n ' --missing (only import documents that do not already exist)',\n '',\n ].join('\\n')\n\n // @todo SUBCLASS ERROR?\n const error = new Error(message) as any\n error.details = err.details\n error.response = err.response\n error.responseBody = err.responseBody\n\n throw error\n }\n },\n}\n\nasync function determineTargetDataset(target: string, context: CliCommandContext) {\n const {apiClient, output, prompt} = context\n const client = apiClient()\n\n if (target) {\n const dsError = validateDatasetName(target)\n if (dsError) {\n throw new Error(dsError)\n }\n }\n\n debug('Fetching available datasets')\n const spinner = output.spinner('Fetching available datasets').start()\n const datasets = await client.datasets.list()\n spinner.succeed('[100%] Fetching available datasets')\n\n let targetDataset = target ? 
`${target}` : null\n if (!targetDataset) {\n targetDataset = await chooseDatasetPrompt(context, {\n message: 'Select target dataset',\n allowCreation: true,\n })\n } else if (!datasets.find((dataset) => dataset.name === targetDataset)) {\n debug('Target dataset does not exist, prompting for creation')\n const shouldCreate = await prompt.single({\n type: 'confirm',\n message: `Dataset \"${targetDataset}\" does not exist, would you like to create it?`,\n default: true,\n })\n\n if (!shouldCreate) {\n throw new Error(`Dataset \"${targetDataset}\" does not exist`)\n }\n\n await client.datasets.create(targetDataset)\n }\n\n return targetDataset\n}\n\nfunction getMutationOperation(flags: ParsedImportFlags) {\n const {replace, missing} = flags\n if (replace && missing) {\n throw new Error('Cannot use both --replace and --missing')\n }\n\n if (flags.replace) {\n return 'createOrReplace'\n }\n\n if (flags.missing) {\n return 'createIfNotExists'\n }\n\n return 'create'\n}\n\nfunction getPercentage(opts: ProgressEvent) {\n if (!opts.total || typeof opts.current === 'undefined') {\n return ''\n }\n\n const percent = Math.floor((opts.current / opts.total) * 100)\n return `[${padStart(`${percent}`, 3, ' ')}%] `\n}\n\nfunction getUrlStream(url: string) {\n const request = getIt([promise({onlyBody: true})])\n return request({url, stream: true})\n}\n\nfunction printWarnings(warnings: ImportWarning[], output: CliOutputter) {\n const assetFails = warnings.filter((warn) => warn.type === 'asset')\n\n if (!assetFails.length) {\n return\n }\n\n const warn = (output.warn || output.print).bind(output)\n\n warn(yellow('⚠ Failed to import the following %s:'), assetFails.length > 1 ? 'assets' : 'asset')\n\n warnings.forEach((warning) => {\n warn(` ${warning.url}`)\n })\n}\n\nexport default importDatasetCommand\n","import {type CliCommandAction} from '@sanity/cli'\n\nimport * as aliasClient from './datasetAliasesClient'\nimport {ALIAS_PREFIX} from './datasetAliasesClient'\n\nexport const listAliasesHandler: CliCommandAction = async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n\n const aliases = await aliasClient.listAliases(client)\n output.print(\n aliases\n .map((set) => `${ALIAS_PREFIX}${set.name} -> ${set.datasetName || '<unlinked>'}`)\n .join('\\n'),\n )\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {listAliasesHandler} from './alias/listAliasesHandler'\n\nconst listDatasetsCommand: CliCommandDefinition = {\n name: 'list',\n group: 'dataset',\n helpText: '',\n signature: '',\n description: 'List datasets of your project',\n action: async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n const datasets = await client.datasets.list()\n output.print(datasets.map((set) => set.name).join('\\n'))\n\n // Print alias list\n await listAliasesHandler(args, context)\n },\n}\n\nexport default listDatasetsCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type DeployStudioActionFlags} from '../../actions/deploy/deployAction'\n\nconst helpText = `\nOptions\n --source-maps Enable source maps for built bundles (increases size of bundle)\n --auto-updates / --no-auto-updates Enable/disable auto updates of studio versions\n --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)\n --no-build Don't build the studio prior to deploy, instead deploying the version currently in \\`dist/\\`\n --schema-required 
Require schema extraction and storing to be successful\n --verbose Enable verbose logging\n -y, --yes Unattended mode, answers \"yes\" to any \"yes/no\" prompt and otherwise uses defaults\n\nExamples\n sanity deploy\n sanity deploy --no-minify --source-maps\n`\n\nconst deployCommand: CliCommandDefinition = {\n name: 'deploy',\n signature: '[SOURCE_DIR] [--no-build] [--source-maps] [--no-minify]',\n description: 'Builds and deploys Sanity Studio to Sanity hosting',\n action: async (\n args: CliCommandArguments<DeployStudioActionFlags>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/deploy/deployAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default deployCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nconst helpText = `\nExamples\n sanity undeploy\n`\n\nconst undeployCommand: CliCommandDefinition = {\n name: 'undeploy',\n signature: '',\n description: 'Removes the deployed Sanity Studio from Sanity hosting',\n action: async (\n args: CliCommandArguments<Record<string, unknown>>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/deploy/undeployAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default undeployCommand\n","import fs from 'node:fs/promises'\nimport os from 'node:os'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {\n type IdentifiedSanityDocumentStub,\n type MultipleMutationResult,\n type Mutation,\n type SanityClient,\n} from '@sanity/client'\nimport {uuid} from '@sanity/uuid'\nimport chokidar from 'chokidar'\nimport execa from 'execa'\nimport json5 from 'json5'\nimport {isEqual, isPlainObject, noop} from 'lodash'\n\ntype MutationOperationName = 'create' | 'createOrReplace' | 'createIfNotExists'\n\ninterface CreateFlags {\n dataset?: string\n replace?: boolean\n missing?: boolean\n watch?: boolean\n json5?: boolean\n id?: string\n}\n\nconst helpText = `\nOptions\n --replace On duplicate document IDs, replace existing document with specified document(s)\n --missing On duplicate document IDs, don't modify the target document(s)\n --watch Write the documents whenever the target file or buffer changes\n --json5 Use JSON5 file type to allow a \"simplified\" version of JSON\n --id <id> Specify a document ID to use. Will fetch remote document ID and populate editor.\n --dataset NAME to override dataset\n\nExamples\n # Create the document specified in \"myDocument.json\".\n sanity documents create myDocument.json\n\n # Open configured $EDITOR and create the specified document(s)\n sanity documents create\n\n # Fetch document with the ID \"myDocId\" and open configured $EDITOR with the\n # current document content (if any). Replace document with the edited version\n # when the editor closes\n sanity documents create --id myDocId --replace\n\n # Open configured $EDITOR and replace the document with the given content\n # on each save. 
Use JSON5 file extension and parser for simplified syntax.\n sanity documents create --id myDocId --watch --replace --json5\n`\n\nconst createDocumentsCommand: CliCommandDefinition<CreateFlags> = {\n name: 'create',\n group: 'documents',\n signature: '[FILE]',\n helpText,\n description: 'Create one or more documents',\n // eslint-disable-next-line complexity\n action: async (args, context) => {\n const {apiClient, output} = context\n const {replace, missing, watch, id, dataset} = args.extOptions\n const [file] = args.argsWithoutOptions\n const useJson5 = args.extOptions.json5\n const client = dataset ? apiClient().clone().config({dataset}) : apiClient()\n\n if (replace && missing) {\n throw new Error('Cannot use both --replace and --missing')\n }\n\n if (id && file) {\n throw new Error('Cannot use --id when specifying a file path')\n }\n\n let operation: MutationOperationName = 'create'\n if (replace || missing) {\n operation = replace ? 'createOrReplace' : 'createIfNotExists'\n }\n\n if (file) {\n const contentPath = path.resolve(process.cwd(), file)\n const content = json5.parse(await fs.readFile(contentPath, 'utf8'))\n const result = await writeDocuments(content, operation, client)\n output.print(getResultMessage(result, operation))\n return\n }\n\n // Create a temporary file and use that as source, opening an editor on it\n const docId = id || uuid()\n const ext = useJson5 ? 'json5' : 'json'\n const tmpFile = path.join(os.tmpdir(), 'sanity-cli', `${docId}.${ext}`)\n const stringify = useJson5 ? json5.stringify : JSON.stringify\n const defaultValue = (id && (await client.getDocument(id))) || {_id: docId, _type: 'specify-me'}\n await fs.mkdir(path.join(os.tmpdir(), 'sanity-cli'), {recursive: true})\n await fs.writeFile(tmpFile, stringify(defaultValue, null, 2), 'utf8')\n\n const editor = getEditor()\n if (watch) {\n // If we're in watch mode, we want to run the creation on each change (if it validates)\n registerUnlinkOnSigInt(tmpFile)\n output.print(`Watch mode: ${tmpFile}`)\n output.print('Watch mode: Will write documents on each save.')\n output.print('Watch mode: Press Ctrl + C to cancel watch mode.')\n chokidar.watch(tmpFile).on('change', () => {\n output.print('')\n return readAndPerformCreatesFromFile(tmpFile)\n })\n execa(editor.bin, editor.args.concat(tmpFile), {stdio: 'inherit'})\n } else {\n // While in normal mode, we just want to wait for the editor to close and run the thing once\n execa.sync(editor.bin, editor.args.concat(tmpFile), {stdio: 'inherit'})\n await readAndPerformCreatesFromFile(tmpFile)\n await fs.unlink(tmpFile).catch(noop)\n }\n\n async function readAndPerformCreatesFromFile(filePath: string) {\n let content\n try {\n content = json5.parse(await fs.readFile(filePath, 'utf8'))\n } catch (err) {\n output.error(`Failed to read input: ${err.message}`)\n return\n }\n\n if (isEqual(content, defaultValue)) {\n output.print('Value not modified, doing nothing.')\n output.print('Modify document to trigger creation.')\n return\n }\n\n try {\n const writeResult = await writeDocuments(content, operation, client)\n output.print(getResultMessage(writeResult, operation))\n } catch (err) {\n output.error(`Failed to write documents: ${err.message}`)\n if (err.message.includes('already exists')) {\n output.error('Perhaps you want to use `--replace` or `--missing`?')\n }\n }\n }\n },\n}\n\nfunction registerUnlinkOnSigInt(tmpFile: string) {\n process.on('SIGINT', async () => {\n await fs.unlink(tmpFile).catch(noop)\n // eslint-disable-next-line no-process-exit\n 
process.exit(130)\n })\n}\n\nfunction writeDocuments(\n documents: {_id?: string; _type: string} | {_id?: string; _type: string}[],\n operation: MutationOperationName,\n client: SanityClient,\n) {\n const docs = Array.isArray(documents) ? documents : [documents]\n if (docs.length === 0) {\n throw new Error('No documents provided')\n }\n\n const mutations = docs.map((doc, index): Mutation => {\n validateDocument(doc, index, docs)\n if (operation === 'create') {\n return {create: doc}\n }\n\n if (operation === 'createIfNotExists') {\n if (isIdentifiedSanityDocument(doc)) {\n return {createIfNotExists: doc}\n }\n\n throw new Error(`Missing required _id attribute for ${operation}`)\n }\n\n if (operation === 'createOrReplace') {\n if (isIdentifiedSanityDocument(doc)) {\n return {createOrReplace: doc}\n }\n\n throw new Error(`Missing required _id attribute for ${operation}`)\n }\n\n throw new Error(`Unsupported operation ${operation}`)\n })\n\n return client.transaction(mutations).commit()\n}\n\nfunction validateDocument(doc: unknown, index: number, arr: unknown[]) {\n const isSingle = arr.length === 1\n\n if (!isPlainObject(doc)) {\n throw new Error(getErrorMessage('must be an object', index, isSingle))\n }\n\n if (!isSanityDocumentish(doc)) {\n throw new Error(getErrorMessage('must have a `_type` property of type string', index, isSingle))\n }\n}\n\nfunction isSanityDocumentish(doc: unknown): doc is {_type: string} {\n return (\n doc !== null &&\n typeof doc === 'object' &&\n '_type' in doc &&\n typeof (doc as any)._type === 'string'\n )\n}\n\nfunction isIdentifiedSanityDocument(doc: unknown): doc is IdentifiedSanityDocumentStub {\n return isSanityDocumentish(doc) && '_id' in doc\n}\n\nfunction getErrorMessage(message: string, index: number, isSingle: boolean): string {\n return isSingle ? `Document ${message}` : `Document at index ${index} ${message}`\n}\n\nfunction getResultMessage(\n result: MultipleMutationResult,\n operation: MutationOperationName,\n): string {\n const joiner = '\\n - '\n if (operation === 'createOrReplace') {\n return `Upserted:\\n - ${result.results.map((res) => res.id).join(joiner)}`\n }\n\n if (operation === 'create') {\n return `Created:\\n - ${result.results.map((res) => res.id).join(joiner)}`\n }\n\n // \"Missing\" (createIfNotExists)\n const created: string[] = []\n const skipped: string[] = []\n for (const res of result.results) {\n if (res.operation === 'update') {\n skipped.push(res.id)\n } else {\n created.push(res.id)\n }\n }\n\n if (created.length > 0 && skipped.length > 0) {\n return [\n `Created:\\n - ${created.join(joiner)}`,\n `Skipped (already exists):${joiner}${skipped.join(joiner)}`,\n ].join('\\n\\n')\n } else if (created.length > 0) {\n return `Created:\\n - ${created.join(joiner)}`\n }\n\n return `Skipped (already exists):\\n - ${skipped.join(joiner)}`\n}\n\nfunction getEditor() {\n const defaultEditor = /^win/.test(process.platform) ? 
'notepad' : 'vim'\n // eslint-disable-next-line no-process-env\n const editor = process.env.VISUAL || process.env.EDITOR || defaultEditor\n const args = editor.split(/\\s+/)\n const bin = args.shift() || ''\n return {bin, args}\n}\n\nexport default createDocumentsCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport pluralize from 'pluralize-esm'\n\nconst helpText = `\nDelete a document from the projects configured dataset\n\nOptions\n --dataset NAME to override dataset\n\nExample\n # Delete the document with the ID \"myDocId\"\n sanity documents delete myDocId\n\n # ID wrapped in double or single quote works equally well\n sanity documents delete 'myDocId'\n\n # Delete document with ID \"someDocId\" from dataset \"blog\"\n sanity documents delete --dataset=blog someDocId\n\n # Delete the document with ID \"doc1\" and \"doc2\"\n sanity documents delete doc1 doc2\n`\n\ninterface DeleteFlags {\n dataset?: string\n}\n\nconst deleteDocumentsCommand: CliCommandDefinition<DeleteFlags> = {\n name: 'delete',\n group: 'documents',\n signature: '[ID] [...IDS]',\n helpText,\n description: 'Delete a document by ID',\n action: async (args, context) => {\n const {apiClient, output, chalk} = context\n const {dataset} = args.extOptions\n const ids = args.argsWithoutOptions.map((str) => `${str}`)\n\n if (!ids.length) {\n throw new Error('Document ID must be specified')\n }\n\n const client = dataset ? apiClient().clone().config({dataset}) : apiClient()\n\n const transaction = ids.reduce((trx, id) => trx.delete(id), client.transaction())\n try {\n const {results} = await transaction.commit()\n const deleted = results.filter((res) => res.operation === 'delete').map((res) => res.id)\n const notFound = ids.filter((id) => !deleted.includes(id))\n if (deleted.length > 0) {\n output.print(`Deleted ${deleted.length} ${pluralize('document', deleted.length)}`)\n }\n\n if (notFound.length > 0) {\n output.error(\n chalk.red(`${pluralize('Document', notFound.length)} not found: ${notFound.join(', ')}`),\n )\n }\n } catch (err) {\n throw new Error(`Failed to delete ${pluralize('document', ids.length)}:\\n${err.message}`)\n }\n },\n}\n\nexport default deleteDocumentsCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst documentsGroup: CliCommandGroupDefinition = {\n name: 'documents',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages documents in your Sanity Content Lake datasets',\n}\n\nexport default documentsGroup\n","import {type CliCommandContext} from '@sanity/cli'\nimport tokenize, {type LexerToken} from 'json-lexer'\n\ninterface KeyToken {\n type: 'key'\n value: string\n raw: string\n}\n\ntype ExtendedLexerToken = LexerToken | KeyToken\n\nconst identity = (inp: string): string => inp\n\nexport function colorizeJson(input: unknown, chalk: CliCommandContext['chalk']): string {\n const formatters: Record<ExtendedLexerToken['type'], (str: string) => string> = {\n punctuator: chalk.white,\n key: chalk.white,\n string: chalk.green,\n number: chalk.yellow,\n literal: chalk.bold,\n whitespace: identity,\n }\n\n const json = JSON.stringify(input, null, 2)\n\n return tokenize(json)\n .map((token, i, arr): ExtendedLexerToken => {\n // Note how the following only works because we pretty-print the JSON\n const prevToken = i === 0 ? 
token : arr[i - 1]\n if (\n token.type === 'string' &&\n prevToken.type === 'whitespace' &&\n /^\\n\\s+$/.test(prevToken.value)\n ) {\n return {...token, type: 'key'}\n }\n\n return token\n })\n .map((token) => {\n const formatter = formatters[token.type] || identity\n return formatter(token.raw)\n })\n .join('')\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {colorizeJson} from '../../util/colorizeJson'\n\nconst helpText = `\nGet and print a document from the projects configured dataset\n\nOptions\n --pretty colorized JSON output\n --dataset NAME to override dataset\n\nExamples\n # Get the document with the ID \"myDocId\"\n sanity documents get myDocId\n\n # ID wrapped in double or single quote works equally well\n sanity documents get 'myDocId'\n`\n\ninterface GetDocumentFlags {\n pretty?: boolean\n dataset?: string\n}\n\nconst getDocumentsCommand: CliCommandDefinition<GetDocumentFlags> = {\n name: 'get',\n group: 'documents',\n signature: '[DOCUMENT_ID]',\n helpText,\n description: 'Get and print a document by ID',\n action: async (args, context) => {\n const {apiClient, output, chalk} = context\n const {pretty, dataset} = args.extOptions\n const [docId] = args.argsWithoutOptions.map((str) => `${str}`)\n\n if (!docId) {\n throw new Error('Document ID must be specified')\n }\n\n const client = dataset ? apiClient().clone().config({dataset}) : apiClient()\n\n try {\n const doc = await client.getDocument(docId)\n if (!doc) {\n throw new Error(`Document ${docId} not found`)\n }\n\n output.print(pretty ? colorizeJson(doc, chalk) : JSON.stringify(doc, null, 2))\n } catch (err) {\n throw new Error(`Failed to fetch document:\\n${err.message}`)\n }\n },\n}\n\nexport default getDocumentsCommand\n","import {type CliCommandArguments, type CliCommandContext} from '@sanity/cli'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {colorizeJson} from '../../util/colorizeJson'\n\nconst defaultApiVersion = 'v2022-06-01'\n\nconst helpText = `\nRun a query against the projects configured dataset\n\nOptions\n --pretty colorized JSON output\n --dataset NAME to override dataset\n --project PROJECT to override project ID\n --anonymous Send the query without any authorization token\n --api-version API version to use (defaults to \\`${defaultApiVersion}\\`)\n\nEnvironment variables\n \\`SANITY_CLI_QUERY_API_VERSION\\` - will use the defined API version,\n unless \\`--api-version\\` is specified.\n\nExamples\n # Fetch 5 documents of type \"movie\"\n sanity documents query '*[_type == \"movie\"][0..4]'\n\n # Fetch title of the oldest movie in the dataset named \"staging\"\n sanity documents query '*[_type == \"movie\"]|order(releaseDate asc)[0]{title}' --dataset staging\n\n # Use API version v2021-06-07 and do a query\n sanity documents query --api-version v2021-06-07 '*[_id == \"header\"] { \"headerText\": pt::text(body) }'\n`\n\ninterface CliQueryCommandFlags {\n pretty?: boolean\n anonymous?: boolean\n dataset?: string\n project?: string\n apiVersion?: string\n}\n\nexport default {\n name: 'query',\n group: 'documents',\n signature: '[QUERY]',\n helpText,\n description: 'Query for documents',\n action: async (\n args: CliCommandArguments<CliQueryCommandFlags>,\n context: CliCommandContext,\n ): Promise<void> => {\n // Reparsing arguments for improved control of flags\n const {\n pretty,\n dataset,\n project,\n anonymous,\n 'api-version': apiVersion,\n } = await parseCliFlags(args)\n const {apiClient, output, chalk, cliConfig} = context\n const [query] = 
args.argsWithoutOptions\n\n if (!query) {\n throw new Error('Query must be specified')\n }\n\n if (!apiVersion) {\n output.warn(chalk.yellow(`--api-version not specified, using \\`${defaultApiVersion}\\``))\n }\n\n const requireDataset = !dataset\n const requireProject = !project\n const requireUser = !anonymous\n\n if (requireProject && !cliConfig?.api?.projectId) {\n throw new Error(\n 'No project configured in CLI config - either configure one, or use `--project` flag',\n )\n }\n\n if (requireDataset && !cliConfig?.api?.dataset) {\n throw new Error(\n 'No dataset configured in CLI config - either configure one, or use `--dataset` flag',\n )\n }\n\n const baseClient = apiClient({requireProject, requireUser}).clone()\n const {dataset: originalDataset, projectId: originalProjectId} = baseClient.config()\n\n const client = baseClient.config({\n projectId: project || originalProjectId,\n dataset: dataset || originalDataset,\n apiVersion: apiVersion || defaultApiVersion,\n })\n\n try {\n const docs = await client.fetch(query)\n if (!docs) {\n throw new Error('Query returned no results')\n }\n\n output.print(pretty ? colorizeJson(docs, chalk) : JSON.stringify(docs, null, 2))\n } catch (err) {\n throw new Error(`Failed to run query:\\n${err.message}`)\n }\n },\n}\n\nfunction parseCliFlags(args: CliCommandArguments<CliQueryCommandFlags>) {\n // eslint-disable-next-line no-process-env\n const fallbackApiVersion = process.env.SANITY_CLI_QUERY_API_VERSION\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .option('pretty', {type: 'boolean', default: false})\n .option('dataset', {type: 'string'})\n .option('project', {type: 'string'})\n .option('anonymous', {type: 'boolean', default: false})\n .option('api-version', {type: 'string', default: fallbackApiVersion}).argv\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = `Downloads and validates all document specified in a workspace`\n\nconst helpText = `\nOptions\n -y, --yes Skips the first confirmation prompt.\n --workspace <name> The name of the workspace to use when downloading and validating all documents.\n --dataset <name> Override the dataset used. By default, this is derived from the given workspace.\n --file <filepath> Provide a path to either an .ndjson file or a tarball containing an .ndjson file.\n --format <pretty|ndjson|json> The output format used to print the found validation markers and report progress.\n --level <error|warning|info> The minimum level reported out. Defaults to warning.\n --max-custom-validation-concurrency <number> Specify how many custom validators can run concurrently. Defaults to 5.\n --max-fetch-concurrency <number> Specify how many \\`client.fetch\\` requests are allow concurrency at once. 
Defaults to 25.\n\nExamples\n # Validates all documents in a Sanity project with more than one workspace\n sanity documents validate --workspace default\n\n # Override the dataset specified in the workspace\n sanity documents validate --workspace default --dataset staging\n\n # Save the results of the report into a file\n sanity documents validate --yes > report.txt\n\n # Report out info level validation markers too\n sanity documents validate --level info\n`\n\nconst validateDocumentsCommand: CliCommandDefinition = {\n name: 'validate',\n group: 'documents',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/validation/validateAction')\n\n return mod.default(args, context)\n },\n} satisfies CliCommandDefinition\n\nexport default validateDocumentsCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst helpText = `\nOptions\n --with-user-token Prime access token from CLI config into getCliClient()\n --mock-browser-env Mocks a browser-like environment using jsdom\n\nExamples\n # Run the script at some/script.js in Sanity context\n sanity exec some/script.js\n\n # Run the script at migrations/fullname.ts and configure \\`getCliClient()\\`\n # from \\`sanity/cli\\`to include the current user's token\n sanity exec migrations/fullname.ts --with-user-token\n\n # Run the script at scripts/browserScript.js in a mock browser environment\n sanity exec scripts/browserScript.js --mock-browser-env\n\n # Pass arbitrary arguments to scripts by separating them with a \\`--\\`.\n # Arguments are available in \\`process.argv\\` as they would in regular node scripts\n # eg the following command would yield a \\`process.argv\\` of:\n # ['/path/to/node', '/path/to/myscript.js', '--dry-run', 'positional-argument']\n sanity exec --mock-browser-env myscript.js -- --dry-run positional-argument\n`\n\nexport const execCommand: CliCommandDefinition = {\n name: 'exec',\n signature: 'SCRIPT',\n description: 'Executes a script within the Sanity Studio context',\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/exec/execScript')\n\n return mod.default(args, context)\n },\n}\n\nexport default execCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type DeleteGraphQLApiFlags} from '../../actions/graphql/deleteApiAction'\n\nconst helpText = `\nOptions\n --api <api-id> Undeploy API with this ID (project, dataset and tag flags takes preference)\n --project <projectId> Project ID to delete GraphQL API for\n --dataset <dataset> Delete GraphQL API for the given dataset\n --tag <tag> Delete GraphQL API for the given tag (defaults to 'default')\n --force Skip confirmation prompt, forcefully undeploying the GraphQL API\n\nExamples\n sanity graphql undeploy\n sanity graphql undeploy --api ios\n sanity graphql undeploy --dataset staging\n sanity graphql undeploy --dataset staging --tag next\n`\n\nconst deleteGraphQLAPICommand: CliCommandDefinition = {\n name: 'undeploy',\n group: 'graphql',\n signature: '',\n description: 'Remove a deployed GraphQL API',\n action: async (args: CliCommandArguments<DeleteGraphQLApiFlags>, context: CliCommandContext) => {\n const mod = await import('../../actions/graphql/deleteApiAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default deleteGraphQLAPICommand\n","import {type CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\n\nconst helpText = 
`\nOptions\n --dry-run Validate defined APIs, exiting with an error on breaking changes\n --force Deploy API without confirming breaking changes\n --api <api-id> Only deploy API with this ID. Can be specified multiple times.\n\nThe following options will override any setting from the CLI configuration file\n(sanity.cli.js/sanity.cli.ts) - and applies to ALL defined APIs defined in that\nconfiguration file. Tread with caution!\n\n --tag Deploy API(s) to given tag (defaults to 'default')\n --dataset <name> Deploy API for the given dataset\n --generation <gen1|gen2|gen3> API generation to deploy (defaults to 'gen3')\n --non-null-document-fields Use non-null document fields (_id, _type etc)\n --playground Enable GraphQL playground for easier debugging\n --no-playground Disable GraphQL playground\n --with-union-cache *Experimental:* Enable union cache that optimizes schema generation for schemas with many self referencing types\n\nExamples\n # Deploy all defined GraphQL APIs\n sanity graphql deploy\n\n # Validate defined GraphQL APIs, check for breaking changes, skip deploy\n sanity graphql deploy --dry-run\n\n # Deploy only the GraphQL APIs with the IDs \"staging\" and \"ios\"\n sanity graphql deploy --api staging --api ios\n\n # Deploy all defined GraphQL APIs, overriding any playground setting\n sanity graphql deploy --playground\n`\n\nconst deployGraphQLAPICommand: CliCommandDefinition = {\n name: 'deploy',\n signature: '',\n group: 'graphql',\n description: 'Deploy a GraphQL API from the current Sanity schema',\n action: async (args: {argv?: string[]}, context: CliCommandContext) => {\n const mod = await import('../../actions/graphql/deployApiAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default deployGraphQLAPICommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst graphqlGroup: CliCommandGroupDefinition = {\n name: 'graphql',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: \"Deploys changes to your project's GraphQL API(s)\",\n}\n\nexport default graphqlGroup\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nconst helpText = `\nExamples\n sanity graphql list\n`\n\nconst listGraphQLAPIsCommand: CliCommandDefinition = {\n name: 'list',\n signature: '',\n group: 'graphql',\n description: 'Lists all the GraphQL endpoints deployed for this project',\n action: async (\n args: CliCommandArguments<Record<string, unknown>>,\n context: CliCommandContext,\n ) => {\n const mod = await import('../../actions/graphql/listApisAction')\n\n return mod.default(args, context)\n },\n helpText,\n}\n\nexport default listGraphQLAPIsCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport open from 'open'\n\nconst createHookCommand: CliCommandDefinition = {\n name: 'create',\n group: 'hook',\n signature: '',\n helpText: '',\n description: 'Create a new hook for the given dataset',\n action: async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n\n const {projectId} = client.config()\n if (!projectId) {\n throw new Error('No project ID found')\n }\n\n const projectInfo = (await client.projects.getById(projectId)) || {}\n const organizationId = projectInfo.organizationId || 'personal'\n const manageUrl = `https://www.sanity.io/organizations/${organizationId}/project/${projectId}/api/webhooks/new`\n\n output.print(`Opening ${manageUrl}`)\n open(manageUrl)\n },\n}\n\nexport default createHookCommand\n","import {type 
CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type Hook} from './types'\n\nconst deleteHookCommand: CliCommandDefinition = {\n name: 'delete',\n group: 'hook',\n signature: '[NAME]',\n helpText: '',\n description: 'Delete a hook within your project',\n action: async (args, context) => {\n const {apiClient} = context\n const [name] = args.argsWithoutOptions\n const client = apiClient()\n\n const hookId = await promptForHook(name, context)\n try {\n await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request({method: 'DELETE', uri: `/hooks/${hookId}`})\n } catch (err) {\n throw new Error(`Hook deletion failed:\\n${err.message}`)\n }\n },\n}\n\nasync function promptForHook(specified: string | undefined, context: CliCommandContext) {\n const specifiedName = specified && specified.toLowerCase()\n const {prompt, apiClient} = context\n const client = apiClient()\n\n const hooks = await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request<Hook[]>({uri: '/hooks', json: true})\n\n if (specifiedName) {\n const selected = hooks.filter((hook) => hook.name.toLowerCase() === specifiedName)[0]\n if (!selected) {\n throw new Error(`Hook with name \"${specified} not found\"`)\n }\n\n return selected.id\n }\n\n const choices = hooks.map((hook) => ({value: hook.id, name: hook.name}))\n return prompt.single({\n message: 'Select hook to delete',\n type: 'list',\n choices,\n })\n}\n\nexport default deleteHookCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nconst hookGroup: CliCommandGroupDefinition = {\n name: 'hook',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Sets up and manages webhooks within your Sanity project',\n}\n\nexport default hookGroup\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {type DeliveryAttempt} from './types'\n\nconst printHookAttemptCommand: CliCommandDefinition = {\n name: 'attempt',\n group: 'hook',\n signature: 'ATTEMPT_ID',\n helpText: '',\n description: 'Print details of a given webhook delivery attempt',\n action: async (args, context) => {\n const {apiClient, output} = context\n const [attemptId] = args.argsWithoutOptions\n const client = apiClient()\n\n let attempt\n try {\n attempt = await client.request<DeliveryAttempt>({uri: `/hooks/attempts/${attemptId}`})\n } catch (err) {\n throw new Error(`Hook attempt retrieval failed:\\n${err.message}`)\n }\n\n const {createdAt, resultCode, resultBody, failureReason, inProgress} = attempt\n\n output.print(`Date: ${createdAt}`)\n output.print(`Status: ${getStatus(attempt)}`)\n output.print(`Status code: ${resultCode}`)\n\n if (attempt.isFailure) {\n output.print(`Failure: ${formatFailure(attempt)}`)\n }\n\n if (!inProgress && (!failureReason || failureReason === 'http')) {\n const body = resultBody ? `\\n---\\n${resultBody}\\n---\\n` : '<empty>'\n output.print(`Response body: ${body}`)\n }\n },\n}\n\nexport default printHookAttemptCommand\n\nexport function formatFailure(\n attempt: DeliveryAttempt,\n options: {includeHelp?: boolean} = {},\n): string {\n const {includeHelp} = options\n const {id, failureReason, resultCode} = attempt\n const help = includeHelp ? 
`(run \\`sanity hook attempt ${id}\\` for details)` : ''\n switch (failureReason) {\n case 'http':\n return `HTTP ${resultCode} ${help}`\n case 'timeout':\n return 'Request timed out'\n case 'network':\n return 'Network error'\n case 'other':\n default:\n }\n\n return 'Unknown error'\n}\n\nexport function getStatus(attempt: DeliveryAttempt): string {\n if (attempt.isFailure) {\n return 'Failed'\n }\n\n if (attempt.inProgress) {\n return 'In progress'\n }\n\n return 'Delivered'\n}\n","import {inspect} from 'node:util'\n\nimport {type CliCommandContext, type CliCommandDefinition} from '@sanity/cli'\nimport {groupBy} from 'lodash'\n\nimport {formatFailure} from './printHookAttemptCommand'\nimport {type DeliveryAttempt, type Hook, type HookMessage} from './types'\n\ninterface ListHookFlags {\n detailed?: boolean\n}\n\nconst listHookLogsCommand: CliCommandDefinition<ListHookFlags> = {\n name: 'logs',\n group: 'hook',\n signature: '[NAME]',\n helpText: '',\n description: 'List latest log entries for a given hook',\n action: async (args, context) => {\n const {apiClient} = context\n const flags = args.extOptions\n const [name] = args.argsWithoutOptions\n const client = apiClient()\n\n const hookId = await promptForHook(name, context)\n let messages\n let attempts\n try {\n messages = await client.request<HookMessage[]>({uri: `/hooks/${hookId}/messages`})\n attempts = await client.request<DeliveryAttempt[]>({uri: `/hooks/${hookId}/attempts`})\n } catch (err) {\n throw new Error(`Hook logs retrieval failed:\\n${err.message}`)\n }\n\n const groupedAttempts = groupBy(attempts, 'messageId')\n const populated = messages.map((msg): HookMessage & {attempts: DeliveryAttempt[]} => ({\n ...msg,\n attempts: groupedAttempts[msg.id],\n }))\n\n const totalMessages = messages.length - 1\n populated.forEach((message, i) => {\n printMessage(message, context, {detailed: flags.detailed})\n printSeparator(context, totalMessages === i)\n })\n },\n}\n\nexport default listHookLogsCommand\n\nasync function promptForHook(specified: string | undefined, context: CliCommandContext) {\n const specifiedName = specified && specified.toLowerCase()\n const {prompt, apiClient} = context\n const client = apiClient()\n\n const hooks = await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request<Hook[]>({uri: '/hooks', json: true})\n\n if (specifiedName) {\n const selected = hooks.filter((hook) => hook.name.toLowerCase() === specifiedName)[0]\n if (!selected) {\n throw new Error(`Hook with name \"${specified} not found\"`)\n }\n\n return selected.id\n }\n\n if (hooks.length === 0) {\n throw new Error('No hooks currently registered')\n }\n\n if (hooks.length === 1) {\n return hooks[0].id\n }\n\n const choices = hooks.map((hook) => ({value: hook.id, name: hook.name}))\n return prompt.single({\n message: 'Select hook to list logs for',\n type: 'list',\n choices,\n })\n}\n\nfunction printSeparator(context: CliCommandContext, skip: boolean) {\n if (!skip) {\n context.output.print('---\\n')\n }\n}\n\nfunction printMessage(\n message: HookMessage & {attempts: DeliveryAttempt[]},\n context: CliCommandContext,\n options: {detailed?: boolean},\n) {\n const {detailed} = options\n const {output, chalk} = context\n\n output.print(`Date: ${message.createdAt}`)\n output.print(`Status: ${message.status}`)\n output.print(`Result code: ${message.resultCode}`)\n\n if (message.failureCount > 0) {\n output.print(`Failures: ${message.failureCount}`)\n }\n\n if (detailed) {\n output.print('Payload:')\n 
output.print(inspect(JSON.parse(message.payload), {colors: true}))\n }\n\n if (detailed && message.attempts) {\n output.print('Attempts:')\n message.attempts.forEach((attempt) => {\n const date = attempt.createdAt.replace(/\\.\\d+Z$/, 'Z')\n const prefix = ` [${date}]`\n\n if (attempt.inProgress) {\n output.print(`${prefix} ${chalk.yellow('Pending')}`)\n } else if (attempt.isFailure) {\n const failure = formatFailure(attempt, {includeHelp: true})\n output.print(`${prefix} ${chalk.yellow(`Failure: ${failure}`)}`)\n } else {\n output.print(`${prefix} Success: HTTP ${attempt.resultCode} (${attempt.duration}ms)`)\n }\n })\n }\n\n // Leave some empty space between messages\n output.print('')\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nimport {type Hook} from './types'\n\nconst listHooksCommand: CliCommandDefinition = {\n name: 'list',\n group: 'hook',\n signature: '',\n helpText: '',\n description: 'List hooks for a given project',\n action: async (args, context) => {\n const {apiClient, output} = context\n const client = apiClient()\n\n let hooks\n try {\n hooks = await client\n .clone()\n .config({apiVersion: '2021-10-04'})\n .request<Hook[]>({uri: '/hooks'})\n } catch (err) {\n throw new Error(`Hook list retrieval failed:\\n${err.message}`)\n }\n\n hooks.forEach((hook) => {\n output.print(`Name: ${hook.name}`)\n output.print(`Dataset: ${hook.dataset}`)\n output.print(`URL: ${hook.url}`)\n\n if (hook.type === 'document') {\n output.print(`HTTP method: ${hook.httpMethod}`)\n\n if (hook.description) {\n output.print(`Description: ${hook.description}`)\n }\n }\n\n output.print('')\n })\n },\n}\n\nexport default listHooksCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = 'Extracts the studio configuration as one or more JSON manifest files.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change. It is currently intended for use with Create only.\n\nOptions\n --path Optional path to specify destination directory of the manifest files. Default: /dist/static\n\nExamples\n # Extracts manifests\n sanity manifest extract\n\n # Extracts manifests into /public/static\n sanity manifest extract --path /public/static\n`\n\nconst extractManifestCommand: CliCommandDefinition = {\n name: 'extract',\n group: 'manifest',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const {extractManifestSafe} = await import('../../actions/manifest/extractManifestAction')\n const extractError = await extractManifestSafe(args, context)\n if (extractError) {\n throw extractError\n }\n return extractError\n },\n}\n\nexport default extractManifestCommand\n","export default {\n name: 'manifest',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Interacts with the studio configuration.',\n}\n","export const MIGRATIONS_DIRECTORY = 'migrations'\nexport const MIGRATION_SCRIPT_EXTENSIONS = ['mjs', 'js', 'ts', 'cjs']\n","export const minimalAdvanced = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {defineMigration, patch, at, setIfMissing} from 'sanity/migrate'\n\n/**\n * this migration will set \\`Default title\\` on all documents that are missing a title\n * and make \\`true\\` the default value for the \\`enabled\\` field\n */\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? 
` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n async *migrate(documents, context) {\n for await (const document of documents()) {\n yield patch(document._id, [\n at('title', setIfMissing('Default title')),\n at('enabled', setIfMissing(true)),\n ])\n }\n }\n})\n`\n","export const minimalSimple = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {at, defineMigration, setIfMissing, unset} from 'sanity/migrate'\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? ` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n document(doc, context) {\n // this will be called for every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n\n return at('title', setIfMissing('Default title'))\n },\n node(node, path, context) {\n // this will be called for every node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n\n if (typeof node === 'string' && node === 'deleteme') {\n return unset()\n }\n },\n object(node, path, context) {\n // this will be called for every object node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n if (node._type === 'author') {\n // make sure all authors objects have a books array\n return at('books', setIfMissing([]))\n }\n },\n array(node, path, context) {\n // this will be called for every array node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n string(node, path, context) {\n // this will be called for every string node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n number(node, path, context) {\n // this will be called for every number node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n boolean(node, path, context) {\n // this will be called for every boolean node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n null(node, path, context) {\n // this will be called for every null node in every document of the matching type\n // any patch returned will be applied to the document\n // you can also return mutations that touches other documents\n },\n },\n})\n`\n","export const renameField = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {defineMigration, at, setIfMissing, unset} from 'sanity/migrate'\n\nconst from = 'oldFieldName'\nconst to = 'newFieldName'\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? 
` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n document(doc, context) {\n return [\n at(to, setIfMissing(doc[from])),\n at(from, unset())\n ]\n }\n }\n})\n`\n","export const renameType = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {defineMigration, at, set} from 'sanity/migrate'\n\nconst oldType = 'old'\nconst newType = 'new'\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? ` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n object(object, path, context) {\n if (object._type === oldType) {\n return at('_type', set(newType))\n }\n }\n }\n})\n`\n","export const stringToPTE = ({\n migrationName,\n documentTypes,\n}: {\n migrationName: string\n documentTypes: string[]\n}) => `import {pathsAreEqual, stringToPath} from 'sanity'\nimport {defineMigration, set} from 'sanity/migrate'\n\nconst targetPath = stringToPath('some.path')\n\nexport default defineMigration({\n title: '${migrationName}',\n${\n documentTypes.length > 0\n ? ` documentTypes: [${documentTypes.map((t) => JSON.stringify(t)).join(', ')}],\\n`\n : ''\n}\n migrate: {\n string(node, path, ctx) {\n if (pathsAreEqual(path, targetPath)) {\n return set([\n {\n style: 'normal',\n _type: 'block',\n children: [\n {\n _type: 'span',\n marks: [],\n text: node,\n },\n ],\n markDefs: [],\n },\n ])\n }\n },\n },\n})\n`\n","import {existsSync, mkdirSync} from 'node:fs'\nimport {writeFile} from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {deburr} from 'lodash'\n\nimport {MIGRATIONS_DIRECTORY} from './constants'\nimport {minimalAdvanced} from './templates/minimalAdvanced'\nimport {minimalSimple} from './templates/minimalSimple'\nimport {renameField} from './templates/renameField'\nimport {renameType} from './templates/renameType'\nimport {stringToPTE} from './templates/stringToPTE'\n\nconst helpText = `\nExamples:\n # Create a new migration, prompting for title and options\n sanity migration create\n\n # Create a new migration with the provided title, prompting for options\n sanity migration create \"Rename field from location to address\"\n`\n\n// eslint-disable-next-line @typescript-eslint/no-empty-interface\ninterface CreateMigrationFlags {}\n\nconst TEMPLATES = [\n {name: 'Minimalistic migration to get you started', template: minimalSimple},\n {name: 'Rename an object type', template: renameType},\n {name: 'Rename a field', template: renameField},\n {name: 'Convert string field to Portable Text', template: stringToPTE},\n {\n name: 'Advanced template using async iterators providing more fine grained control',\n template: minimalAdvanced,\n },\n]\n\nconst createMigrationCommand: CliCommandDefinition<CreateMigrationFlags> = {\n name: 'create',\n group: 'migration',\n signature: '[TITLE]',\n helpText,\n description: 'Create a new migration within your project',\n action: async (args, context) => {\n const {output, prompt, workDir, chalk} = context\n\n let [title] = args.argsWithoutOptions\n\n while (!title?.trim()) {\n title = await prompt.single({\n type: 'input',\n suffix: ' (e.g. 
\"Rename field from location to address\")',\n message: 'Title of migration',\n })\n if (!title.trim()) {\n output.error(chalk.red('Name cannot be empty'))\n }\n }\n const types = await prompt.single({\n type: 'input',\n suffix: ' (optional)',\n message: 'Type of documents to migrate. You can add multiple types separated by comma',\n })\n\n const templatesByName = Object.fromEntries(TEMPLATES.map((t) => [t.name, t]))\n const template = await prompt.single({\n type: 'list',\n message: 'Select a template',\n choices: TEMPLATES.map((definedTemplate) => ({\n name: definedTemplate.name,\n value: definedTemplate.name,\n })),\n })\n\n const sluggedName = deburr(title.toLowerCase())\n .replace(/\\s+/g, '-')\n .replace(/[^a-z0-9-]/g, '')\n\n const destDir = path.join(workDir, MIGRATIONS_DIRECTORY, sluggedName)\n if (existsSync(destDir)) {\n if (\n !(await prompt.single({\n type: 'confirm',\n message: `Migration directory ${chalk.cyan(destDir)} already exists. Overwrite?`,\n default: false,\n }))\n ) {\n return\n }\n }\n mkdirSync(destDir, {recursive: true})\n\n const renderedTemplate = (templatesByName[template].template || minimalSimple)({\n migrationName: title,\n documentTypes: types\n .split(',')\n .map((t) => t.trim())\n .filter(Boolean),\n })\n\n const definitionFile = path.join(destDir, 'index.ts')\n\n await writeFile(definitionFile, renderedTemplate)\n // To dry run it, run \\`sanity migration run ${sluggedName}\\``)\n output.print()\n output.print(`${chalk.green('✓')} Migration created!`)\n output.print()\n output.print('Next steps:')\n output.print(\n `Open ${chalk.bold(\n definitionFile,\n )} in your code editor and write the code for your migration.`,\n )\n output.print(\n `Dry run the migration with:\\n\\`${chalk.bold(\n `sanity migration run ${sluggedName} --project=<projectId> --dataset <dataset> `,\n )}\\``,\n )\n output.print(\n `Run the migration against a dataset with:\\n \\`${chalk.bold(\n `sanity migration run ${sluggedName} --project=<projectId> --dataset <dataset> --no-dry-run`,\n )}\\``,\n )\n output.print()\n output.print(\n `👉 Learn more about schema and content migrations at ${chalk.bold(\n 'https://www.sanity.io/docs/schema-and-content-migrations',\n )}`,\n )\n },\n}\nexport default createMigrationCommand\n","import path from 'node:path'\n\nimport {type Migration} from '@sanity/migrate'\nimport {isPlainObject} from 'lodash'\n\nimport {MIGRATION_SCRIPT_EXTENSIONS, MIGRATIONS_DIRECTORY} from '../constants'\n\ninterface ResolvedMigrationScript {\n /**\n * Relative path from the working directory to the migration script\n */\n relativePath: string\n\n /**\n * Absolute path to the migration script\n */\n absolutePath: string\n\n /**\n * The migration module, if it could be resolved - otherwise `undefined`\n */\n mod?: {default: Migration; up?: unknown; down?: unknown}\n}\n\n/**\n * Resolves the potential paths to a migration script.\n * Considers the following paths (where `<ext>` is 'mjs', 'js', 'ts' or 'cjs'):\n *\n * - `<migrationsDir>/<migrationName>.<ext>`\n * - `<migrationsDir>/<migrationName>/index.<ext>`\n *\n * Note that all possible paths are returned, even if the files do not exist.\n * Check the `mod` property to see if a module could actually be loaded.\n *\n * @param workDir - Working directory of the studio\n * @param migrationName - The name of the migration directory to resolve\n * @returns An array of potential migration scripts\n * @internal\n */\nexport function resolveMigrationScript(\n workDir: string,\n migrationName: string,\n): 
ResolvedMigrationScript[] {\n return [migrationName, path.join(migrationName, 'index')].flatMap((location) =>\n MIGRATION_SCRIPT_EXTENSIONS.map((ext) => {\n const relativePath = path.join(MIGRATIONS_DIRECTORY, `${location}.${ext}`)\n const absolutePath = path.resolve(workDir, relativePath)\n let mod\n try {\n // eslint-disable-next-line import/no-dynamic-require\n mod = require(absolutePath)\n } catch (err) {\n if (err.code !== 'MODULE_NOT_FOUND') {\n throw new Error(`Error: ${err.message}\"`)\n }\n }\n return {relativePath, absolutePath, mod}\n }),\n )\n}\n\n/**\n * Checks whether or not the passed resolved migration script is actually loadable (eg has a default export)\n *\n * @param script - The resolved migration script to check\n * @returns `true` if the script is loadable, `false` otherwise\n * @internal\n */\nexport function isLoadableMigrationScript(\n script: ResolvedMigrationScript,\n): script is Required<ResolvedMigrationScript> {\n if (typeof script.mod === 'undefined' || !isPlainObject(script.mod.default)) {\n return false\n }\n\n const mod = script.mod.default\n return typeof mod.title === 'string' && mod.migrate !== undefined\n}\n","import {readdir} from 'node:fs/promises'\nimport path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {type Migration} from '@sanity/migrate'\nimport {Table} from 'console-table-printer'\nimport {register} from 'esbuild-register/dist/node'\n\nimport {MIGRATION_SCRIPT_EXTENSIONS, MIGRATIONS_DIRECTORY} from './constants'\nimport {isLoadableMigrationScript, resolveMigrationScript} from './utils/resolveMigrationScript'\n\nconst helpText = ``\n\nconst listMigrationCommand: CliCommandDefinition = {\n name: 'list',\n group: 'migration',\n signature: '',\n helpText,\n description: 'List available migrations',\n action: async (_, context) => {\n const {workDir, output, chalk} = context\n try {\n const migrations = await resolveMigrations(workDir)\n\n if (migrations.length === 0) {\n output.print('No migrations found in migrations folder of the project')\n output.print(\n `\\nRun ${chalk.green(`\\`sanity migration create <NAME>\\``)} to create a new migration`,\n )\n return\n }\n\n const table = new Table({\n title: `Found ${migrations.length} migrations in project`,\n columns: [\n {name: 'id', title: 'ID', alignment: 'left'},\n {name: 'title', title: 'Title', alignment: 'left'},\n ],\n })\n\n migrations.forEach((definedMigration) => {\n table.addRow({id: definedMigration.id, title: definedMigration.migration.title})\n })\n table.printTable()\n output.print('\\nRun `sanity migration run <ID>` to run a migration')\n } catch (error) {\n if (error.code === 'ENOENT') {\n output.print('No migrations folder found in the project')\n output.print(\n `\\nRun ${chalk.green(`\\`sanity migration create <NAME>\\``)} to create a new migration`,\n )\n return\n }\n throw new Error(`An error occurred while listing migrations: ${error.message}`)\n }\n },\n}\n\n/**\n * A resolved migration, where you are guaranteed that the migration file exists\n *\n * @internal\n */\nexport interface ResolvedMigration {\n id: string\n migration: Migration\n}\n\n/**\n * Resolves all migrations in the studio working directory\n *\n * @param workDir - The studio working directory\n * @returns Array of migrations and their respective paths\n * @internal\n */\nexport async function resolveMigrations(workDir: string): Promise<ResolvedMigration[]> {\n let unregister\n if (!__DEV__) {\n unregister = register({\n target: `node${process.version.slice(1)}`,\n 
supported: {'dynamic-import': true},\n }).unregister\n }\n\n const migrationsDir = path.join(workDir, MIGRATIONS_DIRECTORY)\n const migrationEntries = await readdir(migrationsDir, {withFileTypes: true})\n\n const migrations: ResolvedMigration[] = []\n for (const entry of migrationEntries) {\n const entryName = entry.isDirectory() ? entry.name : removeMigrationScriptExtension(entry.name)\n const candidates = resolveMigrationScript(workDir, entryName).filter(isLoadableMigrationScript)\n\n for (const candidate of candidates) {\n migrations.push({\n id: entryName,\n migration: candidate.mod.default,\n })\n }\n }\n\n if (unregister) {\n unregister()\n }\n\n return migrations\n}\n\nfunction removeMigrationScriptExtension(fileName: string) {\n // Remove `.ts`, `.js` etc from the end of a filename\n return MIGRATION_SCRIPT_EXTENSIONS.reduce(\n (name, ext) => (name.endsWith(`.${ext}`) ? path.basename(name, `.${ext}`) : name),\n fileName,\n )\n}\n\nexport default listMigrationCommand\n","export default {\n name: 'migration',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages content migrations for Content Lake datasets',\n}\n","import {isIndexSegment, isIndexTuple, isKeySegment, type Path} from '@sanity/types'\n\n// FIXME: de-dupe this\n// copy/paste of `pathToString` from 'sanity' to prevent circular imports\nfunction pathToString(path: Path): string {\n if (!Array.isArray(path)) {\n throw new Error('Path is not an array')\n }\n\n return path.reduce<string>((target, segment, i) => {\n if (isIndexSegment(segment)) {\n return `${target}[${segment}]`\n }\n\n if (isKeySegment(segment) && segment._key) {\n return `${target}[_key==\"${segment._key}\"]`\n }\n\n if (isIndexTuple(segment)) {\n const [from, to] = segment\n return `${target}[${from}:${to}]`\n }\n\n if (typeof segment === 'string') {\n const separator = i === 0 ? '' : '.'\n return `${target}${separator}${segment}`\n }\n\n throw new Error(`Unsupported path segment \\`${JSON.stringify(segment)}\\``)\n }, '')\n}\n\ninterface BaseNode {\n path: Path\n}\n\nexport interface Tree<Node extends BaseNode> {\n nodes?: Node[]\n children?: Record<string, Tree<Node>>\n}\n\n/**\n * Recursively calculates the max length of all the keys in the given validation\n * tree respecting extra length due to indentation depth. Used to calculate the\n * padding for the rest of the tree.\n */\nexport const maxKeyLength = (children: Record<string, Tree<BaseNode>> = {}, depth = 0): number => {\n return Object.entries(children)\n .map(([key, child]) =>\n Math.max(key.length + depth * 2, maxKeyLength(child.children, depth + 1)),\n )\n .reduce((max, next) => (next > max ? next : max), 0)\n}\n\ninterface Options<Node extends BaseNode> {\n node?: Record<string, Tree<Node>>\n paddingLength: number\n indent?: string\n getNodes?: (node: Tree<Node>) => Node[] | undefined\n getMessage: (node: Node) => string\n}\n\n/**\n * Recursively formats a given tree into a printed user-friendly tree structure\n */\nexport const formatTree = <Node extends BaseNode>({\n node = {},\n paddingLength,\n indent = '',\n getNodes: getLeaves = ({nodes}) => nodes,\n getMessage,\n}: Options<Node>): string => {\n const entries = Object.entries(node)\n\n return entries\n .map(([key, child], index) => {\n const isLast = index === entries.length - 1\n const nextIndent = `${indent}${isLast ? 
' ' : '│ '}`\n const leaves = getLeaves(child)\n\n const nested = formatTree({\n node: child.children,\n paddingLength,\n indent: nextIndent,\n getNodes: getLeaves,\n getMessage,\n })\n\n if (!leaves?.length) {\n const current = `${indent}${isLast ? '└' : '├'}─ ${key}`\n return [current, nested].filter(Boolean).join('\\n')\n }\n\n const [first, ...rest] = leaves\n const firstPadding = '.'.repeat(paddingLength - indent.length - key.length)\n const elbow = isLast ? '└' : '├'\n const subsequentPadding = ' '.repeat(paddingLength - indent.length + 2)\n\n const firstMessage = `${indent}${elbow}─ ${key} ${firstPadding} ${getMessage(first)}`\n const subsequentMessages = rest\n .map((marker) => `${nextIndent}${subsequentPadding} ${getMessage(marker)}`)\n .join('\\n')\n\n const current = [firstMessage, subsequentMessages].filter(Boolean).join('\\n')\n return [current, nested].filter(Boolean).join('\\n')\n })\n .join('\\n')\n}\n\n/**\n * Converts a set of markers with paths into a tree of markers where the paths\n * are embedded in the tree\n */\nexport function convertToTree<const Node extends BaseNode>(nodes: Node[]): Tree<Node> {\n const root: Tree<Node> = {}\n\n // add the markers to the tree\n function addNode(node: Node, tree: Tree<Node> = root) {\n // if we've traversed the whole path\n if (!node.path.length) {\n if (!tree.nodes) tree.nodes = [] // ensure markers is defined\n\n // then add the marker to the front\n tree.nodes.push(node)\n return\n }\n\n const [current, ...rest] = node.path\n const key = pathToString([current])\n\n // ensure the current node has children and the next node\n if (!tree.children) tree.children = {}\n if (!(key in tree.children)) tree.children[key] = {}\n\n addNode({...node, path: rest}, tree.children[key])\n }\n\n for (const node of nodes) addNode(node)\n return root\n}\n","import {isatty} from 'node:tty'\n\nimport {type Migration, type Mutation, type NodePatch, type Transaction} from '@sanity/migrate'\nimport {type KeyedSegment} from '@sanity/types'\nimport {type Chalk} from 'chalk'\n\nimport {convertToTree, formatTree, maxKeyLength} from '../../util/tree'\n\ntype ItemRef = string | number\ntype Impact = 'destructive' | 'maybeDestructive' | 'incremental'\ntype Variant = Impact | 'info'\n\nconst isTty = isatty(1)\n\ninterface FormatterOptions<Subject> {\n chalk: Chalk\n subject: Subject\n migration: Migration\n indentSize?: number\n}\n\nexport function prettyFormat({\n chalk,\n subject,\n migration,\n indentSize = 0,\n}: FormatterOptions<Mutation | Transaction | (Mutation | Transaction)[]>): string {\n return (Array.isArray(subject) ? subject : [subject])\n .map((subjectEntry) => {\n if (subjectEntry.type === 'transaction') {\n return [\n [\n badge('transaction', 'info', chalk),\n typeof subjectEntry.id === 'undefined' ? null : chalk.underline(subjectEntry.id),\n ]\n .filter(Boolean)\n .join(' '),\n indent(\n prettyFormat({\n chalk,\n subject: subjectEntry.mutations,\n migration,\n indentSize: indentSize,\n }),\n ),\n ].join('\\n\\n')\n }\n return prettyFormatMutation({\n chalk,\n subject: subjectEntry,\n migration,\n indentSize,\n })\n })\n .join('\\n\\n')\n}\n\nfunction encodeItemRef(ref: number | KeyedSegment): ItemRef {\n return typeof ref === 'number' ? 
ref : ref._key\n}\n\nfunction badgeStyle(chalk: Chalk, variant: Variant): Chalk {\n const styles: Record<Variant, Chalk> = {\n info: chalk.bgWhite.black,\n incremental: chalk.bgGreen.black.bold,\n maybeDestructive: chalk.bgYellow.black.bold,\n destructive: chalk.bgRed.black.bold,\n }\n\n return styles[variant]\n}\n\nfunction badge(label: string, variant: Variant, chalk: Chalk): string {\n if (!isTty) {\n return `[${label}]`\n }\n\n return badgeStyle(chalk, variant)(` ${label} `)\n}\n\nconst mutationImpact: Record<Mutation['type'], Impact> = {\n create: 'incremental',\n createIfNotExists: 'incremental',\n createOrReplace: 'maybeDestructive',\n delete: 'destructive',\n patch: 'maybeDestructive',\n}\n\nfunction documentId(mutation: Mutation): string | undefined {\n if ('id' in mutation) {\n return mutation.id\n }\n\n if ('document' in mutation) {\n return mutation.document._id\n }\n\n return undefined\n}\n\nconst listFormatter = new Intl.ListFormat('en-US', {\n type: 'disjunction',\n})\n\nfunction mutationHeader(chalk: Chalk, mutation: Mutation, migration: Migration): string {\n const mutationType = badge(mutation.type, mutationImpact[mutation.type], chalk)\n\n const documentType =\n 'document' in mutation || migration.documentTypes\n ? badge(\n 'document' in mutation\n ? mutation.document._type\n : listFormatter.format(migration.documentTypes ?? []),\n 'info',\n chalk,\n )\n : null\n\n // TODO: Should we list documentType when a mutation can be yielded for any document type?\n return [mutationType, documentType, chalk.underline(documentId(mutation))]\n .filter(Boolean)\n .join(' ')\n}\n\nexport function prettyFormatMutation({\n chalk,\n subject,\n migration,\n indentSize = 0,\n}: FormatterOptions<Mutation>): string {\n const lock =\n 'options' in subject ? 
chalk.cyan(`(if revision==${subject.options?.ifRevision})`) : ''\n const header = [mutationHeader(chalk, subject, migration), lock].join(' ')\n const padding = ' '.repeat(indentSize)\n\n if (\n subject.type === 'create' ||\n subject.type === 'createIfNotExists' ||\n subject.type === 'createOrReplace'\n ) {\n return [header, '\\n', indent(JSON.stringify(subject.document, null, 2), indentSize)].join('')\n }\n\n if (subject.type === 'patch') {\n const tree = convertToTree<NodePatch>(subject.patches.flat())\n const paddingLength = Math.max(maxKeyLength(tree.children) + 2, 30)\n\n return [\n header,\n '\\n',\n formatTree<NodePatch>({\n node: tree.children,\n paddingLength,\n indent: padding,\n getMessage: (patch) => formatPatchMutation(chalk, patch),\n }),\n ].join('')\n }\n\n return header\n}\n\nfunction formatPatchMutation(chalk: Chalk, patch: NodePatch): string {\n const {op} = patch\n const formattedType = chalk.bold(op.type)\n if (op.type === 'unset') {\n return `${chalk.red(formattedType)}()`\n }\n if (op.type === 'diffMatchPatch') {\n return `${chalk.yellow(formattedType)}(${op.value})`\n }\n if (op.type === 'inc' || op.type === 'dec') {\n return `${chalk.yellow(formattedType)}(${op.amount})`\n }\n if (op.type === 'set') {\n return `${chalk.yellow(formattedType)}(${JSON.stringify(op.value)})`\n }\n if (op.type === 'setIfMissing') {\n return `${chalk.green(formattedType)}(${JSON.stringify(op.value)})`\n }\n if (op.type === 'insert') {\n return `${chalk.green(formattedType)}(${op.position}, ${encodeItemRef(\n op.referenceItem,\n )}, ${JSON.stringify(op.items)})`\n }\n if (op.type === 'replace') {\n return `${chalk.yellow(formattedType)}(${encodeItemRef(op.referenceItem)}, ${JSON.stringify(\n op.items,\n )})`\n }\n if (op.type === 'truncate') {\n return `${chalk.red(formattedType)}(${op.startIndex}, ${op.endIndex})`\n }\n // @ts-expect-error all cases are covered\n throw new Error(`Invalid operation type: ${op.type}`)\n}\n\nfunction indent(subject: string, size = 2): string {\n const padding = ' '.repeat(size)\n\n return subject\n .split('\\n')\n .map((line) => padding + line)\n .join('\\n')\n}\n","import path from 'node:path'\n\nimport {type CliCommandDefinition} from '@sanity/cli'\nimport {\n DEFAULT_MUTATION_CONCURRENCY,\n dryRun,\n MAX_MUTATION_CONCURRENCY,\n type Migration,\n type MigrationProgress,\n run,\n} from '@sanity/migrate'\nimport {Table} from 'console-table-printer'\nimport {register} from 'esbuild-register/dist/node'\nimport {hideBin} from 'yargs/helpers'\nimport yargs from 'yargs/yargs'\n\nimport {debug} from '../../debug'\nimport {MIGRATIONS_DIRECTORY} from './constants'\nimport {resolveMigrations} from './listMigrationsCommand'\nimport {prettyFormat} from './prettyMutationFormatter'\nimport {isLoadableMigrationScript, resolveMigrationScript} from './utils/resolveMigrationScript'\n\nconst helpText = `\nOptions\n --no-dry-run By default the migration runs in dry mode. Pass this option to migrate dataset.\n --concurrency <concurrent> How many mutation requests to run in parallel. Must be between 1 and ${MAX_MUTATION_CONCURRENCY}. Default: ${DEFAULT_MUTATION_CONCURRENCY}.\n --no-progress Don't output progress. Useful if you want debug your migration script and see the output of console.log() statements.\n --dataset <dataset> Dataset to migrate. Defaults to the dataset configured in your Sanity CLI config.\n --project <project id> Project ID of the dataset to migrate. 
Defaults to the projectId configured in your Sanity CLI config.\n --no-confirm Skip the confirmation prompt before running the migration. Make sure you know what you're doing before using this flag.\n --from-export <export.tar.gz> Use a local dataset export as source for migration instead of calling the Sanity API. Note: this is only supported for dry runs.\n\n\nExamples\n # dry run the migration\n sanity migration run <id>\n\n # execute the migration against a dataset\n sanity migration run <id> --no-dry-run --project xyz --dataset staging\n\n # execute the migration using a dataset export as the source\n sanity migration run <id> --from-export=production.tar.gz --no-dry-run --projectId xyz --dataset staging\n`\n\ninterface CreateFlags {\n ['dry-run']?: boolean\n concurrency?: number\n ['from-export']?: string\n progress?: boolean\n dataset?: string\n project?: string\n confirm?: boolean\n}\n\nfunction parseCliFlags(args: {argv?: string[]}) {\n return yargs(hideBin(args.argv || process.argv).slice(2))\n .options('dry-run', {type: 'boolean', default: true})\n .options('concurrency', {type: 'number', default: DEFAULT_MUTATION_CONCURRENCY})\n .options('progress', {type: 'boolean', default: true})\n .options('dataset', {type: 'string'})\n .options('from-export', {type: 'string'})\n .options('project', {type: 'string'})\n .options('confirm', {type: 'boolean', default: true}).argv\n}\n\nconst runMigrationCommand: CliCommandDefinition<CreateFlags> = {\n name: 'run',\n group: 'migration',\n signature: 'ID',\n helpText,\n description: 'Run a migration against a dataset',\n // eslint-disable-next-line max-statements\n action: async (args, context) => {\n const {apiClient, output, prompt, chalk, workDir} = context\n const [id] = args.argsWithoutOptions\n const migrationsDirectoryPath = path.join(workDir, MIGRATIONS_DIRECTORY)\n\n const flags = await parseCliFlags(args)\n\n const fromExport = flags.fromExport\n const dry = flags.dryRun\n const dataset = flags.dataset\n const project = flags.project\n\n if ((dataset && !project) || (project && !dataset)) {\n throw new Error('If either --dataset or --project is provided, both must be provided')\n }\n\n if (!id) {\n output.error(chalk.red('Error: Migration ID must be provided'))\n const migrations = await resolveMigrations(workDir)\n const table = new Table({\n title: `Migrations found in project`,\n columns: [\n {name: 'id', title: 'ID', alignment: 'left'},\n {name: 'title', title: 'Title', alignment: 'left'},\n ],\n })\n\n migrations.forEach((definedMigration) => {\n table.addRow({id: definedMigration.id, title: definedMigration.migration.title})\n })\n table.printTable()\n output.print('\\nRun `sanity migration run <ID>` to run a migration')\n\n return\n }\n\n if (!__DEV__) {\n register({\n target: `node${process.version.slice(1)}`,\n supported: {'dynamic-import': true},\n })\n }\n\n const candidates = resolveMigrationScript(workDir, id)\n const resolvedScripts = candidates.filter(isLoadableMigrationScript)\n\n if (resolvedScripts.length > 1) {\n // todo: consider prompt user about which one to run? 
note: it's likely a mistake if multiple files resolve to the same name\n throw new Error(\n `Found multiple migrations for \"${id}\" in ${chalk.cyan(migrationsDirectoryPath)}: \\n - ${candidates\n .map((candidate) => path.relative(migrationsDirectoryPath, candidate.absolutePath))\n .join('\\n - ')}`,\n )\n }\n\n const script = resolvedScripts[0]\n if (!script) {\n throw new Error(\n `No migration found for \"${id}\" in ${chalk.cyan(chalk.cyan(migrationsDirectoryPath))}. Make sure that the migration file exists and exports a valid migration as its default export.\\n\n Tried the following files:\\n - ${candidates\n .map((candidate) => path.relative(migrationsDirectoryPath, candidate.absolutePath))\n .join('\\n - ')}`,\n )\n }\n\n const mod = script.mod\n if ('up' in mod || 'down' in mod) {\n // todo: consider adding support for up/down as separate named exports\n // For now, make sure we reserve the names for future use\n throw new Error(\n 'Only \"up\" migrations are supported at this time, please use a default export',\n )\n }\n\n const migration: Migration = mod.default\n\n if (fromExport && !dry) {\n throw new Error('Can only dry run migrations from a dataset export file')\n }\n\n const concurrency = flags.concurrency\n if (concurrency !== undefined) {\n if (concurrency > MAX_MUTATION_CONCURRENCY) {\n throw new Error(\n `Concurrency exceeds the maximum allowed value of ${MAX_MUTATION_CONCURRENCY}`,\n )\n }\n\n if (concurrency === 0) {\n throw new Error(`Concurrency must be a positive number, got ${concurrency}`)\n }\n }\n\n const projectConfig = apiClient({\n requireUser: true,\n requireProject: false,\n }).config()\n\n if (!project && !projectConfig.projectId) {\n throw new Error(\n 'sanity.cli.js does not contain a project identifier (\"api.projectId\") and no --project option was provided.',\n )\n }\n\n const apiConfig = {\n dataset: dataset ?? projectConfig.dataset!,\n projectId: project ?? projectConfig.projectId!,\n apiHost: projectConfig.apiHost!,\n token: projectConfig.token!,\n apiVersion: 'v2024-01-29',\n } as const\n if (dry) {\n dryRunHandler()\n return\n }\n\n output.print(\n `\\n${chalk.yellow(chalk.bold('Note: During migrations, your webhooks stay active.'))}`,\n )\n output.print(\n `To adjust them, launch the management interface with ${chalk.cyan('sanity manage')}, navigate to the API settings, and toggle the webhooks before and after the migration as needed.\\n`,\n )\n\n if (flags.confirm) {\n const response = await prompt.single<boolean>({\n message: `This migration will run on the ${chalk.yellow(\n chalk.bold(apiConfig.dataset),\n )} dataset in ${chalk.yellow(chalk.bold(apiConfig.projectId))} project. 
Are you sure?`,\n type: 'confirm',\n })\n\n if (!response) {\n debug('User aborted migration')\n return\n }\n }\n\n const spinner = output.spinner(`Running migration \"${id}\"`).start()\n await run({api: apiConfig, concurrency, onProgress: createProgress(spinner)}, migration)\n spinner.stop()\n\n function createProgress(progressSpinner: ReturnType<typeof output.spinner>) {\n return function onProgress(progress: MigrationProgress) {\n if (!flags.progress) {\n progressSpinner.stop()\n return\n }\n if (progress.done) {\n progressSpinner.text = `Migration \"${id}\" completed.\n\n Project id: ${chalk.bold(apiConfig.projectId)}\n Dataset: ${chalk.bold(apiConfig.dataset)}\n\n ${progress.documents} documents processed.\n ${progress.mutations} mutations generated.\n ${chalk.green(progress.completedTransactions.length)} transactions committed.`\n progressSpinner.stopAndPersist({symbol: chalk.green('✔')})\n return\n }\n\n ;[null, ...progress.currentTransactions].forEach((transaction) => {\n progressSpinner.text = `Running migration \"${id}\" ${dry ? 'in dry mode...' : '...'}\n\n Project id: ${chalk.bold(apiConfig.projectId)}\n Dataset: ${chalk.bold(apiConfig.dataset)}\n Document type: ${chalk.bold(migration.documentTypes?.join(','))}\n\n ${progress.documents} documents processed…\n ${progress.mutations} mutations generated…\n ${chalk.blue(progress.pending)} requests pending…\n ${chalk.green(progress.completedTransactions.length)} transactions committed.\n\n ${\n transaction && !progress.done\n ? `» ${prettyFormat({chalk, subject: transaction, migration, indentSize: 2})}`\n : ''\n }`\n })\n }\n }\n\n async function dryRunHandler() {\n output.print(`Running migration \"${id}\" in dry mode`)\n\n if (fromExport) {\n output.print(`Using export ${chalk.cyan(fromExport)}`)\n }\n\n output.print()\n output.print(`Project id: ${chalk.bold(apiConfig.projectId)}`)\n output.print(`Dataset: ${chalk.bold(apiConfig.dataset)}`)\n\n for await (const mutation of dryRun({api: apiConfig, exportPath: fromExport}, migration)) {\n if (!mutation) continue\n output.print()\n output.print(\n prettyFormat({\n chalk,\n subject: mutation,\n migration,\n }),\n )\n }\n }\n },\n}\n\nexport default runMigrationCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartPreviewServerCommandFlags} from '../../actions/preview/previewAction'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new entry to the CORS-origins allow list.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. 
[default: \"127.0.0.1\"]\n\nExamples\n sanity preview --host=0.0.0.0\n sanity preview --port=1942\n sanity preview some/build-output-dir\n`\n\nconst previewCommand: CliCommandDefinition = {\n name: 'preview',\n signature: '[BUILD_OUTPUT_DIR] [--port <port>] [--host <host>]',\n description: 'Starts a server to preview a production build of Sanity Studio',\n action: async (\n args: CliCommandArguments<StartPreviewServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const previewAction = await getPreviewAction()\n\n return previewAction(args, context)\n },\n helpText,\n}\n\nasync function getPreviewAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/preview/previewAction') = require('../../actions/preview/previewAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/preview/previewAction')\n\n return mod.default\n}\n\nexport default previewCommand\n","import {type CliCommandArguments, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type DeleteSchemaFlags} from '../../actions/schema/deleteSchemaAction'\n\nconst description = 'Delete schemas by their IDs.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions\n --ids <schema_id_1,schema_id_2,...> comma-separated list of schema IDs to delete\n --dataset <dataset_name> delete schemas from a specific dataset\n --manifest-dir <directory> directory containing your manifest file if it's not in the default location\n\nExamples\n # Delete single schema\n sanity schema delete --ids <schema_id>\n\n # Delete multiple schemas\n sanity schema delete --ids <schema_id_1,schema_id_2,...>\n`\n\nconst deleteSchemaCommand = {\n name: 'delete',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/deleteSchemaAction')\n\n return mod.default(args as unknown as CliCommandArguments<DeleteSchemaFlags>, context)\n },\n} satisfies CliCommandDefinition\n\nexport default deleteSchemaCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = 'Extracts a JSON representation of a Sanity schema within a Studio context.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions\n --workspace <name> The name of the workspace to generate a schema for\n --path Optional path to specify destination of the schema file\n --enforce-required-fields Makes the schema generated treat fields marked as required as non-optional. Defaults to false.\n --format=[groq-type-nodes] Format the schema as GROQ type nodes. 
Only available format at the moment.\n\nExamples\n # Extracts schema types in a Sanity project with more than one workspace\n sanity schema extract --workspace default\n`\n\nconst extractSchemaCommand: CliCommandDefinition = {\n name: 'extract',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/extractAction')\n\n return mod.default(args, context)\n },\n} satisfies CliCommandDefinition\n\nexport default extractSchemaCommand\n","export default {\n name: 'schema',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Interacts with Sanity Studio schema configurations',\n}\n","import {type CliCommandArguments, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type SchemaListFlags} from '../../actions/schema/schemaListAction'\n\nconst description = 'Lists all schemas in the current dataset.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions\n --json get schemas as json\n --id <schema_id> fetch a specific schema by its ID\n --manifest-dir <directory> directory containing your manifest file if it's not in the default location\n\nExamples\n # Get full json schemas\n sanity schema list --json\n\n # Get a specific schema by ID\n sanity schema list --id <schema_id>\n`\n\nconst fetchSchemaCommand = {\n name: 'list',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/schemaListAction')\n\n return mod.default(args as unknown as CliCommandArguments<SchemaListFlags>, context)\n },\n} satisfies CliCommandDefinition\n\nexport default fetchSchemaCommand\n","import {type CliCommandArguments, type CliCommandDefinition} from '@sanity/cli'\n\nimport {type StoreManifestSchemasFlags} from '../../actions/schema/storeSchemasAction'\n\nconst description = 'Store schemas into workspace datasets.'\n\nconst helpText = `\n**Note**: This command is experimental and subject to change.\n\nOptions:\n --workspace <workspace_name> store schema for a specific workspace\n --manifest-dir <directory> directory containing your manifest file if it's not in the default location\n --id-prefix <prefix> add a prefix to the schema ID\n --schema-required fail if schema file is not found\n --verbose print detailed information during store\n\nExamples\n # if no options are provided all workspace schemas will be stored\n sanity schema store\n # Store the schema for only the workspace 'default'\n sanity schema store --workspace default\n`\n\nconst storeSchemaCommand = {\n name: 'store',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/storeSchemasAction')\n\n const extendedArgs = {\n ...args,\n extOptions: {\n ...args.extOptions,\n 'schema-required': true,\n },\n }\n\n return mod.default(\n extendedArgs as unknown as CliCommandArguments<StoreManifestSchemasFlags>,\n context,\n )\n },\n} satisfies CliCommandDefinition\n\nexport default storeSchemaCommand\n","import {type CliCommandDefinition} from '@sanity/cli'\n\nconst description = 'Validates all schema types specified in a workspace.'\n\nconst helpText = `\nOptions\n --workspace <name> The name of the workspace to use when validating all schema types.\n --format <pretty|ndjson|json> The output format used to print schema errors and warnings.\n --level <error|warning> The minimum level reported out. 
Defaults to warning.\n\nExamples\n # Validates all schema types in a Sanity project with more than one workspace\n sanity schema validate --workspace default\n\n # Save the results of the report into a file\n sanity schema validate > report.txt\n\n # Report out only errors\n sanity schema validate --level error\n`\n\nconst validateDocumentsCommand: CliCommandDefinition = {\n name: 'validate',\n group: 'schema',\n signature: '',\n description,\n helpText,\n action: async (args, context) => {\n const mod = await import('../../actions/schema/validateAction')\n\n return mod.default(args, context)\n },\n} satisfies CliCommandDefinition\n\nexport default validateDocumentsCommand\n","import {\n type CliCommandArguments,\n type CliCommandContext,\n type CliCommandDefinition,\n} from '@sanity/cli'\n\nimport {type StartPreviewServerCommandFlags} from '../../actions/preview/previewAction'\nimport {isInteractive} from '../../util/isInteractive'\nimport {getDevAction} from '../dev/devCommand'\n\nconst helpText = `\nNotes\n Changing the hostname or port number might require a new CORS-entry to be added.\n\nOptions\n --port <port> TCP port to start server on. [default: 3333]\n --host <host> The local network interface at which to listen. [default: \"127.0.0.1\"]\n\nExamples\n sanity start --host=0.0.0.0\n sanity start --port=1942\n sanity start some/build-output-dir\n`\n\nconst startCommand: CliCommandDefinition = {\n name: 'start',\n signature: '[BUILD_OUTPUT_DIR] [--port <port>] [--host <host>]',\n description: 'Alias for `sanity preview`',\n action: async (\n args: CliCommandArguments<StartPreviewServerCommandFlags>,\n context: CliCommandContext,\n ) => {\n const {output, chalk, prompt} = context\n const previewAction = await getPreviewAction()\n\n const warn = (msg: string) => output.warn(chalk.yellow.bgBlack(msg))\n const error = (msg: string) => output.warn(chalk.red.bgBlack(msg))\n warn('╭───────────────────────────────────────────────────────────╮')\n warn('│ │')\n warn(\"│ You're running Sanity Studio v3. In this version the │\")\n warn('│ [start] command is used to preview static builds. |')\n warn('│ │')\n warn('│ To run a development server, use the [npm run dev] or |')\n warn('│ [npx sanity dev] command instead. 
For more information, │')\n warn('│ see https://www.sanity.io/help/studio-v2-vs-v3 │')\n warn('│ │')\n warn('╰───────────────────────────────────────────────────────────╯')\n warn('') // Newline to separate from other output\n\n try {\n await previewAction(args, context)\n } catch (err) {\n if (err.name !== 'BUILD_NOT_FOUND') {\n throw err\n }\n\n error(err.message)\n error('\\n')\n\n const shouldRunDevServer =\n isInteractive &&\n (await prompt.single({\n message: 'Do you want to start a development server instead?',\n type: 'confirm',\n }))\n\n if (shouldRunDevServer) {\n const devAction = await getDevAction()\n await devAction(args, context)\n } else {\n // Indicate that this isn't an expected exit\n // eslint-disable-next-line no-process-exit\n process.exit(1)\n }\n }\n },\n helpText,\n}\n\nasync function getPreviewAction() {\n // NOTE: in dev-mode we want to include from `src` so we need to use `.ts` extension\n // NOTE: this `if` statement is not included in the output bundle\n if (__DEV__) {\n // eslint-disable-next-line import/extensions,@typescript-eslint/consistent-type-imports\n const mod: typeof import('../../actions/preview/previewAction') = require('../../actions/preview/previewAction.ts')\n\n return mod.default\n }\n\n const mod = await import('../../actions/preview/previewAction')\n\n return mod.default\n}\n\nexport default startCommand\n","export function prettifyQuotaError(message: string) {\n return (err: Error & {statusCode?: number}): Error & {statusCode?: number} => {\n if (err.statusCode === 402) {\n err.message = message\n throw err\n }\n\n throw err\n }\n}\n","import {type CliCommandDefinition, type CliPrompter} from '@sanity/cli'\n\nimport {prettifyQuotaError} from '../../util/prettifyQuotaError'\nimport {type Role} from './types'\n\nconst helpText = `\nOptions\n --role Role to invite the user as\n\nExamples\n # Invite a new user to the project (prompt for details)\n sanity users invite\n\n # Send a new user invite to the email \"pippi@sanity.io\", prompt for role\n sanity users invite pippi@sanity.io\n\n # Send a new user invite to the email \"pippi@sanity.io\", as administrator\n sanity users invite pippi@sanity.io --role administrator\n`\n\ninterface InviteFlags {\n role?: string\n}\n\nconst inviteUserCommand: CliCommandDefinition<InviteFlags> = {\n name: 'invite',\n group: 'users',\n signature: '[EMAIL]',\n helpText,\n description: 'Invite a new user to the project',\n action: async (args, context) => {\n const {apiClient, output, prompt} = context\n const [selectedEmail] = args.argsWithoutOptions\n const flags = args.extOptions\n\n const client = apiClient().clone().config({useProjectHostname: false, apiVersion: '2021-06-07'})\n const {projectId} = client.config()\n const roles = (await client.request<Role[]>({uri: `/projects/${projectId}/roles`})).filter(\n (role) => role.appliesToUsers,\n )\n const email = selectedEmail || (await promptForEmail(prompt))\n const selectedRole = flags.role || (await promptForRole(prompt, roles))\n const role = roles.find(({name}) => name.toLowerCase() === selectedRole.toLowerCase())\n if (!role) {\n throw new Error(`Role name \"${selectedRole}\" not found`)\n }\n\n await client\n .clone()\n .request({\n method: 'POST',\n uri: `/invitations/project/${projectId}`,\n body: {email, role: role.name},\n useGlobalApi: true,\n maxRedirects: 0,\n })\n .catch(\n prettifyQuotaError(\n 'Project is already at user quota, add billing details to the project in order to allow overage charges.',\n ),\n )\n\n output.print(`Invitation sent to 
${email}`)\n },\n}\n\nexport default inviteUserCommand\n\nfunction promptForEmail(prompt: CliPrompter): Promise<string> {\n return prompt.single({\n type: 'input',\n message: 'Email to invite:',\n filter: (val) => val.trim(),\n validate: (name) => {\n if (!name || !name.includes('@')) {\n return 'Invalid email'\n }\n\n return true\n },\n })\n}\n\nfunction promptForRole(prompt: CliPrompter, roles: Role[]): Promise<string> {\n return prompt.single({\n type: 'list',\n message: 'Which role should the user have?',\n choices: roles.map((role) => ({\n value: role.name,\n name: `${role.title} (${role.description})`,\n })),\n })\n}\n","import {type CliCommandDefinition} from '@sanity/cli'\nimport {size, sortBy} from 'lodash'\n\nimport {type Invite, type PartialProjectResponse, type User} from './types'\n\nconst sortFields = ['id', 'name', 'role', 'date']\n\nconst helpText = `\nOptions\n --no-invitations Don't include pending invitations\n --no-robots Don't include robots (token users)\n --sort <field> Sort users by specified column: ${sortFields.join(', ')}\n --order <asc/desc> Sort output ascending/descending\n\nExamples\n # List all users of the project\n sanity users list\n\n # List all users of the project, but exclude pending invitations and robots\n sanity users list --no-invitations --no-robots\n\n # List all users, sorted by role\n sanity users list --sort role\n`\n\nconst listUsersCommand: CliCommandDefinition = {\n name: 'list',\n group: 'users',\n signature: '',\n helpText,\n description: 'List all users of the project',\n action: async (args, context) => {\n const {apiClient, output, chalk} = context\n const {sort, order, robots, invitations} = {\n sort: 'date',\n order: 'asc',\n robots: true,\n invitations: true,\n ...args.extOptions,\n }\n\n if (!sortFields.includes(sort)) {\n throw new Error(`Can't sort by field \"${sort}\". Must be one of ${sortFields.join(', ')}`)\n }\n\n if (order !== 'asc' && order !== 'desc') {\n throw new Error(`Unknown sort order \"${order}\", must be either \"asc\" or \"desc\"`)\n }\n\n const client = apiClient()\n const globalClient = client.clone().config({useProjectHostname: false})\n const {projectId} = client.config()\n\n const useGlobalApi = true\n const [pendingInvitations, project] = await Promise.all([\n invitations\n ? globalClient\n .request<Invite[]>({uri: `/invitations/project/${projectId}`, useGlobalApi})\n .then(getPendingInvitations)\n : [],\n globalClient.request<PartialProjectResponse>({uri: `/projects/${projectId}`, useGlobalApi}),\n ])\n\n const memberIds = project.members.map((member) => member.id)\n const users = await globalClient\n .request<User | User[]>({uri: `/users/${memberIds.join(',')}`, useGlobalApi})\n .then((user) => (Array.isArray(user) ? user : [user]))\n\n const projectMembers = project.members\n .map((member) => ({\n ...member,\n ...getUserProps(users.find((candidate) => candidate.id === member.id)),\n }))\n .filter((member) => !member.isRobot || robots)\n\n const members = [...projectMembers, ...pendingInvitations]\n\n const ordered = sortBy(\n members.map(({id, name, role, date}) => [id, name, role, date]),\n [sortFields.indexOf(sort)],\n )\n\n const rows = order === 'asc' ? 
ordered : ordered.reverse()\n\n const maxWidths = rows.reduce(\n (max, row) => row.map((current, index) => Math.max(size(current), max[index])),\n sortFields.map((str) => size(str)),\n )\n\n const printRow = (row: string[]) => {\n const isInvite = row[0] === '<pending>'\n const textRow = row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join(' ')\n return isInvite ? chalk.dim(textRow) : textRow\n }\n\n output.print(chalk.cyan(printRow(sortFields)))\n rows.forEach((row) => output.print(printRow(row)))\n },\n}\n\nfunction getUserProps(user: User | undefined) {\n const {displayName: name, createdAt: date} = user || {}\n return {name: name || '', date: date || ''}\n}\n\nfunction getPendingInvitations(invitations: Invite[]) {\n return invitations\n .filter((invite) => !invite.isAccepted && !invite.isRevoked && !invite.acceptedByUserId)\n .map((invite) => ({\n id: '<pending>',\n name: invite.email,\n role: invite.role,\n date: invite.createdAt,\n }))\n}\n\nexport default listUsersCommand\n","import {type CliCommandGroupDefinition} from '@sanity/cli'\n\nexport const usersGroup: CliCommandGroupDefinition = {\n name: 'users',\n signature: '[COMMAND]',\n isGroupRoot: true,\n description: 'Manages users of your Sanity project',\n}\n\nexport default usersGroup\n","import {type CliCommandDefinition, type CliCommandGroupDefinition} from '@sanity/cli'\n\nimport {SCHEMA_STORE_ENABLED} from '../actions/schema/storeSchemasAction'\nimport appGroup from './app/appGroup'\nimport appBuildCommand from './app/buildCommand'\nimport appDeployCommand from './app/deployCommand'\nimport appDevCommand from './app/devCommand'\nimport appStartCommand from './app/startCommand'\nimport backupGroup from './backup/backupGroup'\nimport disableBackupCommand from './backup/disableBackupCommand'\nimport downloadBackupCommand from './backup/downloadBackupCommand'\nimport enableBackupCommand from './backup/enableBackupCommand'\nimport listBackupCommand from './backup/listBackupCommand'\nimport buildCommand from './build/buildCommand'\nimport addCorsOriginCommand from './cors/addCorsOriginCommand'\nimport corsGroup from './cors/corsGroup'\nimport deleteCorsOriginCommand from './cors/deleteCorsOriginCommand'\nimport listCorsOriginsCommand from './cors/listCorsOriginsCommand'\nimport aliasDatasetCommand from './dataset/alias/aliasCommands'\nimport copyDatasetCommand from './dataset/copyDatasetCommand'\nimport createDatasetCommand from './dataset/createDatasetCommand'\nimport datasetGroup from './dataset/datasetGroup'\nimport datasetVisibilityCommand from './dataset/datasetVisibilityCommand'\nimport deleteDatasetCommand from './dataset/deleteDatasetCommand'\nimport exportDatasetCommand from './dataset/exportDatasetCommand'\nimport importDatasetCommand from './dataset/importDatasetCommand'\nimport listDatasetsCommand from './dataset/listDatasetsCommand'\nimport deployCommand from './deploy/deployCommand'\nimport undeployCommand from './deploy/undeployCommand'\nimport devCommand from './dev/devCommand'\nimport createDocumentsCommand from './documents/createDocumentsCommand'\nimport deleteDocumentsCommand from './documents/deleteDocumentsCommand'\nimport documentsGroup from './documents/documentsGroup'\nimport getDocumentsCommand from './documents/getDocumentsCommand'\nimport queryDocumentsCommand from './documents/queryDocumentsCommand'\nimport validateDocumentsCommand from './documents/validateDocumentsCommand'\nimport execCommand from './exec/execCommand'\nimport deleteGraphQLAPICommand from 
'./graphql/deleteGraphQLAPICommand'\nimport deployGraphQLAPICommand from './graphql/deployGraphQLAPICommand'\nimport graphqlGroup from './graphql/graphqlGroup'\nimport listGraphQLAPIsCommand from './graphql/listGraphQLAPIsCommand'\nimport createHookCommand from './hook/createHookCommand'\nimport deleteHookCommand from './hook/deleteHookCommand'\nimport hookGroup from './hook/hookGroup'\nimport listHookLogsCommand from './hook/listHookLogsCommand'\nimport listHooksCommand from './hook/listHooksCommand'\nimport printHookAttemptCommand from './hook/printHookAttemptCommand'\nimport extractManifestCommand from './manifest/extractManifestCommand'\nimport manifestGroup from './manifest/manifestGroup'\nimport createMigrationCommand from './migration/createMigrationCommand'\nimport listMigrationsCommand from './migration/listMigrationsCommand'\nimport migrationGroup from './migration/migrationGroup'\nimport runMigrationCommand from './migration/runMigrationCommand'\nimport previewCommand from './preview/previewCommand'\nimport deleteSchemaCommand from './schema/deleteSchemaCommand'\nimport extractSchemaCommand from './schema/extractSchemaCommand'\nimport schemaGroup from './schema/schemaGroup'\nimport fetchSchemaCommand from './schema/schemaListCommand'\nimport storeSchemaCommand from './schema/storeSchemaCommand'\nimport validateSchemaCommand from './schema/validateSchemaCommand'\nimport startCommand from './start/startCommand'\nimport inviteUserCommand from './users/inviteUserCommand'\nimport listUsersCommand from './users/listUsersCommand'\nimport usersGroup from './users/usersGroup'\n\n// Base commands that are always included\nconst baseCommands: (CliCommandDefinition | CliCommandGroupDefinition)[] = [\n appGroup,\n appDeployCommand,\n appDevCommand,\n appBuildCommand,\n appStartCommand,\n buildCommand,\n datasetGroup,\n deployCommand,\n undeployCommand,\n listDatasetsCommand,\n createDatasetCommand,\n datasetVisibilityCommand,\n exportDatasetCommand,\n importDatasetCommand,\n deleteDatasetCommand,\n copyDatasetCommand,\n aliasDatasetCommand,\n backupGroup,\n listBackupCommand,\n downloadBackupCommand,\n disableBackupCommand,\n enableBackupCommand,\n corsGroup,\n listCorsOriginsCommand,\n addCorsOriginCommand,\n deleteCorsOriginCommand,\n usersGroup,\n inviteUserCommand,\n listUsersCommand,\n hookGroup,\n listHooksCommand,\n createHookCommand,\n migrationGroup,\n createMigrationCommand,\n runMigrationCommand,\n listMigrationsCommand,\n deleteHookCommand,\n listHookLogsCommand,\n printHookAttemptCommand,\n documentsGroup,\n getDocumentsCommand,\n queryDocumentsCommand,\n deleteDocumentsCommand,\n createDocumentsCommand,\n validateDocumentsCommand,\n graphqlGroup,\n listGraphQLAPIsCommand,\n deployGraphQLAPICommand,\n deleteGraphQLAPICommand,\n devCommand,\n startCommand,\n schemaGroup,\n validateSchemaCommand,\n extractSchemaCommand,\n previewCommand,\n execCommand,\n manifestGroup,\n extractManifestCommand,\n]\n\n// Internal schema commands that are only included when enabled\nconst internalSchemaCommands = [fetchSchemaCommand, storeSchemaCommand, deleteSchemaCommand]\n\n// Include experimental commands only when the feature flag is enabled\nconst commands: (CliCommandDefinition | CliCommandGroupDefinition)[] = [\n ...baseCommands,\n ...(SCHEMA_STORE_ENABLED ? 
internalSchemaCommands : []),\n]\n\n/**\n * @deprecated Not actually deprecated, but these are internals and should not be relied upon outside of the Sanity team\n * @internal\n */\nexport const cliProjectCommands = {\n requiredCliVersionRange: '^3.0.0',\n commands,\n}\n"],"names":["getTimer","timings","startTimes","start","name","Error","performance","now","end","getTimings","MANIFEST_FILENAME","SCHEMA_FILENAME_SUFFIX","TOOLS_FILENAME_SUFFIX","FEATURE_ENABLED_ENV_NAME","EXTRACT_MANIFEST_ENABLED","process","env","EXTRACT_MANIFEST_LOG_ERRORS","SANITY_CLI_EXTRACT_MANIFEST_LOG_ERRORS","CREATE_TIMER","EXTRACT_TASK_TIMEOUT_MS","minutesToMilliseconds","EXTRACT_FAILURE_MESSAGE","extractManifestSafe","args","context","extractManifest","err","output","error","workDir","flags","extOptions","defaultOutputDir","resolve","join","outputDir","defaultStaticPath","staticPath","path","rootPkgPath","readPkgUp","sync","cwd","__dirname","timer","spinner","workspaceManifests","getWorkspaceManifests","mkdir","recursive","workspaceFiles","writeWorkspaceFiles","manifest","version","createdAt","Date","toISOString","workspaces","writeFile","JSON","stringify","manifestDuration","succeed","toFixed","fail","message","print","chalk","gray","workerPath","dirname","worker","Worker","workerData","timeout","timeoutId","setTimeout","terminate","Promise","resolveWorkspaces","reject","buffer","addListener","push","exitCode","clearTimeout","manifestWorkspaces","reduce","workspace","writeWorkspaceFile","all","schemaFilename","toolsFilename","createFile","schema","tools","content","filenameSuffix","stringifiedContent","filename","createHash","update","digest","slice","SANITY_WORKSPACE_SCHEMA_TYPE","printSchemaList","schemas","ordered","sortBy","map","_createdAt","_id","id","dataset","projectId","String","headings","rows","reverse","maxWidths","max","row","current","index","Math","size","str","printRow","col","i","padEnd","cyan","forEach","schemaListAction","SCHEMA_STORE_ENABLED","apiClient","client","requireUser","requireProject","withConfig","apiVersion","config","manifestDir","manifestPath","getManifestPath","readManifest","allSettled","uniqBy","throwIfProjectIdMismatch","schemaRes","getDocument","useCdn","fetch","type","result","status","red","reason","value","flat","length","json","customPath","readAndParseManifest","readFileSync","lastModified","statSync","mtime","parse","readPath","groupOrCommand","argv","argsWithoutOptions","extraArguments","retryError","errorMessage","storeSchemasAction","schemaRequired","workspaceName","idPrefix","verbose","storedCount","saveSchema","transaction","createOrReplace","_type","commit","workspaceToSave","find","success","appGroup","signature","isGroupRoot","description","helpText","appBuildCommand","group","action","overrides","getBuildAction","default","appDeployCommand","appDevCommand","getDevAction","isInteractive","stdout","isTTY","TERM","devCommand","appStartCommand","prompt","previewAction","getPreviewAction","msg","warn","bgBlack","single","exit","defaultApiVersion","datasetBackupGroup","parseApiErr","apiErr","code","statusCode","statusMessage","response","body","data","debug","debugIt","validateDatasetName","datasetName","toLowerCase","MAX_DATASET_NAME_LENGTH","test","promptForDatasetName","options","validate","chooseDatasetPrompt","allowCreation","datasets","list","hasProduction","datasetChoices","selected","choices","Separator","newDatasetName","undefined","create","resolveApiClient","token","selectedDataset","disableDatasetBackupCommand","request","method","headers","Authorization",
"uri","enabled","green","require","archiver","archiveDir","tmpOutDir","outFilePath","progressCb","archiveDestination","createWriteStream","on","archive","gzip","gzipOptions","level","zlib","constants","Z_DEFAULT_COMPRESSION","stack","progress","fs","processedBytes","pipe","directory","finalize","maxBackupIdsShown","chooseBackupIdPrompt","query","limit","toString","backups","backupIdChoices","backup","cleanupTmpDir","tmpDir","rimraf","MAX_RETRIES","BACKOFF_DELAY_BASE","exponentialBackoff","retryCount","pow","withRetry","operation","maxRetries","retryDelay","CONNECTION_TIMEOUT","READ_TIMEOUT","getIt","keepAlive","promise","downloadAsset","url","fileName","fileType","outDir","normalizedFileName","basename","assetFilePath","getAssetFilePath","maxRedirects","connect","socket","stream","pipeline","downloadDocument","PaginatedGetBackupStream","Readable","cursor","totalFiles","constructor","backupId","objectMode","_read","fetchNextBackupPage","files","file","nextCursor","destroy","newProgress","startStep","lastProgress","step","elapsed","prettyMs","total","text","set","humanFileSize","floor","log","isPathDirName","filepath","createDebug","DEFAULT_DOWNLOAD_CONCURRENCY","MAX_DOWNLOAD_CONCURRENCY","parseCliFlags","yargs","hideBin","downloadBackupCommand","opts","prepareBackupOptions","outFileName","bold","progressSpinner","mkdtemp","tmpdir","dir","mkdirSync","tmpOutDocumentsFile","docOutStream","docWriteMutex","Mutex","backupFileStream","totalItemsDownloaded","pMap","doc","runExclusive","write","concurrency","finished","isString","defaultOutFileName","out","absolutify","filter","overwrite","existsSync","enableDatasetBackupCommand","DEFAULT_LIST_BACKUP_LIMIT","alias","listDatasetBackupCommand","Number","MAX_SAFE_INTEGER","before","after","parsedBefore","processDateFlags","parsedAfter","isAfter","table","Table","columns","title","alignment","addRow","resource","lightFormat","printTable","date","parsedDate","isValid","buildCommand","wildcardReplacement","portReplacement","addCorsOrigin","givenOrigin","origin","filterAndValidateOrigin","promptForOrigin","hasWildcard","includes","promptForWildcardConfirmation","allowCredentials","credentials","promptForCredentials","Boolean","oneline","yellow","logSymbols","warning","underline","replace","filterOrigin","validateOrigin","example","parsed","host","protocol","RegExp","addCorsOriginCommand","corsGroup","deleteCorsOriginCommand","originId","specified","specifiedOrigin","origins","listCorsOriginsCommand","validateDatasetAliasName","promptForDatasetAliasName","ALIAS_PREFIX","listAliases","createAlias","aliasName","modify","updateAlias","unlinkAlias","removeAlias","createAliasHandler","targetDataset","nameError","aliases","projectFeatures","then","sets","ds","aliasClient","aliasOutputName","startsWith","datasetErr","option","deleteAliasHandler","force","dsError","fetchedAliases","linkedAlias","elem","input","trim","linkAliasHandler","da","unlinkAliasHandler","aliasCommand","verb","listDatasetCopyJobs","offset","job","state","updatedAt","sourceDataset","withHistory","timeStarted","formatDistanceToNow","parseISO","timeTaken","formatDistance","color","getClientUrl","cdnUrl","Observable","observer","progressSource","EventSource","stopped","onError","close","next","onChannelError","onMessage","event","onComplete","removeEventListener","complete","addEventListener","followProgress","jobId","currentProgress","listenUrl","subscribe","copyDatasetCommand","attach","shouldSkipHistory","existingDatasets","sourceDatasetName","targetDatasetName","skipHistory","detach","allowedM
odes","createDatasetCommand","visibility","canCreatePrivate","aclMode","promptForDatasetVisibility","mode","datasetVisibilityCommand","edit","curr","deleteDatasetCommand","delete","noop","parseFlags","rawFlags","types","split","assetConcurrency","parseInt","raw","assets","drafts","compress","exportDatasetCommand","targetDestination","destinationPath","outputPath","getOutputPath","currentStep","onProgress","exportDataset","destination","dstPath","isAbsolute","dstStats","stat","catch","looksLikeFile","isFile","indexOf","createPath","finalPath","toBoolIfSet","flag","allowAssetsInDifferentDataset","allowFailingAssets","replaceAssets","skipCrossDatasetReferences","allowSystemDocuments","missing","importDatasetCommand","fromInitCommand","getMutationOperation","target","determineTargetDataset","isUrl","inputStream","assetsBase","sourceIsFolder","getUrlStream","sourceFile","fileStats","isDirectory","createReadStream","importClient","clone","stepStart","spinInterval","percent","lengthComputable","sameStep","getPercentage","clearInterval","prevStep","prevStepStart","timeSpent","secondsDecimalDigits","setInterval","endTask","numDocs","warnings","sanityImport","printWarnings","details","responseBody","padStart","onlyBody","assetFails","bind","listAliasesHandler","listDatasetsCommand","deployCommand","undeployCommand","createDocumentsCommand","watch","useJson5","json5","contentPath","readFile","writeDocuments","getResultMessage","docId","uuid","ext","tmpFile","os","defaultValue","editor","getEditor","registerUnlinkOnSigInt","chokidar","readAndPerformCreatesFromFile","execa","bin","concat","stdio","unlink","filePath","isEqual","writeResult","documents","docs","Array","isArray","mutations","validateDocument","isIdentifiedSanityDocument","createIfNotExists","arr","isSingle","isPlainObject","getErrorMessage","isSanityDocumentish","joiner","results","res","created","skipped","defaultEditor","platform","VISUAL","EDITOR","shift","deleteDocumentsCommand","ids","trx","deleted","notFound","pluralize","documentsGroup","identity","inp","colorizeJson","formatters","punctuator","white","key","string","number","literal","whitespace","tokenize","prevToken","getDocumentsCommand","pretty","project","anonymous","cliConfig","requireDataset","api","baseClient","originalDataset","originalProjectId","fallbackApiVersion","SANITY_CLI_QUERY_API_VERSION","validateDocumentsCommand","execCommand","deleteGraphQLAPICommand","deployGraphQLAPICommand","graphqlGroup","listGraphQLAPIsCommand","createHookCommand","manageUrl","projects","getById","organizationId","open","deleteHookCommand","hookId","promptForHook","specifiedName","hooks","hook","hookGroup","printHookAttemptCommand","attemptId","attempt","resultCode","resultBody","failureReason","inProgress","getStatus","isFailure","formatFailure","includeHelp","help","listHookLogsCommand","messages","attempts","groupedAttempts","groupBy","populated","totalMessages","printMessage","detailed","printSeparator","skip","failureCount","inspect","payload","colors","prefix","failure","duration","listHooksCommand","httpMethod","extractManifestCommand","extractError","MIGRATIONS_DIRECTORY","MIGRATION_SCRIPT_EXTENSIONS","minimalAdvanced","migrationName","documentTypes","t","minimalSimple","renameField","renameType","stringToPTE","TEMPLATES","template","createMigrationCommand","suffix","templatesByName","Object","fromEntries","definedTemplate","sluggedName","deburr","destDir","renderedTemplate","definitionFile","resolveMigrationScript","flatMap","location","relativePath","absolutePath","mod","isLoadab
leMigrationScript","script","migrate","listMigrationCommand","_","migrations","resolveMigrations","definedMigration","migration","unregister","register","supported","migrationsDir","migrationEntries","readdir","withFileTypes","entry","entryName","removeMigrationScriptExtension","candidates","candidate","endsWith","pathToString","segment","isIndexSegment","isKeySegment","_key","isIndexTuple","from","to","maxKeyLength","children","depth","entries","child","formatTree","node","paddingLength","indent","getNodes","getLeaves","nodes","getMessage","isLast","nextIndent","leaves","nested","first","rest","firstPadding","repeat","elbow","subsequentPadding","firstMessage","subsequentMessages","marker","convertToTree","root","addNode","tree","isTty","isatty","prettyFormat","subject","indentSize","subjectEntry","badge","prettyFormatMutation","encodeItemRef","ref","badgeStyle","variant","info","bgWhite","black","incremental","bgGreen","maybeDestructive","bgYellow","destructive","bgRed","label","mutationImpact","patch","documentId","mutation","document","listFormatter","Intl","ListFormat","mutationHeader","mutationType","documentType","format","lock","ifRevision","header","padding","patches","formatPatchMutation","op","formattedType","amount","position","referenceItem","items","startIndex","endIndex","line","MAX_MUTATION_CONCURRENCY","DEFAULT_MUTATION_CONCURRENCY","runMigrationCommand","migrationsDirectoryPath","fromExport","dry","dryRun","resolvedScripts","relative","projectConfig","apiConfig","apiHost","confirm","run","createProgress","stop","done","completedTransactions","stopAndPersist","symbol","currentTransactions","blue","pending","dryRunHandler","exportPath","previewCommand","deleteSchemaCommand","extractSchemaCommand","fetchSchemaCommand","storeSchemaCommand","extendedArgs","startCommand","prettifyQuotaError","inviteUserCommand","selectedEmail","useProjectHostname","roles","role","appliesToUsers","email","promptForEmail","selectedRole","promptForRole","useGlobalApi","val","sortFields","listUsersCommand","sort","order","robots","invitations","globalClient","pendingInvitations","getPendingInvitations","memberIds","members","member","users","user","getUserProps","isRobot","isInvite","textRow","dim","displayName","invite","isAccepted","isRevoked","acceptedByUserId","usersGroup","baseCommands","datasetGroup","aliasDatasetCommand","backupGroup","listBackupCommand","disableBackupCommand","enableBackupCommand","migrationGroup","listMigrationsCommand","queryDocumentsCommand","schemaGroup","validateSchemaCommand","manifestGroup","internalSchemaCommands","commands","cliProjectCommands","requiredCliVersionRange"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQO,SAASA,WAAyB;AACvC,QAAMC,UAAkC,IAClCC,aAAqC,CAAC;AAE5C,WAASC,MAAMC,MAAoB;AAC7B,QAAA,OAAOF,WAAWE,IAAI,IAAM;AAC9B,YAAM,IAAIC,MAAM,UAAUD,IAAI,qCAAqC;AAG1DA,eAAAA,IAAI,IAAIE,gBAAAA,YAAYC,IAAI;AAAA,EAAA;AAGrC,WAASC,IAAIJ,MAAsB;AAC7B,QAAA,OAAOF,WAAWE,IAAI,IAAM;AAC9B,YAAM,IAAIC,MAAM,UAAUD,IAAI,6BAA6B;AAGrDA,WAAAA,QAAAA,IAAI,IAAIE,4BAAYC,QAAQL,WAAWE,IAAI,GAC5CH,QAAQG,IAAI;AAAA,EAAA;AAGd,SAAA;AAAA,IAACD;AAAAA,IAAOK;AAAAA,IAAKC,YAAYA,MAAMR;AAAAA,EAAO;AAC/C;ACZO,MAAMS,oBAAoB,wBAC3BC,yBAAyB,uBACzBC,wBAAwB,sBAGxBC,6BAA2B,uCAC3BC,2BAA2BC,QAAQC,IAAIH,0BAAwB,MAAM,SACrEI,8BAA8BF,QAAQC,IAAIE,2CAA2C,QAErFC,eAAe,mBAEfC,0BAA0BC,QAAAA,sBAAsB,CAAC,GAEjDC,0BACJ;AAAA,8BAC+BT,0BAAwB;AAUnCU,eAAAA,oBACpBC,MACAC,SAC4B;AACvBX,MAAAA;AAID,QAAA;AACIY,YAAAA,gBAAgBF,MAAMC,OAAO;AACnC;AAAA,aACOE,KAAK;AACZ,aAAIV,+BACFQ,QAAQG,OAAOC,MAAMF,GAAG,GAEnBA;AAAAA,IAAAA;AAEX;AAEA,eAAeD,gBACbF,MACAC,SACe;AAC
T,QAAA;AAAA,IAACG;AAAAA,IAAQE;AAAAA,EAAWL,IAAAA,SAEpBM,QAAQP,KAAKQ,YACbC,mBAAmBC,KAAAA,QAAQC,KAAKL,KAAAA,SAAS,MAAM,CAAC,GAEhDM,YAAYF,aAAQD,gBAAgB,GACpCI,oBAAoBF,KAAAA,KAAKC,WAAW,QAAQ,GAE5CE,aAAaP,MAAMQ,QAAQF,mBAE3BE,SAAOJ,KAAAA,KAAKG,YAAY5B,iBAAiB,GAEzC8B,cAAcC,mBAAAA,QAAUC,KAAK;AAAA,IAACC,KAAKC;AAAAA,EAAU,CAAA,GAAGL;AACtD,MAAI,CAACC;AACG,UAAA,IAAInC,MAAM,oDAAoD;AAGtE,QAAMwC,QAAQ7C,SAAS;AACvB6C,QAAM1C,MAAMgB,YAAY;AACxB,QAAM2B,UAAUlB,OAAOkB,QAAQ,CAAA,CAAE,EAAE3C,MAAM,qBAAqB;AAE1D,MAAA;AACI4C,UAAAA,qBAAqB,MAAMC,sBAAsB;AAAA,MAACR;AAAAA,MAAaV;AAAAA,IAAAA,CAAQ;AAC7E,UAAMmB,GAAAA,MAAMX,YAAY;AAAA,MAACY,WAAW;AAAA,IAAA,CAAK;AAEzC,UAAMC,iBAAiB,MAAMC,oBAAoBL,oBAAoBT,UAAU,GAEzEe,WAA2B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAM/BC,SAAS;AAAA,MACTC,YAAW,oBAAIC,KAAK,GAAEC,YAAY;AAAA,MAClCC,YAAYP;AAAAA,IACd;AAEA,UAAMQ,GAAAA,UAAUpB,QAAMqB,KAAKC,UAAUR,UAAU,MAAM,CAAC,CAAC;AACjDS,UAAAA,mBAAmBjB,MAAMrC,IAAIW,YAAY;AAE/C2B,YAAQiB,QAAQ,uBAAuBD,iBAAiBE,QAAAA,CAAS,KAAK;AAAA,WAC/DrC,KAAK;AACJsC,UAAAA,QAAAA,KAAKtC,IAAIuC,OAAO,GACxBtC,OAAOuC,MAAMC,uBAAMC,KAAK/C,uBAAuB,CAAC,GAC1CK;AAAAA,EAAAA;AAEV;AAEA,eAAeqB,sBAAsB;AAAA,EACnCR;AAAAA,EACAV;AAIF,GAAuC;AACrC,QAAMwC,aAAanC,KAAAA,KACjBoC,KAAQ/B,QAAAA,WAAW,GACnB,OACA,aACA,OACA,WACA,oBACF,GAEMgC,SAAS,IAAIC,oBAAAA,OAAOH,YAAY;AAAA,IACpCI,YAAY;AAAA,MAAC5C;AAAAA,IAAO;AAAA;AAAA,IAEpBd,KAAKD,QAAQC;AAAAA,EAAAA,CACd;AAED,MAAI2D,UAAU;AACRC,QAAAA,YAAYC,WAAW,MAAM;AACvB,cAAA,IACVL,OAAOM,UAAU;AAAA,KAChB1D,uBAAuB;AAEtB,MAAA;AACF,WAAO,MAAM,IAAI2D,QAAmC,CAACC,mBAAmBC,WAAW;AACjF,YAAMC,SAAoC,CAAE;AACrCC,aAAAA,YAAY,WAAYjB,CAAAA,YAAYgB,OAAOE,KAAKlB,OAAO,CAAC,GAC/DM,OAAOW,YAAY,QAASE,CAAa,aAAA;AACnCA,qBAAa,IACfL,kBAAkBE,MAAM,IACfP,WACTM,OAAO,IAAI5E,MAAM,sCAAsCe,uBAAuB,IAAI,CAAC;AAAA,MAEtF,CAAA,GACDoD,OAAOW,YAAY,SAASF,MAAM;AAAA,IAAA,CACnC;AAAA,EAAA,UACO;AACRK,iBAAaV,SAAS;AAAA,EAAA;AAE1B;AAEA,SAASxB,oBACPmC,oBACAjD,YACkC;AAClC,QAAMV,SAAS2D,mBAAmBC,OAChC,CAAC9B,YAAY+B,cACJ,CAAC,GAAG/B,YAAYgC,mBAAmBD,WAAWnD,UAAU,CAAC,GAElE,CAAA,CACF;AACOyC,SAAAA,QAAQY,IAAI/D,MAAM;AAC3B;AAEA,eAAe8D,mBACbD,WACAnD,YACgC;AAC1B,QAAA,CAACsD,gBAAgBC,aAAa,IAAI,MAAMd,QAAQY,IAAI,CACxDG,WAAWxD,YAAYmD,UAAUM,QAAQpF,sBAAsB,GAC/DmF,WAAWxD,YAAYmD,UAAUO,OAAOpF,qBAAqB,CAAC,CAC/D;AAEM,SAAA;AAAA,IACL,GAAG6E;AAAAA,IACHM,QAAQH;AAAAA,IACRI,OAAOH;AAAAA,EACT;AACF;AAEA,MAAMC,aAAa,OAAOvD,QAAc0D,SAAcC,mBAA2B;AACzEC,QAAAA,qBAAqBvC,KAAKC,UAAUoC,SAAS,MAAM,CAAC,GAEpDG,WAAW,GADJC,uBAAW,MAAM,EAAEC,OAAOH,kBAAkB,EAAEI,OAAO,KAAK,EAC9CC,MAAM,GAAG,CAAC,CAAC,GAAGN,cAAc;AAGrD,SAAA,MAAMvC,aAAUxB,UAAKI,QAAM6D,QAAQ,GAAGD,kBAAkB,GAEjDC;AACT;;;;;;AC5KO,MAAMK,+BAA+B,2BAEtCC,kBAAkBA,CAAC;AAAA,EACvBC;AAAAA,EACA/E;AAC2D,MAAM;AACjE,QAAMgF,UAAUC,gBAAAA,QACdF,QAAQG,IAAI,CAAC;AAAA,IAACC,YAAYxD;AAAAA,IAAWyD,KAAKC;AAAAA,IAAIxB;AAAAA,EAAAA,MACrC,CAACwB,IAAIxB,UAAUrF,MAAMqF,UAAUyB,SAASzB,UAAU0B,WAAW5D,SAAS,EAAEuD,IAAIM,MAAM,CAC1F,GACD,CAAC,WAAW,CACd,GACMC,WAAW,CAAC,MAAM,aAAa,WAAW,aAAa,WAAW,GAClEC,OAAOV,QAAQW,WAEfC,YAAYF,KAAK9B,OACrB,CAACiC,KAAKC,QAAQA,IAAIZ,IAAI,CAACa,SAASC,UAAUC,KAAKJ,IAAIK,cAAAA,QAAKH,OAAO,GAAGF,IAAIG,KAAK,CAAC,CAAC,GAC7EP,SAASP,IAAKiB,CAAAA,QAAQD,cAAAA,QAAKC,GAAG,CAAC,CACjC,GAEMC,WAAYN,CAAkBA,QAAAA,IAAIZ,IAAI,CAACmB,KAAKC,MAAM,GAAGD,GAAG,GAAGE,OAAOX,UAAUU,CAAC,CAAC,CAAC,EAAE/F,KAAK,KAAK;AAEjGP,SAAOuC,MAAMC,eAAMgE,QAAAA,KAAKJ,SAASX,QAAQ,CAAC,CAAC,GAC3CC,KAAKe,QAASX,SAAQ9F,OAAOuC,MAAM6D,SAASN,GAAG,CAAC,CAAC;AACnD;AAE8BY,eAAAA,iBAC5B9G,MACAC,SACe;AACf,MAAI,CAAC8G;AACH;AAGF,QAAMxG,QAAQP,KAAKQ;AACnB,MAAI,OAAOD,MAAMkF,MAAO,UAAiB,OAAA,IAAI5G,MAAM,oBAAoB;AACnE,MAAA,OAAO0B,MAAM,cAAc,KAAM,UAAiB,OAAA,IAAI1B,MAAM,6BAA6B;AAEvF,QAAA;AAAA,IAACmI;AAAAA,IAAW5G;AAAAA,EAAAA,IAAUH,SACt
BgH,SAASD,UAAU;AAAA,IACvBE,aAAa;AAAA,IACbC,gBAAgB;AAAA,EACjB,CAAA,EAAEC,WAAW;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc,GAEnC1B,YAAYsB,OAAOK,SAAS3B,WAC5BD,UAAUuB,OAAOK,OAAAA,EAAS5B;AAE5B,MAAA,CAACC,aAAa,CAACD,SAAS;AAC1BtF,WAAOC,MAAM,yCAAyC;AACtD;AAAA,EAAA;AAGIkH,QAAAA,cAAchH,MAAM,cAAc,GAClCiH,eAAeC,gBAAgBxH,SAASsH,WAAW,GACnD1F,WAAW,MAAM6F,aAAaF,cAAcvH,OAAO,GAiCnDkF,WA9BU,MAAM5B,QAAQoE,WAC5BC,gBAAAA,QAA8B/F,SAASK,YAAY,SAAS,EAAEoD,IAAI,OAAOrB,cAAc;AAErF,QADA4D,yBAAyB5D,WAAW0B,SAAS,GACzCpF,MAAMkF,IAAI;AAENqC,YAAAA,YAAY,MAAMb,OACrBG,WAAW;AAAA,QACV1B,SAASzB,UAAUyB;AAAAA,QACnBC,WAAW1B,UAAU0B;AAAAA,MAAAA,CACtB,EACAoC,YAAYxH,MAAMkF,EAAE;AACvB,UAAI,CAACqC;AACG,cAAA,IAAIjJ,MAAM,WAAW0B,MAAMkF,EAAE,2BAA2BxB,UAAUyB,OAAO,GAAG;AAE7EoC,aAAAA;AAAAA,IAAAA;AAGF,WAAA,MAAMb,OACVG,WAAW;AAAA,MACV1B,SAASzB,UAAUyB;AAAAA,MACnBC,WAAW1B,UAAU0B;AAAAA,MACrBqC,QAAQ;AAAA,IAAA,CACT,EACAC,MAAwB,qBAAqB;AAAA,MAC5CC,MAAMjD;AAAAA,IAAAA,CACP;AAAA,EACJ,CAAA,CACH,GAIGK,IAAI,CAAC6C,QAAQ/B,UAAU;AAClB+B,QAAAA,OAAOC,WAAW,YAAY;AAC1BnE,YAAAA,YAAYpC,SAASK,WAAWkE,KAAK;AAC3ChG,aAAAA,OAAOC,MACLuC,eAAAA,QAAMyF,IACJ,0CAA0CpE,UAAUrF,IAAI,MAAMuJ,OAAOG,OAAO5F,OAAO,EACrF,CACF,GACO,CAAE;AAAA,IAAA;AAEX,WAAOyF,OAAOI;AAAAA,EACf,CAAA,EACAC,KAAK;AAEJrD,MAAAA,QAAQsD,WAAW,GAAG;AACxBrI,WAAOC,MAAM,kBAAkB;AAC/B;AAAA,EAAA;AAGEE,QAAMmI,OACRtI,OAAOuC,MAAM,GAAGP,KAAKC,UAAU9B,MAAMkF,KAAKN,QAAQ,CAAC,IAAIA,SAAS,MAAM,CAAC,CAAC,EAAE,IAE1ED,gBAAgB;AAAA,IAACC;AAAAA,IAAsC/E;AAAAA,EAAAA,CAAO;AAElE;;;;;;AC7HA,MAAMf,2BAA2B,mCACpB0H,uBAAuBxH,QAAQC,IAAIH,wBAAwB,MAAM,QAUjEoI,kBAAkBA,CAACxH,SAA4B0I,eAAwB;AAClF,QAAMlI,mBAAmBC,KAAAA,QAAQC,KAAAA,KAAKV,QAAQK,SAAS,MAAM,CAAC,GAExDM,YAAYF,KAAQD,QAAAA,gBAAgB,GACpCI,oBAAoBF,UAAKC,WAAW,QAAQ,GAE5CE,aAAa6H,cAAc9H;AAEjC,SADqBE,cAAKL,QAAAA,QAAQnB,QAAQ4B,IAAAA,GAAOL,UAAU;AAE7D,GAKM8H,uBAAuBA,CAACpB,cAAsBvH,YAA+B;AAC3EwE,QAAAA,UAAUoE,KAAAA,aAAarB,cAAc,OAAO,GAE5CsB,eADQC,KAAAA,SAASvB,YAAY,EACRwB,MAAM/G,YAAY;AAC7ChC,SAAAA,QAAQG,OAAOuC,MACbC,eAAAA,QAAMC,KAAK,6BAAwB2E,YAAY,oBAAoBsB,YAAY,GAAG,CACpF,GACO1G,KAAK6G,MAAMxE,OAAO;AAC3B,GAEaiD,eAAe,OAAOwB,UAAkBjJ,YAA+B;AAClF,QAAMuH,eAAe,GAAG0B,QAAQ,IAAIhK,iBAAiB;AAEjD,MAAA;AACK0J,WAAAA,qBAAqBpB,cAAcvH,OAAO;AAAA,EAAA,QACnC;AACd,UAAMF,oBACJ;AAAA,MACES,YAAY;AAAA,QAACO,MAAMmI;AAAAA,MAAQ;AAAA,MAC3BC,gBAAgB;AAAA,MAChBC,MAAM,CAAE;AAAA,MACRC,oBAAoB,CAAE;AAAA,MACtBC,gBAAgB,CAAA;AAAA,OAElBrJ,OACF;AAGI,QAAA;AACK2I,aAAAA,qBAAqBpB,cAAcvH,OAAO;AAAA,aAC1CsJ,YAAY;AACbC,YAAAA,eAAe,8BAA8BhC,YAAY;AAEvDpH,YAAAA,QAAAA,OAAOC,MAAMmJ,YAAY,GAC3BD;AAAAA,IAAAA;AAAAA,EACR;AAEJ,GAGa1B,2BAA2BA,CACtC5D,WACA0B,cACS;AACT,MAAI1B,UAAU0B,cAAcA;AACpB,UAAA,IAAI9G,MACR,uDAAkDoF,UAAUrF,IAAI,oBAAoBqF,UAAU0B,SAAS,EACzG;AAEJ;AAE8B8D,eAAAA,mBAC5BzJ,MACAC,SAC4B;AAC5B,MAAI,CAAC8G;AACH;AAGF,QAAMxG,QAAQP,KAAKQ,YAEbkJ,iBAAiBnJ,MAAM,iBAAiB,GACxCoJ,gBAAgBpJ,MAAM0D,WACtB2F,WAAWrJ,MAAM,WAAW,GAC5BsJ,UAAUtJ,MAAMsJ,SAChBtC,cAAchH,MAAM,cAAc;AAExC,MAAI,OAAOgH,eAAgB,UAAiB,OAAA,IAAI1I,MAAM,6BAA6B;AACnF,MAAI,OAAO+K,YAAa,UAAiB,OAAA,IAAI/K,MAAM,oBAAoB;AACvE,MAAI,OAAO8K,iBAAkB,UAAiB,OAAA,IAAI9K,MAAM,oBAAoB;AAEtE,QAAA;AAAA,IAACuB;AAAAA,IAAQ4G;AAAAA,EAAa/G,IAAAA,SAEtBuH,eAAeC,gBAAgBxH,SAASsH,WAAW;AAErD,MAAA;AACF,UAAMN,SAASD,UAAU;AAAA,MACvBE,aAAa;AAAA,MACbC,gBAAgB;AAAA,IACjB,CAAA,EAAEC,WAAW;AAAA,MAACC,YAAY;AAAA,IAAc,CAAA,GAEnC1B,YAAYsB,OAAOK,OAAS3B,EAAAA;AAClC,QAAI,CAACA,UAAiB,OAAA,IAAI9G,MAAM,2BAA2B;AAE3D,UAAMgD,WAAW,MAAM6F,aAAaF,cAAcvH,OAAO;AAEzD,QAAI6J,cAAc,GAEdzJ;AAEE0J,UAAAA,aAAa,OAAO9F,cAAqC;AACvDwB,YAAAA,KAAK,GAAGmE,WAAW,GAAGA,QAAQ,MAAM,EAAE,GAAG3E,4BAA4B,IAAIhB,UAAUrF,IAAI;AACzF,UAAA;AACFiJ,iCAAyB5D,WAAW0B,SAAS;AACvCpB,cAAAA,SAASnC,KAAK6G,MAClBJ,KAAa,aAAA,GAAGrB,YAAY,IAAIv
D,UAAUM,MAAM,IAAI,OAAO,CAC7D;AACA,cAAM0C,OACHG,WAAW;AAAA,UACV1B,SAASzB,UAAUyB;AAAAA,UACnBC,WAAW1B,UAAU0B;AAAAA,QAAAA,CACtB,EACAqE,YAAY,EACZC,gBAAgB;AAAA,UAACC,OAAOjF;AAAAA,UAA8BO,KAAKC;AAAAA,UAAIxB;AAAAA,UAAWM;AAAAA,QAAAA,CAAO,EACjF4F,OACHL,GAAAA;AAAAA,eACO3J,KAAK;AAKZ,YAJAE,QAAQF,KACRC,OAAOC,MACL,uCAAuC4D,UAAUrF,IAAI;AAAA,EAAOgE,uBAAMyF,IAAI,GAAGlI,IAAIuC,OAAO,EAAE,CAAC,EACzF,GACIgH,eAAsBvJ,OAAAA;AAAAA,MAAAA,UAClB;AACJ0J,mBACFzJ,OAAOuC,MACLC,eAAAA,QAAMC,KAAK,oBAAe4C,EAAE,gBAAgBE,SAAS,cAAc1B,UAAUyB,OAAO,EAAE,CACxF;AAAA,MAAA;AAAA,IAGN;AAGA,QAAIiE,eAAe;AACjB,YAAMS,kBAAkBvI,SAASK,WAAWmI,KACzCpG,CAAqCA,cAAAA,UAAUrF,SAAS+K,aAC3D;AACA,UAAI,CAACS;AACI/J,cAAAA,OAAAA,MAAM,aAAasJ,aAAa,wBAAwB,GACzD,IAAI9K,MAAM,aAAa8K,aAAa,sCAAsChE,SAAS,EAAE;AAE7F,YAAMoE,WAAWK,eAAwC,GACzDhK,OAAOkK,QAAQ,kBAAkB;AAAA,IACnC;AACE,YAAM/G,QAAQY,IACZtC,SAASK,WAAWoD,IAAI,OAAOrB,cAAoD;AACjF,cAAM8F,WAAW9F,SAAS;AAAA,MAC3B,CAAA,CACH,GACA7D,OAAOkK,QAAQ,UAAUR,WAAW,IAAIjI,SAASK,WAAWuG,MAAM,UAAU;AAG9E,QAAIpI,MAAaA,OAAAA;AACjB;AAAA,WACOF,KAAK;AAEZ,QAAIuJ,eAAsBvJ,OAAAA;AACnBA,WAAAA;AAAAA,EAAAA,UACC;AACDwC,WAAAA,MAAM,GAAGC,eAAAA,QAAMC,KAAK,kCAA6B,CAAC,IAAID,eAAAA,QAAMgE,KAAK,oBAAoB,CAAC,EAAE;AAAA,EAAA;AAEnG;;;;;;;;;AC3LA,MAAM2D,WAAsC;AAAA,EAC1C3L,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCCMC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAWXC,kBAAwC;AAAA,EAC5ChM,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,SACA8K,eAEoB,MAAMC,oBAEPhL,MAAMC,SAAS8K,SAAS;AAAA,EAE7CJ,UAAAA;AACF;AAEA,eAAeK,mBAAiB;AAUlB,UAAA,MAAM;mBAAO,kBAAiC;AAAA,EAAA,CAAA,EAAA,KAAA,SAAA,GAAA;AAAA,WAAA,EAAA;AAAA,EAAA,CAAA,GAE/CC;AACb;ACxCA,MAAMN,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA,IAKb5D,uBAAuB,0DAA0D,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAQjFmE,mBAAyC;AAAA,EAC7CtM,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,mBAAmC;AAAA,MAEjDgL,QAAQjL,MAAMC,OAAO;AAAA,EAElC0K,UAAAA;AACF,GC5BMA,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaXQ,gBAAsC;AAAA,EAC1CvM,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEkB,MAAMmL,eAAa,GAEpBpL,MAAMC,OAAO;AAAA,EAEhC0K,UAAAA;AACF;AAEA,eAAsBS,iBAKpB;AAUY,UAAA,MAAM;mBAAO,gBAA6B;AAAA,EAAA,CAAA,GAE3CH;AACb;ACtDaI,MAAAA,gBACX9L,QAAQ+L,OAAOC,SAAShM,QAAQC,IAAIgM,SAAS,UAAU,EAAE,QAAQjM,QAAQC,MCMrEmL,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaXc,aAAmC;AAAA,EACvC7M,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEkB,MAAMmL,aAAa,GAEpBpL,MAAMC,OAAO;AAAA,EAEhC0K,UAAAA;AACF;AAEA,eAAsBS,eAKpB;AAUY,UAAA,MAAM;mBAAO,gBAA6B;AAAA,EAAA,CAAA,GAE3CH;AACb;AC5CA,MAAMN,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXe,kBAAwC;AAAA,EAC5C9M,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,YACG;AACG,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,MAAO+I;AAAAA,IAAU1L,IAAAA,SAC1B2L,gBAAgB,MAAMC,mBAEtBxL,GAAAA,QAASyL,CAAgB1L,QAAAA,OAAO2L,KAAKnJ,OAAMyF,IAAI2D,QAAQF,GAAG,CAAC;AAE7D,QAAA;AACIF,YAAAA,cAAc5L,MAAMC,OAAO;AAAA,aAC1BE,KAAK;AACZ,UAAIA,IAAIvB,SAAS;AACTuB,cAAAA;AAGFA,YAAAA,IAAIuC,OAAO,GACjBrC,MAAM;AAAA,CAAI,GAGRgL,iBACC,MAAMM,OAAOM,OAAO;AAAA,QACnBvJ,SAAS;AAAA,QACTwF,MAAM;AAAA,MAAA,CACP,IAID,OADkB,MAAMkD,gBACRpL,MAAMC,OAAO,IAI7BV,QAAQ2M,KAAK,CAAC;AAAA,IAAA;AAAA,EAGpB;AAAA,EACAvB,UAAAA;AACF;AAEA,eAAekB,qBAAmB;AAUpB,UAAA,MAAM;mBAAO,oBAAqC;AAAA,EAAA,CAAA,GAEnDZ;AACb;AC9EO,MAAMkB,sBAAoB,eAE3BC,qBAAgD;AAAA,EACpDxN,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbD,aAAa;AACf;ACAA,SAAS4B,YAAYlM,KAAkB;AACrC,QAAMmM,SAAS
,CAAC;AACZnM,SAAAA,IAAIoM,OACND,OAAOE,aAAarM,IAAIoM,OACfpM,IAAIqM,eACbF,OAAOE,aAAarM,IAAIqM,aAGtBrM,IAAIuC,UACN4J,OAAO5J,UAAUvC,IAAIuC,UACZvC,IAAIsM,gBACbH,OAAO5J,UAAUvC,IAAIsM,gBACZtM,KAAKuM,UAAUC,MAAMjK,UAC9B4J,OAAO5J,UAAUvC,IAAIuM,SAASC,KAAKjK,UAC1BvC,KAAKuM,UAAUE,MAAMlK,UAC9B4J,OAAO5J,UAAUvC,IAAIuM,SAASE,KAAKlK,UAGnC4J,OAAO5J,UAAUN,KAAKC,UAAUlC,GAAG,GAG9BmM;AACT;AC9BaO,MAAAA,UAAQC,uBAAQ,aAAa;ACAnC,SAASC,oBAAoBC,aAAqC;AACvE,MAAI,CAACA;AACI,WAAA;AAGHpO,QAAAA,OAAO,GAAGoO,WAAW;AAEvBpO,SAAAA,KAAKqO,YAAY,MAAMrO,OAClB,kDAGLA,KAAK6J,SAAS,IACT,sDAGL7J,KAAK6J,SAASyE,KACT,+CAGJ,YAAYC,KAAKvO,IAAI,IAIrB,wBAAwBuO,KAAKvO,IAAI,IAIlC,QAAQuO,KAAKvO,IAAI,IACZ,2DAGF,KAPE,4EAJA;AAYX;AC9BO,SAASwO,qBACdzB,QACA0B,UAAgD,IAC/B;AACjB,SAAO1B,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACT4K,UAAW1O,CAAS,SACNmO,oBAAoBnO,IAAI,KAK7B;AAAA,IAET,GAAGyO;AAAAA,EAAAA,CACJ;AACH;AChBA,eAAsBE,oBACpBtN,SACAoN,UAAuD,IACtC;AACX,QAAA;AAAA,IAACrG;AAAAA,IAAW2E;AAAAA,MAAU1L,SACtB;AAAA,IAACyC;AAAAA,IAAS8K;AAAAA,EAAAA,IAAiBH,SAC3BpG,SAASD,aAETyG,WAAW,MAAMxG,OAAOwG,SAASC,KAAK,GACtCC,gBAAgBF,SAASpD,KAAM3E,aAAYA,QAAQ9G,SAAS,YAAY,GACxEgP,iBAAiBH,SAASnI,IAAKI,CAAa,aAAA;AAAA,IAAC6C,OAAO7C,QAAQ9G;AAAAA,EAAM,EAAA,GAClEiP,WAAW,MAAMlC,OAAOM,OAAO;AAAA,IACnCvJ,SAASA,WAAW;AAAA,IACpBwF,MAAM;AAAA,IACN4F,SAASN,gBACL,CAAC;AAAA,MAACjF,OAAO;AAAA,MAAO3J,MAAM;AAAA,OAAuB,IAAI+M,OAAOoC,aAAa,GAAGH,cAAc,IACtFA;AAAAA,EAAAA,CACL;AAED,MAAIC,aAAa,OAAO;AACtBhB,YAAM,wDAAwD;AACxDmB,UAAAA,iBAAiB,MAAMZ,qBAAqBzB,QAAQ;AAAA,MACxDjJ,SAAS;AAAA,MACTuI,SAAS0C,gBAAgBM,SAAY;AAAA,IAAA,CACtC;AACD,WAAA,MAAMhH,OAAOwG,SAASS,OAAOF,cAAc,GACpCA;AAAAA,EAAAA;AAGFH,SAAAA;AACT;ACvBA,eAAeM,iBACblO,SACA+M,aACA3F,YAC4B;AACtB,QAAA;AAAA,IAACL;AAAAA,EAAAA,IAAa/G;AAEpB,MAAIgH,SAASD,UAAU;AACjB,QAAA;AAAA,IAACrB;AAAAA,IAAWyI;AAAAA,EAAAA,IAASnH,OAAOK,OAAO;AAEzC,MAAI,CAAC3B;AACG,UAAA,IAAI9G,MAAM,wBAAwB;AAK1C,MAAIwP,kBAA0BrB;AAC9B,SAAKqB,oBACHA,kBAAkB,MAAMd,oBAAoBtN,SAAS;AAAA,IACnDyC,SAAS;AAAA,EACV,CAAA,IAGHuE,SAASA,OAAOG,WAAW;AAAA,IAAC1B,SAASsH;AAAAA,IAAa3F;AAAAA,EAAAA,CAAW,GAEtD;AAAA,IACL1B;AAAAA,IACAqH,aAAaqB;AAAAA,IACbD;AAAAA,IACAnH;AAAAA,EACF;AACF;ACrCA,MAAM0D,aAAW;AAAA;AAAA;AAAA,GAKX2D,8BAAoD;AAAA,EACxD1P,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAClB,CAACyF,OAAO,IAAI1F,KAAKqJ,oBACjB;AAAA,MAAC1D;AAAAA,MAAWqH;AAAAA,MAAaoB;AAAAA,MAAOnH;AAAAA,IAAU,IAAA,MAAMkH,iBACpDlO,SACAyF,SACAyG,mBACF;AAEI,QAAA;AACF,YAAMlF,OAAOsH,QAAQ;AAAA,QACnBC,QAAQ;AAAA,QACRC,SAAS;AAAA,UAACC,eAAe,UAAUN,KAAK;AAAA,QAAE;AAAA,QAC1CO,KAAK,aAAahJ,SAAS,aAAaqH,WAAW;AAAA,QACnDL,MAAM;AAAA,UACJiC,SAAS;AAAA,QAAA;AAAA,MACX,CACD,GACDxO,OAAOuC,MAAM,GAAGC,OAAMiM,MAAM,sCAAsC7B,WAAW;AAAA,CAAI,CAAC,EAAE;AAAA,aAC7E3M,OAAO;AACR,YAAA;AAAA,QAACqC;AAAAA,MAAAA,IAAW2J,YAAYhM,KAAK;AACnCD,aAAOuC,MAAM,GAAGC,OAAMyF,IAAI,oCAAoC3F,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAAA,EAC9E;AAEJ;ACzCA,IAAA,UAAeoM,QAAQ,OAAO,EAAE,eAAe;ACO/C,MAAMC,WAAWD,QAAQ,UAAU;AAMnC,SAASE,WAAWC,WAAmBC,aAAqBC,YAAuC;AACjG,SAAO,IAAI5L,QAAQ,CAAC7C,SAAS+C,WAAW;AAChC2L,UAAAA,qBAAqBC,uBAAkBH,WAAW;AACrCI,uBAAAA,GAAG,SAAUnP,CAAe,QAAA;AAC7CsD,aAAOtD,GAAG;AAAA,IACX,CAAA,GAEDiP,mBAAmBE,GAAG,SAAS,MAAM;AAC3B,cAAA;AAAA,IAAA,CACT;AAEKC,UAAAA,UAAUR,SAAS,OAAO;AAAA,MAC9BS,MAAM;AAAA,MACNC,aAAa;AAAA,QAACC,OAAOC,sBAAKC,UAAUC;AAAAA,MAAAA;AAAAA,IAAqB,CAC1D;AAEOP,YAAAA,GAAG,SAAUnP,CAAe,QAAA;AAC5B0M,cAAA;AAAA,KAA0B1M,IAAI2P,KAAK,GACzCrM,OAAOtD,GAAG;AAAA,IACX,CAAA,GAGDoP,QAAQD,GAAG,WAAYnP,CAAe,QAAA;AAC9B0M,cAAA,uBAAuB1M,IAAIuC,OAAO;AAAA,IACzC,CAAA,GAED6M,QAAQD,GAAG,YAAaS,CAA2BA,cAAA;AACtCA,iB
AAAA,UAASC,GAAGC,cAAc;AAAA,IACtC,CAAA,GAGDV,QAAQW,KAAKd,kBAAkB,GAC/BG,QAAQY,UAAUlB,WAAW,EAAK,GAClCM,QAAQa,SAAS;AAAA,EAAA,CAClB;AACH;ACzCA,MAAMC,oBAAoB;AAE1B,eAAeC,qBACbrQ,SACA+M,aACiB;AACX,QAAA;AAAA,IAACrB;AAAAA,MAAU1L,SAEX;AAAA,IAAC0F;AAAAA,IAAWyI;AAAAA,IAAOnH;AAAAA,EAAU,IAAA,MAAMkH,iBAAiBlO,SAAS+M,aAAab,mBAAiB;AAE7F,MAAA;AAGIO,UAAAA,WAAW,MAAMzF,OAAOsH,QAAQ;AAAA,MACpCE,SAAS;AAAA,QAACC,eAAe,UAAUN,KAAK;AAAA,MAAE;AAAA,MAC1CO,KAAK,aAAahJ,SAAS,aAAaqH,WAAW;AAAA,MACnDuD,OAAO;AAAA,QAACC,OAAOH,kBAAkBI,SAAS;AAAA,MAAA;AAAA,IAAC,CAC5C;AAEG/D,QAAAA,UAAUgE,SAASjI,SAAS,GAAG;AACjC,YAAMkI,kBAAkBjE,SAASgE,QAAQpL,IAAKsL,CAA0B,YAAA;AAAA,QACtErI,OAAOqI,OAAOnL;AAAAA,MAAAA,EACd;AACe,aAAA,MAAMkG,OAAOM,OAAO;AAAA,QACnCvJ,SAAS,sCAAsC2N,iBAAiB;AAAA,QAChEnI,MAAM;AAAA,QACN4F,SAAS6C;AAAAA,MAAAA,CACV;AAAA,IAAA;AAAA,WAIIxQ,KAAK;AACZ,UAAM,IAAItB,MAAM,uCAAuCmO,WAAW,KAAK7M,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAGhF,QAAA,IAAI7D,MAAM,kBAAkB;AACpC;ACvCA,eAAegS,cAAcC,QAA+B;AACtD,MAAA;AACF,UAAMC,OAAAA,OAAOD,MAAM;AAAA,WACZ3Q,KAAK;AACN0M,YAAA,sCAAsC1M,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAE7D;ACRA,MAAMsO,cAAc,GACdC,qBAAqB,KAErBC,qBAAsBC,gBAAuB9K,KAAK+K,IAAI,GAAGD,UAAU,IAAIF;AAE7E,eAAeI,UACbC,WACAC,aAAqBP,aACT;AACHG,WAAAA,aAAa,GAAGA,aAAaI,YAAYJ;AAC5C,QAAA;AACF,aAAO,MAAMG,UAAU;AAAA,aAChBnR,KAAK;AAEZ,UAAIA,IAAIuM,YAAYvM,IAAIuM,SAASF,cAAcrM,IAAIuM,SAASF,aAAa;AACjErM,cAAAA;AAGFqR,YAAAA,aAAaN,mBAAmBC,UAAU;AAChDtE,cAAM,qCAAqC2E,UAAU,UAAUrR,IAAIuC,OAAO,GAC1E,MAAM,IAAIa,QAAS7C,CAAAA,YAAY2C,WAAW3C,SAAS8Q,UAAU,CAAC;AAAA,IAAA;AAI5D,QAAA,IAAI3S,MAAM,oCAAoC;AACtD;AChBA,MAAM4S,uBAAqB,KAAK,KAC1BC,iBAAe,IAAI,KAAK,KAExBnD,YAAUoD,MAAAA,MAAM,CAACC,WAAAA,UAAAA,GAAaC,WAAAA,QAAS,CAAA,CAAC;AAE9C,eAAeC,cACbC,MACAC,UACAC,UACAC,QACe;AAITC,QAAAA,qBAAqBpR,sBAAKqR,SAASJ,QAAQ,GAE3CK,gBAAgBC,iBAAiBH,oBAAoBF,UAAUC,MAAM;AAC3E,QAAMb,UAAU,YAAY;AACpB3E,UAAAA,WAAW,MAAM6B,UAAQ;AAAA,MAC7BwD,KAAAA;AAAAA,MACAQ,cAAc;AAAA,MACdpP,SAAS;AAAA,QAACqP,SAASf;AAAAA,QAAoBgB,QAAQf;AAAAA,MAAY;AAAA,MAC3DgB,QAAQ;AAAA,IAAA,CACT;AAEK7F,YAAA,yCAAyCsF,oBAAoBzF,UAAUF,UAAU,GAEvF,MAAMmG,SAAAA,SAASjG,SAASC,MAAM0C,KAAkBgD,kBAAAA,aAAa,CAAC;AAAA,EAAA,CAC/D;AACH;AAEA,SAASC,iBAAiBN,UAAkBC,UAAkBC,QAAwB;AAGpF,MAAIG,gBAAgB;AACpB,SAAIJ,aAAa,UACfI,gBAAgBtR,cAAAA,QAAKJ,KAAKuR,QAAQ,UAAUF,QAAQ,IAC3CC,aAAa,WACtBI,gBAAgBtR,cAAAA,QAAKJ,KAAKuR,QAAQ,SAASF,QAAQ,IAG9CK;AACT;AC9CA,MAAMZ,qBAAqB,KAAK,KAC1BC,eAAe,IAAI,KAAK,KAExBnD,UAAUoD,MAAAA,MAAM,CAACC,WAAAA,UAAAA,GAAaC,WAAAA,QAAS,CAAA,CAAC;AAG9C,eAAee,iBAAiBb,MAA2B;AACzD,QAAMrF,WAAW,MAAM2E,UAA8B,MACnD9C,QAAQ;AAAA,IACNwD,KAAAA;AAAAA,IACAQ,cAAc;AAAA,IACdpP,SAAS;AAAA,MAACqP,SAASf;AAAAA,MAAoBgB,QAAQf;AAAAA,IAAAA;AAAAA,EAAY,CAC5D,CACH;AAEA7E,SAAAA,QAAM,iDAAiDkF,MAAKrF,UAAUF,UAAU,GAEzEE,SAASC;AAClB;ACRA,MAAMkG,iCAAiCC,YAAAA,SAAS;AAAA,EACtCC,SAAS;AAAA,EAMVC,aAAa;AAAA,EAEpBC,YACEhM,QACAtB,WACAqH,aACAkG,UACA9E,OACA;AACM,UAAA;AAAA,MAAC+E,YAAY;AAAA,IAAA,CAAK,GACxB,KAAKlM,SAASA,QACd,KAAKtB,YAAYA,WACjB,KAAKqH,cAAcA,aACnB,KAAKkG,WAAWA,UAChB,KAAK9E,QAAQA;AAAAA,EAAAA;AAAAA,EAGf,MAAMgF,QAAuB;AACvB,QAAA;AACIxG,YAAAA,OAAO,MAAM,KAAKyG,oBAAoB;AAGxC,WAAKL,eAAe,MACtB,KAAKA,aAAapG,KAAKoG,aAGzBpG,KAAK0G,MAAMzM,QAAS0M,CAAe,SAAA,KAAK3P,KAAK2P,IAAI,CAAC,GAE9C,OAAO3G,KAAK4G,cAAe,YAAY5G,KAAK4G,eAAe,KAC7D,KAAKT,SAASnG,KAAK4G,aAGnB,KAAK5P,KAAK,IAAI;AAAA,aAETzD,KAAK;AACZ,WAAKsT,QAAQtT,GAAY;AAAA,IAAA;AAAA,EAC3B;AAAA;AAAA,EAIF,MAAMkT,sBAAkD;AACtD,UAAM9C,QAAqB,KAAKwC,WAAW,KAAK,CAAA,IAAK;AAAA,MAACS,YAAY,KAAKT;AAAAA,IAAM;AAEzE,QAAA;AACK,aAAA,MAAM,KAAK9L,OAAOsH,QAAQ;AAAA,QAC/BE,SAAS;AAAA,UAACC,eAAe,UAAU,KAAKN,KAAK;AAAA,QAAE;AAAA,QAC/CO,KAAK,aAAa,KAAKhJ,SAAS,aAAa,KAAKqH,WAAW,YAAY,KAAKkG,QAAQ;AAAA,QACtF3C;AAAAA,MAAAA,CACD;AA
AA,aACMlQ,OAAO;AAEd,UAAIyL,MAAMzL,MAAMmM,aAAanM,MAAMqM,SAASC,KAAKjK,UAAUrC,MAAMqC;AAG7DoJ,YAAAA,QAAQmC,WACVnC,MAAMlG,OAAOvF,KAAK,IAEd,IAAIxB,MAAM,sCAAsCiN,GAAG,EAAE;AAAA,IAAA;AAAA,EAC7D;AAEJ;ACnEA,MAAM4H,cAAcA,CAACtT,QAAsBuT,cAAuC;AAChF,MAAIrS,UAAUlB,OAAOkB,QAAQqS,SAAS,EAAEhV,SACpCiV,eAA8B;AAAA,IAACC,MAAMF;AAAAA,EAAAA,GACrChV,QAAQqD,KAAKjD,IAAI;AAErB,QAAM4D,QAASoN,CAA4BA,cAAA;AACzC,UAAM+D,UAAUC,kBAAAA,QAAS/R,KAAKjD,IAAAA,IAAQJ,KAAK;AACvCoR,IAAAA,UAAS5J,WAAW4J,UAAS5J,UAAU,KAAK4J,UAASiE,SAASjE,UAASiE,QAAQ,IACjF1S,QAAQ2S,OAAO,GAAGlE,UAAS8D,IAAI,KAAK9D,UAAS5J,OAAO,IAAI4J,UAASiE,KAAK,MAAMF,OAAO,MAEnFxS,QAAQ2S,OAAO,GAAGlE,UAAS8D,IAAI,KAAKC,OAAO;AAAA,EAE/C;AAEO,SAAA;AAAA,IACLI,KAAMnE,CAA4BA,cAAA;AAC5BA,MAAAA,UAAS8D,SAASD,aAAaC,QACjClR,MAAMiR,YAAY,GAClBtS,QAAQiB,QAAQ,GAChBjB,UAAUlB,OAAOkB,QAAQyO,UAAS8D,IAAI,EAAElV,MACxCA,GAAAA,QAAQqD,KAAKjD,IAAAA,KACJgR,UAAS8D,SAASD,aAAaC,QAAQ9D,UAASjL,UACzDnC,MAAMoN,SAAQ,GAEhB6D,eAAe7D;AAAAA,IACjB;AAAA,IACAjL,QAASiL,CAA4BA,cAAA;AAC7BA,YAAAA,SAAQ,GACd6D,eAAe7D;AAAAA,IACjB;AAAA,IACAxN,SAASA,MAAM;AACbjB,cAAQiB,QAAQ,GAChB5D,QAAQqD,KAAKjD,IAAI;AAAA,IACnB;AAAA,IACA0D,MAAMA,MAAM;AACVnB,cAAQmB,KAAK,GACb9D,QAAQqD,KAAKjD,IAAI;AAAA,IAAA;AAAA,EAErB;AACF;ACxDA,SAASoV,cAAc7N,OAAsB;AAC3C,QAAMI,IAAIJ,SAAQ,IAAI,IAAID,KAAK+N,MAAM/N,KAAKgO,IAAI/N,KAAI,IAAID,KAAKgO,IAAI,IAAI,CAAC;AACpE,SAAO,IAAI/N,QAAOD,KAAK+K,IAAI,MAAM1K,CAAC,GAAGlE,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,MAAM,MAAM,MAAM,IAAI,EAAEkE,CAAC,CAAC;AACrF;ACHA,SAAS4N,cAAcC,UAA2B;AAEzC,SAAA,CAAC,SAASpH,KAAKoH,QAAQ;AAChC;AC8BA,MAAM1H,QAAQ2H,eAAAA,QAAY,eAAe,GAEnCC,+BAA+B,IAC/BC,2BAA2B,IAa3B/J,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAajB,SAASgK,gBAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrDqI,QAAQ,aAAa;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACrCmF,QAAQ,OAAO;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EAC/BmF,QAAQ,eAAe;AAAA,IAACnF,MAAM;AAAA,IAAU+C,SAASwJ;AAAAA,EAAAA,CAA6B,EAC9EpH,QAAQ,aAAa;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAM,CAAA,EAAE7B;AAC7D;AAEA,MAAM0L,wBAA8C;AAAA,EAClDlW,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA;AAAAA,EAEAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,IAAAA,IAAS3C,SAClB,CAACgH,QAAQ8N,IAAI,IAAI,MAAMC,qBAAqB/U,SAASD,IAAI,GACzD;AAAA,MAAC2F;AAAAA,MAAWqH;AAAAA,MAAakG;AAAAA,MAAUhB;AAAAA,MAAQ+C;AAAAA,IAAAA,IAAeF;AAG5D7C,QAAAA,WAAW,MAAM+C,gBAAgB,IAAI;AACvC7U,aAAOuC,MAAM,sBAAsB;AACnC;AAAA,IAAA;AAEF,UAAMuM,cAAcnO,cAAAA,QAAKJ,KAAKuR,QAAQ+C,WAAW;AAEjD7U,WAAOuC,MAAM,gXAA+D,GAC5EvC,OAAOuC,MAAM,yEAA+D,GAC5EvC,OAAOuC,MAAM,yEAA+D,GAC5EvC,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,WAAW,CAAC,KAAKtS,OAAMgE,KAAKjB,SAAS,EAAEgB,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,SAAS,CAAC,KAAKtS,OAAMgE,KAAKoG,WAAW,EAAErG,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,UAAU,CAAC,KAAKtS,OAAMgE,KAAKsM,QAAQ,EAAEvM,OAAO,EAAE,CAAC,SAAI,GAChFvG,OAAOuC,MAAM,yEAA+D,GAC5EvC,OAAOuC,MAAM,gXAA+D,GAC5EvC,OAAOuC,MAAM,EAAE,GACfvC,OAAOuC,MAAM,0BAA0BC,OAAMgE,KAAKsI,WAAW,CAAC,GAAG;AAEjE,UAAMvQ,QAAQqD,KAAKjD,IACboW,GAAAA,kBAAkBzB,YAAYtT,QAAQ,kCAAkC,GAKxE6O,YAAY,MAAMmG,GAAAA,QAAQrU,cAAAA,QAAKJ,KAAK0U,GAAO,OAAA,GAAG,gBAAgB,CAAC;AAGrE,eAAWC,OAAO,CAACpD,QAAQnR,cAAAA,QAAKJ,KAAKsO,WAAW,QAAQ,GAAGlO,cAAAA,QAAKJ,KAAKsO,WAAW,OAAO,CAAC;AACtFsG,WAAAA,UAAUD,KAAK;AAAA,QAAC5T,WAAW;AAAA,MAAA,CAAK;AAGlCmL,UAAM,qCAAqCoC,SAAS;AACpD,UAAMuG,sBAAsBzU,cAAAA,QAAKJ,KAAKsO,WAAW,aAAa,GAGxDwG,eAAepG,KAAAA,kBAAkBmG,mBAAmB,GACpDE,gBAAgB,IAAIC,WAAAA,MAAM;AAE5B,QAAA;AACF,YAAMC,mBAAmB,IAAI/C,yBAC3B5L,QACA8N,KAAKpP,WACLoP,KAAK/H,aACL+H,KAAK7B,UACL6B,KAAK3G,KACP,GAEMkF,Q
AAgB,CAAE;AACxB,UAAI5M,IAAI;AACR,uBAAiB6M,QAAQqC;AACvBtC,cAAM1P,KAAK2P,IAAI,GACf7M,KACAyO,gBAAgBjB,IAAI;AAAA,UAClBL,MAAM;AAAA,UACN/O,QAAQ;AAAA,UACRqB,SAASO;AAAAA,UACTsN,OAAO4B,iBAAiB5C;AAAAA,QAAAA,CACzB;AAGH,UAAI6C,uBAAuB;AAErB,YAAA;AAAA,QAAC5K,SAAS6K;AAAAA,MAAAA,IAAQ,MAAM,OAAO,OAAO;AACtCA,YAAAA,KACJxC,OACA,OAAOC,SAAe;AACpB,YAAIA,KAAKrL,SAAS,UAAUqL,KAAKrL,SAAS;AACxC,gBAAM4J,cAAcyB,KAAKxB,KAAKwB,KAAK3U,MAAM2U,KAAKrL,MAAM+G,SAAS;AAAA,aACxD;AACL,gBAAM8G,MAAM,MAAMnD,iBAAiBW,KAAKxB,GAAG;AACrC2D,gBAAAA,cAAcM,aAAa,MAAM;AACxBC,yBAAAA,MAAM,GAAGF,GAAG;AAAA,CAAI;AAAA,UAAA,CAC9B;AAAA,QAAA;AAGqB,gCAAA,GACxBZ,gBAAgBjB,IAAI;AAAA,UAClBL,MAAM;AAAA,UACN/O,QAAQ;AAAA,UACRqB,SAAS0P;AAAAA,UACT7B,OAAO4B,iBAAiB5C;AAAAA,QAAAA,CACzB;AAAA,MAAA,GAEH;AAAA,QAACkD,aAAanB,KAAKmB;AAAAA,MAAAA,CACrB;AAAA,aACO7V,OAAO;AACd8U,sBAAgB1S,KAAK;AACf,YAAA;AAAA,QAACC;AAAAA,MAAAA,IAAW2J,YAAYhM,KAAK;AACnC,YAAM,IAAIxB,MAAM,sCAAsC6D,OAAO,EAAE;AAAA,IAAA;AAGjE+S,iBAAazW,OACb,MAAMmX,SAAAA,SAASV,YAAY,GAE3BN,gBAAgBjB,IAAI;AAAA,MAACL,MAAM;AAAA,MAAqC/O,QAAQ;AAAA,IAAA,CAAK;AACzE,QAAA;AACIkK,YAAAA,WAAWC,WAAWC,aAAce,CAA2B,mBAAA;AACnEkF,wBAAgBrQ,OAAO;AAAA,UACrB+O,MAAM,mCAAmCM,cAAclE,cAAc,CAAC;AAAA,QAAA,CACvE;AAAA,MAAA,CACF;AAAA,aACM9P,KAAK;AACZgV,YAAAA,gBAAgB1S,QACV,IAAI5D,MAAM,4BAA4BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAG3DyS,oBAAgBjB,IAAI;AAAA,MAClBL,MAAM,kCAAkCjR,OAAMgE,KAAK,GAAGqI,SAAS,EAAE,CAAC;AAAA,IACnE,CAAA,GACD,MAAM4B,cAAc5B,SAAS,GAE7BkG,gBAAgBjB,IAAI;AAAA,MAClBL,MAAM,6BAA6BE,kBAAAA,QAAS/R,KAAKjD,IAAI,IAAIJ,KAAK,CAAC;AAAA,IAAA,CAChE,GACDwW,gBAAgB5S,QAAQ;AAAA,EAAA;AAE5B;AAGA,eAAeyS,qBACb/U,SACAD,MACgD;AAC1CO,QAAAA,QAAQ,MAAMoU,gBAAc3U,IAAI,GAChC,CAAC0F,OAAO,IAAI1F,KAAKqJ,oBACjB;AAAA,IAACsC;AAAAA,IAAQrL;AAAAA,MAAWL,SACpB;AAAA,IAAC0F;AAAAA,IAAWqH;AAAAA,IAAa/F;AAAAA,MAAU,MAAMkH,iBAC7ClO,SACAyF,SACAyG,mBACF,GAEM;AAAA,IAACiC;AAAAA,EAAAA,IAASnH,OAAOK,OAAO;AAC9B,MAAI,CAAC8O,kBAAAA,QAAShI,KAAK,KAAKA,MAAM3F,SAAS;AAC/B,UAAA,IAAI5J,MAAM,kBAAkB;AAGpC,MAAI,CAACuX,kBAAAA,QAASpJ,WAAW,KAAKA,YAAYvE,SAAS;AACjD,UAAM,IAAI5J,MAAM,WAAWmO,WAAW,+BAA+B;AAGjEkG,QAAAA,WAAWtN,OAAOrF,MAAM,WAAW,KAAM,MAAM+P,qBAAqBrQ,SAAS+M,WAAW,CAAE;AAChG,MAAIkG,SAASzK,SAAS;AACpB,UAAM,IAAI5J,MAAM,aAAa0B,MAAM,WAAW,CAAC,2BAA2B;AAG5E,MAAI,iBAAiBA,UACfA,MAAM2V,cAAc,KAAK3V,MAAM2V,cAAcxB;AAC/C,UAAM,IAAI7V,MAAM,iCAAiC6V,wBAAwB,QAAQ;AAIrF,QAAM2B,qBAAqB,GAAGrJ,WAAW,WAAWkG,QAAQ;AAC5D,MAAIoD,MAAM,OAAO,YACX/V,MAAM+V,QAAQrI,SAETsI,KAAWhW,WAAAA,MAAM+V,GAAG,IAGf,MAAM3K,OAAOM,OAAO;AAAA,IAChC/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACTuI,SAASlK,cAAAA,QAAKJ,KAAKL,SAAS+V,kBAAkB;AAAA,IAC9CG,QAAQD,KAAAA;AAAAA,EAAAA,CACT,GAEA;AAGH,SAAIjC,cAAcgC,GAAG,MACnBA,MAAMvV,cAAAA,QAAKJ,KAAK2V,KAAKD,kBAAkB,IAIrC,CAAC9V,MAAMkW,aAAaC,KAAAA,WAAWJ,GAAG,MACZ,MAAM3K,OAAOM,OAAO;AAAA,IAC1C/D,MAAM;AAAA,IACNxF,SAAS,SAAS4T,GAAG;AAAA,IACrBrL,SAAS;AAAA,EACV,CAAA,MAKCqL,MAAM,MAIH,CACLrP,QACA;AAAA,IACEtB;AAAAA,IACAqH;AAAAA,IACAkG;AAAAA,IACA9E;AAAAA,IACA8D,QAAQnR,cAAAA,QAAKgC,QAAQuT,GAAG;AAAA,IACxBrB,aAAalU,cAAAA,QAAKqR,SAASkE,GAAG;AAAA,IAC9BG,WAAWlW,MAAMkW;AAAAA,IACjBP,aAAa3V,MAAM2V,eAAezB;AAAAA,EAAAA,CACnC;AAEL;ACrRA,MAAM9J,aAAW;AAAA;AAAA;AAAA,GAKXgM,6BAAmD;AAAA,EACvD/X,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAClB,CAACyF,OAAO,IAAI1F,KAAKqJ,oBACjB;AAAA,MAAC1D;AAAAA,MAAWqH;AAAAA,MAAaoB;AAAAA,MAAOnH;AAAAA,IAAU,IAAA,MAAMkH,iBACpDlO,SACAyF,SACAyG,mBACF;AAEI,QAAA;AACF,YAAMlF,OAAOsH,QAAQ;AAAA,QACnBC,QAAQ;AAAA,QACRC,SAAS;AAAA,UAACC,eAAe,UAAUN,KAAK;AAAA,QAAE;AAAA,QAC1CO,KAAK,aAAahJ,SAAS,aAAaqH,WAAW;AAAA,QACnDL,MAAM;AAAA,UACJiC,SAAS;AAAA,QAAA;A
AAA,MACX,CACD,GAEDxO,OAAOuC,MACL,GAAGC,OAAMiM,MACP,+BAA+B7B,WAAW;AAAA;AAAA,CAC5C,CAAC,EACH,GAEA5M,OAAOuC,MACL,GAAGC,OAAMsS,KAAK;AAAA,CAAsE,CAAC,EACvF;AAAA,aACO7U,OAAO;AACR,YAAA;AAAA,QAACqC;AAAAA,MAAAA,IAAW2J,YAAYhM,KAAK;AACnCD,aAAOuC,MAAM,GAAGC,OAAMyF,IAAI,mCAAmC3F,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAAA,EAC7E;AAEJ,GCxCMkU,4BAA4B,IAuB5BjM,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAajB,SAASgK,gBAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrDqI,QAAQ,SAAS;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACjCmF,QAAQ,UAAU;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EAClCmF,QAAQ,SAAS;AAAA,IAACnF,MAAM;AAAA,IAAU+C,SAAS2L;AAAAA,IAA2BC,OAAO;AAAA,EAAI,CAAA,EAAEzN;AACxF;AAEA,MAAM0N,2BAAyE;AAAA,EAC7ElY,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,IAAAA,IAAS3C,SAClBM,QAAQ,MAAMoU,gBAAc3U,IAAI,GAChC,CAAC0F,OAAO,IAAI1F,KAAKqJ,oBAEjB;AAAA,MAAC1D;AAAAA,MAAWqH;AAAAA,MAAaoB;AAAAA,MAAOnH;AAAAA,IAAAA,IAAU,MAAMkH,iBACpDlO,SACAyF,SACAyG,mBACF,GAEMoE,QAAsC;AAAA,MAACC,OAAOoG,0BAA0BnG,SAAS;AAAA,IAAC;AACxF,QAAIlQ,MAAMiQ,OAAO;AAGf,UAAIjQ,MAAMiQ,QAAQ,KAAKjQ,MAAMiQ,QAAQuG,OAAOC;AAC1C,cAAM,IAAInY,MACR,qDAAqDkY,OAAOC,gBAAgB,EAC9E;AAEIxG,YAAAA,QAAQjQ,MAAMiQ,MAAMC,SAAS;AAAA,IAAA;AAGjClQ,QAAAA,MAAM0W,UAAU1W,MAAM2W;AACpB,UAAA;AACIC,cAAAA,eAAeC,iBAAiB7W,MAAM0W,MAAM,GAC5CI,cAAcD,iBAAiB7W,MAAM2W,KAAK;AAEhD,YAAIG,eAAeF,gBAAgBG,gBAAQD,aAAaF,YAAY;AAC5D,gBAAA,IAAItY,MAAM,sCAAsC;AAGxD0R,cAAM0G,SAAS1W,MAAM0W,QACrB1G,MAAM2G,QAAQ3W,MAAM2W;AAAAA,eACb/W,KAAK;AACZ,cAAM,IAAItB,MAAM,uBAAuBsB,GAAG,EAAE;AAAA,MAAA;AAI5CuM,QAAAA;AACA,QAAA;AACS,iBAAA,MAAMzF,OAAOsH,QAA4B;AAAA,QAClDE,SAAS;AAAA,UAACC,eAAe,UAAUN,KAAK;AAAA,QAAE;AAAA,QAC1CO,KAAK,aAAahJ,SAAS,aAAaqH,WAAW;AAAA,QACnDuD,OAAO;AAAA,UAAC,GAAGA;AAAAA,QAAAA;AAAAA,MAAK,CACjB;AAAA,aACMlQ,OAAO;AACR,YAAA;AAAA,QAACqC;AAAAA,MAAAA,IAAW2J,YAAYhM,KAAK;AACnCD,aAAOC,MAAM,GAAGuC,OAAMyF,IAAI,+BAA+B3F,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAGrEgK,QAAAA,YAAYA,SAASgE,SAAS;AAC5BhE,UAAAA,SAASgE,QAAQjI,WAAW,GAAG;AACjCrI,eAAOuC,MAAM,mBAAmB;AAChC;AAAA,MAAA;AAGI4U,YAAAA,QAAQ,IAAIC,0BAAM;AAAA,QACtBC,SAAS,CACP;AAAA,UAAC7Y,MAAM;AAAA,UAAY8Y,OAAO;AAAA,UAAYC,WAAW;AAAA,QAAA,GACjD;AAAA,UAAC/Y,MAAM;AAAA,UAAa8Y,OAAO;AAAA,UAAcC,WAAW;AAAA,QAAA,GACpD;AAAA,UAAC/Y,MAAM;AAAA,UAAY8Y,OAAO;AAAA,UAAaC,WAAW;AAAA,QAAO,CAAA;AAAA,MAAA,CAE5D;AAEQjH,eAAAA,QAAQ7J,QAAS+J,CAAmC,WAAA;AACrD,cAAA;AAAA,UAACnL;AAAAA,UAAI1D;AAAAA,QAAAA,IAAa6O;AACxB2G,cAAMK,OAAO;AAAA,UACXC,UAAU;AAAA,UACV9V,WAAW+V,QAAY9V,YAAAA,KAAKiH,MAAMlH,SAAS,GAAG,qBAAqB;AAAA,UACnEmR,UAAUzN;AAAAA,QAAAA,CACX;AAAA,MAAA,CACF,GAED8R,MAAMQ,WAAW;AAAA,IAAA;AAAA,EACnB;AAEJ;AAEA,SAASX,iBAAiBY,MAA4C;AACpE,MAAI,CAACA,KAAM;AACX,QAAMC,aAAahP,QAAAA,MAAM+O,MAAM,cAAc,oBAAIhW,MAAM;AACvD,MAAIkW,QAAAA,QAAQD,UAAU;AACbA,WAAAA;AAGT,QAAM,IAAIpZ,MAAM,WAAWmZ,IAAI,8BAA8B;AAC/D;AC/IA,MAAMrN,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaXwN,eAAqC;AAAA,EACzCvZ,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,SACA8K,eAEoB,MAAMC,kBAEPhL,MAAMC,SAAS8K,SAAS;AAAA,EAE7CJ,UAAAA;AACF;AAEA,eAAeK,iBAAiB;AAUlB,UAAA,MAAM;mBAAO,kBAAiC;AAAA,EAAA,CAAA,EAAA,KAAA,SAAA,GAAA;AAAA,WAAA,EAAA;AAAA,EAAA,CAAA,GAE/CC;AACb;ACvCA,MAAMmN,sBAAsB,6BACtBC,kBAAkB;AAMFC,eAAAA,cACpBC,aACAhY,OACAN,SACkB;AACZ,QAAA;AAAA,IAAC+G;AAAAA,IAAW2E;AAAAA,IAAQvL;AAAAA,EAAUH,IAAAA,SAC9BuY,SAAS,OAAOD,cAClBE,wBAAwBF,WAAW,IACnCG,kBAAgB/M,MAAM,IAEpBgN,cAAcH,OAAOI,SAAS,GAAG;AACvC,MAAID,eAAe,CAAE,MAAME,8BAA8BL,QAAQvY,OAAO;AAC/D,WAAA;AAEH6Y,QAAAA,mBACJ,OAAOvY,MAAMwY,cAAgB,MACzB,MAAMC,qBAAqBL
,aAAa1Y,OAAO,IAC/CgZ,CAAAA,CAAQ1Y,MAAMwY;AAEhBR,SAAAA,gBAAgBC,UAClBpY,OAAOuC,MAAM,wBAAwB6V,MAAM,EAAE,GAQ/C,MALexR,UAAU;AAAA,IACvBE,aAAa;AAAA,IACbC,gBAAgB;AAAA,EACjB,CAAA,EAEYoH,QAAQ;AAAA,IACnBC,QAAQ;AAAA,IACRuD,KAAK;AAAA,IACLpF,MAAM;AAAA,MAAC6L;AAAAA,MAAQM;AAAAA,IAAgB;AAAA,IAC/BvG,cAAc;AAAA,EACf,CAAA,GAEM;AACT;AAEA,SAASyG,qBAAqBL,aAAsB1Y,SAA6C;AACzF,QAAA;AAAA,IAAC0L;AAAAA,IAAQvL;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C;AAEhCG,SAAAA,OAAOuC,MAAM,EAAE,GACXgW,cACFvY,OAAOuC,MAAMuW,iBAAAA;AAAAA,QACTtW,OAAMuW,OAAO,GAAGC,oBAAAA,QAAWC,OAAO,WAAW,CAAC;AAAA,WAC3CzW,OAAMyF,IAAIzF,OAAM0W,UAAU,QAAQ,CAAC,CAAC;AAAA;AAAA,iCAEd1W,OAAM0W,UAAU,gBAAgB,CAAC;AAAA;AAAA,KAE7D,IAEDlZ,OAAOuC,MAAMuW,iBAAAA;AAAAA,QACTtW,OAAMuW,OAAO,GAAGC,oBAAAA,QAAWC,OAAO,WAAW,CAAC;AAAA;AAAA;AAAA,iBAGrCzW,OAAM0W,UAAU,gBAAgB,CAAC;AAAA;AAAA;AAAA,KAG7C,GAGHlZ,OAAOuC,MAAM,EAAE,GAERgJ,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAASwW,iBAAAA;AAAAA;AAAAA;AAAAA,IAGTjO,SAAS;AAAA,EAAA,CACV;AACH;AAEA,SAAS4N,8BACPL,QACAvY,SACkB;AACZ,QAAA;AAAA,IAAC0L;AAAAA,IAAQvL;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C;AAEhCG,SAAAA,OAAOuC,MAAM,EAAE,GACfvC,OAAOuC,MAAMC,OAAMuW,OAAO,GAAGC,oBAAAA,QAAWC,OAAO,wCAAwC,CAAC,GAEpFb,WAAW,OACbpY,OAAOuC,MAAM,kCAAkC,GAC/CvC,OAAOuC,MAAM,2CAA2C,GACxDvC,OAAOuC,MAAM,iCAAiC,GAC9CvC,OAAOuC,MAAM,2BAA2B,MAExCvC,OAAOuC,MAAM,KAAK6V,OAAOe,QAAQ,OAAO,OAAO,EAAEA,QAAQ,OAAO,KAAK,CAAC,EAAE,GACxEnZ,OAAOuC,MAAM,KAAK6V,OAAOe,QAAQ,OAAO,OAAO,EAAEA,QAAQ,OAAO,SAAS,CAAC,EAAE,IAG9EnZ,OAAOuC,MAAM,EAAE,GAERgJ,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAASwW,iBAAAA;AAAAA,+BACkBtW,OAAMyF,IAAI,OAAO,CAAC;AAAA,gBACjCzF,OAAM0W,UAAU,iBAAiB,CAAC;AAAA,IAC9CrO,SAAS;AAAA,EAAA,CACV;AACH;AAEA,SAASyN,kBAAgB/M,QAAsC;AAC7D,SAAOA,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACT8T,QAAQgD;AAAAA,IACRlM,UAAWkL,CAAAA,WAAWiB,eAAejB,QAAQA,MAAM;AAAA,EAAA,CACpD;AACH;AAEA,SAASgB,aAAahB,QAA+B;AACnD,MAAIA,WAAW,OAAOA,WAAW,eAAeA,WAAW;AAClDA,WAAAA;AAGL,MAAA;AACF,UAAMkB,UAAUlB,OACbe,QAAQ,aAAa,KAAKnB,mBAAmB,EAAE,EAC/CmB,QAAQ,OAAOlB,eAAe,GAE3BsB,SAAS5H,aAAAA,QAAI9I,MAAMyQ,OAAO;AAC5BE,QAAAA,OAAOD,OAAOC,QAAQ;AAC1B,WAAI,YAAYzM,KAAKwM,OAAOE,YAAY,EAAE,MACxCD,OAAOA,KAAKL,QAAQ,cAAc,EAAE,IAGtCK,OAAOA,KAAKL,QAAQlB,iBAAiB,IAAI,EAAEkB,QAAQ,IAAIO,OAAO1B,qBAAqB,GAAG,GAAG,GAAG,GAErF,GAAGuB,OAAOE,QAAQ,KAAKD,IAAI;AAAA,EAAA,QACtB;AACL,WAAA;AAAA,EAAA;AAEX;AAEA,SAASH,eAAejB,QAAuBD,aAAoC;AACjF,MAAIC,WAAW,OAAOA,WAAW,eAAeA,WAAW;AAClD,WAAA;AAGL,MAAA;AACEvP,WAAAA,aAAAA,QAAAA,MAAMuP,UAAW,CAAmB,GACjC;AAAA,EAAA,QACK;AAAA,EAAA;AAId,SAAI,aAAarL,KAAKoL,WAAW,IACxB,+DAGF,mBAAmBA,WAAW;AACvC;AAEA,SAASE,wBAAwBF,aAA6B;AAC5D,QAAMC,SAASgB,aAAajB,WAAW,GACjCpQ,SAASsR,eAAejB,QAAQD,WAAW;AACjD,MAAIpQ,WAAW;AACP,UAAA,IAAItJ,MAAMsJ,MAAM;AAGxB,MAAI,CAACqQ;AACG,UAAA,IAAI3Z,MAAM,gBAAgB;AAG3B2Z,SAAAA;AACT;AC5KA,MAAM7N,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAUXoP,uBAA6C;AAAA,EACjDnb,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,IAAUH,IAAAA,SACX,CAACuY,MAAM,IAAIxY,KAAKqJ;AAEtB,QAAI,CAACmP;AACG,YAAA,IAAI3Z,MAAM,yDAAyD;AAG3E,UAAM0B,QAAQP,KAAKQ;AAGJwP,gBAAAA,QAAG0G,WAAW3V,sBAAKJ,KAAKpB,QAAQ4B,IAAOqX,GAAAA,MAAM,CAAC,KAE3DpY,OAAO2L,KAAK,WAAWyM,MAAM,mDAAmD,GAGlE,MAAMF,cAAcE,QAAQjY,OAAON,OAAO,KAExDG,OAAOuC,MAAM,gCAAgC;AAAA,EAAA;AAGnD,GC1CMqX,YAAuC;AAAA,EAC3Cpb,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCHMC,aAAW;AAAA;AAAA;AAAA;AAAA,GAMXsP,0BAAgD;AAAA,EACpDrb,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQ4G;AAAAA,IAAAA,IAAa/G,SACtB,CAACuY,MAAM,IAAIxY,KA
AKqJ,oBAChBpC,SAASD,UAAU;AAAA,MAACE,aAAa;AAAA,MAAMC,gBAAgB;AAAA,IAAK,CAAA,GAC5D+S,WAAW,MAAMxB,gBAAgBF,QAAQvY,OAAO;AAClD,QAAA;AACF,YAAMgH,OAAOsH,QAAQ;AAAA,QAACC,QAAQ;AAAA,QAAUG,KAAK,SAASuL,QAAQ;AAAA,MAAA,CAAG,GACjE9Z,OAAOuC,MAAM,gBAAgB;AAAA,aACtBxC,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA4BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC3D;AAEJ;AAIA,eAAegW,gBAAgByB,WAA+Bla,SAA4B;AACxF,QAAMma,kBAAkBD,aAAaA,UAAUlN,YAAAA,GACzC;AAAA,IAACtB;AAAAA,IAAQ3E;AAAAA,EAAAA,IAAa/G,SAGtBoa,UAAU,MAFDrT,UAAU;AAAA,IAACE,aAAa;AAAA,IAAMC,gBAAgB;AAAA,EAAK,CAAA,EAErCoH,QAAsB;AAAA,IAACwD,KAAK;AAAA,EAAA,CAAQ;AACjE,MAAIqI,iBAAiB;AACbvM,UAAAA,WAAWwM,QAAQ7D,OAAQgC,CAAWA,WAAAA,OAAOA,OAAOvL,YAAY,MAAMmN,eAAe,EAAE,CAAC;AAC9F,QAAI,CAACvM;AACH,YAAM,IAAIhP,MAAM,WAAWsb,SAAS,aAAa;AAGnD,WAAOtM,SAASpI;AAAAA,EAAAA;AAGZqI,QAAAA,UAAUuM,QAAQ/U,IAAKkT,CAAY,YAAA;AAAA,IAACjQ,OAAOiQ,OAAO/S;AAAAA,IAAI7G,MAAM4Z,OAAOA;AAAAA,EAAAA,EAAQ;AACjF,SAAO7M,OAAOM,OAAO;AAAA,IACnBvJ,SAAS;AAAA,IACTwF,MAAM;AAAA,IACN4F;AAAAA,EAAAA,CACD;AACH;ACjDA,MAAMnD,aAAW;AAAA;AAAA;AAAA,GAKX2P,yBAA+C;AAAA,EACnD1b,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,QAAUH,SACX;AAAA,MAAC+G;AAAAA,IAAAA,IAAa/G,SAEdoa,UAAU,MADDrT,UAAU;AAAA,MAACE,aAAa;AAAA,MAAMC,gBAAgB;AAAA,IAAK,CAAA,EACrCoH,QAAsB;AAAA,MAACwD,KAAK;AAAA,IAAA,CAAQ;AACjE3R,WAAOuC,MAAM0X,QAAQ/U,IAAKkT,YAAWA,OAAOA,MAAM,EAAE7X,KAAK;AAAA,CAAI,CAAC;AAAA,EAAA;AAElE;ACpBO,SAAS4Z,yBAAyBvN,aAAqC;AAC5E,MAAI,CAACA;AACI,WAAA;AAGHpO,QAAAA,OAAO,GAAGoO,WAAW;AAEvBpO,SAAAA,KAAKqO,YAAY,MAAMrO,OAClB,gDAGLA,KAAK6J,SAAS,IACT,oDAGL7J,KAAK6J,SAASyE,KACT,6CAGJ,aAAaC,KAAKvO,IAAI,IAItB,yBAAyBuO,KAAKvO,IAAI,IAInC,QAAQuO,KAAKvO,IAAI,IACZ,yDAGF,KAPE,0EAJA;AAYX;AC9BO,SAAS4b,0BACd7O,QACA0B,UAAgD,IAC/B;AACjB,SAAO1B,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACT4K,UAAW1O,CAAS,SACN2b,yBAAyB3b,IAAI,KAKlC;AAAA,IAET,GAAGyO;AAAAA,EAAAA,CACJ;AACH;AChBO,MAAMoN,eAAe;AAErB,SAASC,YAAYzT,QAAyD;AACnF,SAAOA,OAAOsH,QAAkC;AAAA,IAACI,KAAK;AAAA,EAAA,CAAW;AACnE;AAEgBgM,SAAAA,YACd1T,QACA2T,WACA5N,aACsC;AACtC,SAAO6N,OAAO5T,QAAQ,OAAO2T,WAAW5N,cAAc;AAAA,IAACA;AAAAA,MAAeiB,MAAS;AACjF;AAEgB6M,SAAAA,YACd7T,QACA2T,WACA5N,aACsC;AACtC,SAAO6N,OAAO5T,QAAQ,SAAS2T,WAAW5N,cAAc;AAAA,IAACA;AAAAA,MAAeiB,MAAS;AACnF;AAEgB8M,SAAAA,YACd9T,QACA2T,WACsC;AACbA,SAClBC,OAAO5T,QAAQ,SAAS,GAAG2T,SAAS,WAAW,EAAE;AAC1D;AAEgBI,SAAAA,YAAY/T,QAAsB2T,WAAgD;AACzFC,SAAAA,OAAO5T,QAAQ,UAAU2T,SAAS;AAC3C;AAEA,SAASC,OACP5T,QACAuH,QACAoM,WACAjO,MACA;AACA,SAAO1F,OAAOsH,QAAQ;AAAA,IAACC;AAAAA,IAAQG,KAAK,YAAYiM,SAAS;AAAA,IAAIjO;AAAAA,EAAAA,CAAK;AACpE;ACrCasO,MAAAA,qBAAuC,OAAOjb,MAAMC,YAAY;AACrE,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQuL;AAAAA,EAAU1L,IAAAA,SAC9B,CAAG4W,EAAAA,OAAOqE,aAAa,IAAIlb,KAAKqJ,oBAChCpC,SAASD,UAAU,GAEnBmU,YAAYtE,SAAS0D,yBAAyB1D,KAAK;AACrDsE,MAAAA;AACI,UAAA,IAAItc,MAAMsc,SAAS;AAG3B,QAAM,CAAC1N,UAAU2N,SAASC,eAAe,IAAI,MAAM9X,QAAQY,IAAI,CAC7D8C,OAAOwG,SAASC,KAAK,EAAE4N,KAAMC,CAAAA,SAASA,KAAKjW,IAAKkW,CAAAA,OAAOA,GAAG5c,IAAI,CAAC,GAC/D6c,YAAwBxU,MAAM,EAAEqU,KAAMC,CAAAA,SAASA,KAAKjW,IAAKkW,QAAOA,GAAG5c,IAAI,CAAC,GACxEqI,OAAOsH,QAAQ;AAAA,IAACI,KAAK;AAAA,EAAY,CAAA,CAAC,CACnC;AAED,MAAIiM,YAAY,OAAO/D,SAAS2D,0BAA0B7O,MAAM,IAC5D+P,kBAAkBd;AAQtB,MANIA,UAAUe,WAAWlB,YAAY,IACnCG,YAAYA,UAAU5V,MAAM,CAAC,IAE7B0W,kBAAkB,GAAGjB,YAAY,GAAGG,SAAS,IAG3CQ,QAAQxC,SAASgC,SAAS;AAC5B,UAAM,IAAI/b,MAAM,kBAAkB6c,eAAe,kBAAkB;AAGrE,MAAIR,eAAe;AACXU,UAAAA,aAAa7O,oBAAoBmO,aAAa;AAChDU,QAAAA;AACI,YAAA,IAAI/c,MAAM+c,UAAU;AAAA,EAAA;AAI9B,QAAM5O,cAAc,OAAOkO,iBAAiB9N,qBAAqBzB,MAAM;AACvE,MAAIqB,eAAe,CAACS,SAASmL,SAAS5L,WAAW;AAC/C,UAAM,IAAInO,MAAM,YAAYmO,WAA
W,mBAAmB;AAIxD,MAAA,CADmBqO,gBAAgBzC,SAAS,2BAA2B;AAEnE,UAAA,IAAI/Z,MAAM,4CAA4C;AAG1D,MAAA;AACF,UAAM4c,YAAwBxU,QAAQ2T,WAAW5N,WAAW,GAC5D5M,OAAOuC,MACL,iBAAiB+Y,eAAe,YAC9B1O,eAAe,iBAAiBA,WAAW,EAAE,eAEjD;AAAA,WACO7M,KAAK;AACZ,UAAM,IAAItB,MAAM;AAAA,EAAmCsB,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAEpE;ACzDA,SAASiS,gBAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EAAE6W,OAAO,SAAS;AAAA,IAAC3T,MAAM;AAAA,EAAU,CAAA,EAAEkB;AAC/F;AAMa0S,MAAAA,qBAAyD,OAAO9b,MAAMC,YAAY;AACvF,QAAA;AAAA,IAAC+G;AAAAA,IAAW2E;AAAAA,IAAQvL;AAAAA,MAAUH,SAC9B,CAAGub,EAAAA,EAAE,IAAIxb,KAAKqJ,oBACd;AAAA,IAAC0S;AAAAA,MAAS,MAAMpH,gBAAc3U,IAAI,GAClCiH,SAASD,UAAU;AACzB,MAAI,CAACwU;AACG,UAAA,IAAI3c,MAAM,qCAAqC;AAGnD+b,MAAAA,YAAY,GAAGY,EAAE;AACfQ,QAAAA,UAAUzB,yBAAyBK,SAAS;AAC9CoB,MAAAA;AACIA,UAAAA;AAERpB,cAAYA,UAAUe,WAAWlB,YAAY,IAAIG,UAAU5V,MAAM,CAAC,IAAI4V;AAEtE,QAAM,CAACqB,cAAc,IAAI,MAAM1Y,QAAQY,IAAI,CAACsX,YAAwBxU,MAAM,CAAC,CAAC,GACtEiV,cAAcD,eAAe5R,KAAM8R,CAASA,SAAAA,KAAKvd,SAASgc,SAAS,GACnElY,UACJwZ,eAAeA,YAAYlP,cACvB,mCAAmCkP,YAAYlP,WAAW,OAC1D;AAEF+O,SAAAA,QACF3b,OAAO2L,KAAK,0DAA0D6O,SAAS,GAAG,IAElF,MAAMjP,OAAOM,OAAO;AAAA,IAClB/D,MAAM;AAAA,IACNxF,SAAS,GAAGA,OAAO;AAAA;AAAA,IACnB8T,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGC,KAAK;AAAA,IACnC/O,UAAW8O,CACFA,UAAAA,UAAUxB,aAAa;AAAA,EAEjC,CAAA,GAGIa,YAAwBxU,QAAQ2T,SAAS,EAAEU,KAAK,MAAM;AAC3Dlb,WAAOuC,MAAM,oCAAoC;AAAA,EAAA,CAClD;AACH,GC9Ca2Z,mBAAqC,OAAOtc,MAAMC,YAAY;AACnE,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQuL;AAAAA,EAAAA,IAAU1L,SAC9B,CAAA,EAAG4W,OAAOqE,aAAa,IAAIlb,KAAKqJ,oBAChC9I,QAAQP,KAAKQ,YACbyG,SAASD,aAETmU,YAAYtE,SAAS0D,yBAAyB1D,KAAK;AACrDsE,MAAAA;AACI,UAAA,IAAItc,MAAMsc,SAAS;AAG3B,QAAM,CAAC1N,UAAUwO,cAAc,IAAI,MAAM1Y,QAAQY,IAAI,CACnD8C,OAAOwG,SAASC,KAAO4N,EAAAA,KAAMC,UAASA,KAAKjW,IAAKkW,CAAOA,OAAAA,GAAG5c,IAAI,CAAC,GAC/D6c,YAAwBxU,MAAM,CAAC,CAChC,GACKmU,UAAUa,eAAe3W,IAAKiX,CAAAA,OAAOA,GAAG3d,IAAI;AAElD,MAAIgc,YAAY,OAAO/D,SAAS2D,0BAA0B7O,MAAM,IAC5D+P,kBAAkBd;AAQtB,MANIA,UAAUe,WAAWlB,YAAY,IACnCG,YAAYA,UAAU5V,MAAM,CAAC,IAE7B0W,kBAAkB,GAAGjB,YAAY,GAAGG,SAAS,IAG3C,CAACQ,QAAQxC,SAASgC,SAAS;AAC7B,UAAM,IAAI/b,MAAM,kBAAkB6c,eAAe,mBAAmB;AAGhE1O,QAAAA,cAAc,OAAOkO,iBAAiB9N,qBAAqBzB,MAAM,IACjEiQ,aAAa7O,oBAAoBC,WAAW;AAC9C4O,MAAAA;AACI,UAAA,IAAI/c,MAAM+c,UAAU;AAGxB,MAAA,CAACnO,SAASmL,SAAS5L,WAAW;AAChC,UAAM,IAAInO,MAAM,YAAYmO,WAAW,mBAAmB;AAG5D,QAAMkP,cAAcD,eAAe5R,KAAM8R,CAASA,SAAAA,KAAKvd,SAASgc,SAAS;AAErEsB,MAAAA,eAAeA,YAAYlP,aAAa;AAC1C,QAAIkP,YAAYlP,gBAAgBA;AAC9B,YAAM,IAAInO,MAAM,iBAAiB6c,eAAe,sBAAsB1O,WAAW,EAAE;AAGhFzM,UAAMwb,SACT,MAAMpQ,OAAOM,OAAO;AAAA,MAClB/D,MAAM;AAAA,MACNxF,SAAS,oCAAoCwZ,YAAYlP,WAAW;AAAA;AAAA;AAAA,MAEpEwJ,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGnP,YAAY;AAAA,MAC1CK,UAAW8O,CACFA,UAAAA,UAAU,SAAS;AAAA,IAAA,CAE7B;AAAA,EAAA;AAID,MAAA;AACF,UAAMX,YAAwBxU,QAAQ2T,WAAW5N,WAAW,GAC5D5M,OAAOuC,MAAM,iBAAiB+Y,eAAe,cAAc1O,WAAW,eAAe;AAAA,WAC9E7M,KAAK;AACZ,UAAM,IAAItB,MAAM;AAAA,EAA+BsB,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAEhE;AC9DA,SAASiS,gBAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EAAE6W,OAAO,SAAS;AAAA,IAAC3T,MAAM;AAAA,EAAU,CAAA,EAAEkB;AAC/F;AAEaoT,MAAAA,qBAAoD,OAAOxc,MAAMC,YAAY;AAClF,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQuL;AAAAA,MAAU1L,SAC9B,CAAG4W,EAAAA,KAAK,IAAI7W,KAAKqJ,oBACjB;AAAA,IAAC0S;AAAAA,EAAAA,IAAS,MAAMpH,gBAAc3U,IAAI,GAClCiH,SAASD,UAETmU,GAAAA,YAAYtE,SAAS0D,yBAAyB1D,KAAK;AACrDsE,MAAAA;AACI,UAAA,IAAItc,MAAMsc,SAAS;AAG3B,QAAMc,iBAAiB,MAAMR,YAAwBxU,MAAM;AAE3D,MAAI2T,YAAY,OAAO/D,SAAS2D,0BAA0B7O,MAAM,IAC5D+P,kBAAkBd;AAElBA,YAAUe,WAAWlB,YAAY,IACnCG,YAAYA,UAAU5V,MAAM,CAAC,IAE7B0W,kBAAkB,GAAGjB,YAAY,GAAGG,SAAS;AAI/C,QAAMsB,
cAAcD,eAAe5R,KAAM8R,CAASA,SAAAA,KAAKvd,SAASgc,SAAS;AACzE,MAAI,CAACsB;AACH,UAAM,IAAIrd,MAAM,kBAAkB6c,eAAe,kBAAkB;AAGrE,MAAI,CAACQ,YAAYlP;AACf,UAAM,IAAInO,MAAM,kBAAkB6c,eAAe,8BAA8B;AAG7EK,UACF3b,OAAO2L,KAAK,2DAA2D2P,eAAe,GAAG,IAEzF,MAAM/P,OAAOM,OAAO;AAAA,IAClB/D,MAAM;AAAA,IACNxF,SAAS,mEAAmEwZ,YAAYlP,WAAW;AAAA;AAAA;AAAA,IAEnGwJ,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGnP,YAAY;AAAA,IAC1CK,UAAW8O,CACFA,UAAAA,UAAU,SAAS;AAAA,EAAA,CAE7B;AAGC,MAAA;AACF,UAAMjU,SAAS,MAAMsT,YAAwBxU,QAAQ2T,SAAS;AAC9Dxa,WAAOuC,MACL,iBAAiB+Y,eAAe,kBAAkBvT,OAAO6E,WAAW,eACtE;AAAA,WACO7M,KAAK;AACZ,UAAM,IAAItB,MAAM;AAAA,EAAiCsB,IAAIuC,OAAO,EAAE;AAAA,EAAA;AAElE,GC/DMiI,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAoCX8R,eAAqC;AAAA,EACzC7d,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA,CAACyc,IAAI,IAAI1c,KAAKqJ;AACpB,YAAQqT,MAAI;AAAA,MACV,KAAK;AACGzB,cAAAA,mBAAmBjb,MAAMC,OAAO;AACtC;AAAA,MACF,KAAK;AACG6b,cAAAA,mBAAmB9b,MAAMC,OAAO;AACtC;AAAA,MACF,KAAK;AACGuc,cAAAA,mBAAmBxc,MAAMC,OAAO;AACtC;AAAA,MACF,KAAK;AACGqc,cAAAA,iBAAiBtc,MAAMC,OAAO;AACpC;AAAA,MACF;AACE,cAAM,IAAIpB,MAAMqa,iBAAAA;AAAAA;AAAAA;AAAAA,SAGf;AAAA,IAAA;AAAA,EACL;AAEJ;ACrDsByD,eAAAA,oBACpBpc,OACAN,SACe;AACT,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C,SAC7BgH,SAASD,aACTrB,YAAYsB,OAAOK,OAAO,EAAE3B,WAC5B4K,QAA2C,CAAC;AAC9C7D,MAAAA;AAEAnM,QAAMqc,UAAUrc,MAAMqc,UAAU,MAClCrM,MAAMqM,SAAS,GAAGrc,MAAMqc,MAAM,KAE5Brc,MAAMiQ,SAASjQ,MAAMiQ,QAAQ,MAC/BD,MAAMC,QAAQ,GAAGjQ,MAAMiQ,KAAK;AAG1B,MAAA;AACS,eAAA,MAAMvJ,OAAOsH,QAAiC;AAAA,MACvDC,QAAQ;AAAA,MACRG,KAAK,aAAahJ,SAAS;AAAA,MAC3B4K;AAAAA,IAAAA,CACD;AAAA,WACMlQ,OAAO;AACVA,UAAMmM,aACRpM,OAAOC,MAAM,GAAGuC,OAAMyF,IAAI;AAAA,EAA8BhI,MAAMqM,SAASC,KAAKjK,OAAO,EAAE,CAAC;AAAA,CAAI,IAE1FtC,OAAOC,MAAM,GAAGuC,OAAMyF,IAAI;AAAA,EAA8BhI,MAAMqC,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,EAAA;AAI5EgK,MAAAA,YAAYA,SAASjE,SAAS,GAAG;AAC7B8O,UAAAA,QAAQ,IAAIC,0BAAM;AAAA,MACtBE,OAAO;AAAA,MACPD,SAAS,CACP;AAAA,QAAC7Y,MAAM;AAAA,QAAM8Y,OAAO;AAAA,QAAUC,WAAW;AAAA,MAAA,GACzC;AAAA,QAAC/Y,MAAM;AAAA,QAAiB8Y,OAAO;AAAA,QAAkBC,WAAW;AAAA,MAAA,GAC5D;AAAA,QAAC/Y,MAAM;AAAA,QAAiB8Y,OAAO;AAAA,QAAkBC,WAAW;AAAA,MAAA,GAC5D;AAAA,QAAC/Y,MAAM;AAAA,QAAS8Y,OAAO;AAAA,QAASC,WAAW;AAAA,MAAA,GAC3C;AAAA,QAAC/Y,MAAM;AAAA,QAAe8Y,OAAO;AAAA,QAAgBC,WAAW;AAAA,MAAA,GACxD;AAAA,QAAC/Y,MAAM;AAAA,QAAe8Y,OAAO;AAAA,QAAgBC,WAAW;AAAA,MAAA,GACxD;AAAA,QAAC/Y,MAAM;AAAA,QAAa8Y,OAAO;AAAA,QAAcC,WAAW;AAAA,MAAO,CAAA;AAAA,IAAA,CAE9D;AAEDjL,aAAS7F,QAASgW,CAAQ,QAAA;AAClB,YAAA;AAAA,QAACpX;AAAAA,QAAIqX;AAAAA,QAAO/a;AAAAA,QAAWgb;AAAAA,QAAWC;AAAAA,QAAe9B;AAAAA,QAAe+B;AAAAA,MAAAA,IAAeJ;AAErF,UAAIK,cAAc;AACdnb,oBAAc,OAChBmb,cAAcC,QAAoBC,oBAAAA,QAAAA,SAASrb,SAAS,CAAC;AAGvD,UAAIsb,YAAY;AACZN,oBAAc,OAChBM,YAAYC,uBAAeF,QAAAA,SAASL,SAAS,GAAGK,QAAAA,SAASrb,SAAS,CAAC;AAGjEwb,UAAAA;AACJ,cAAQT,OAAK;AAAA,QACX,KAAK;AACK,kBAAA;AACR;AAAA,QACF,KAAK;AACK,kBAAA;AACR;AAAA,QACF,KAAK;AACK,kBAAA;AACR;AAAA,QACF;AACU,kBAAA;AAAA,MAAA;AAGZvF,YAAMK,OACJ;AAAA,QACEnS;AAAAA,QACAqX;AAAAA,QACAG;AAAAA,QACAC,aAAa,GAAGA,WAAW;AAAA,QAC3BG;AAAAA,QACAL;AAAAA,QACA9B;AAAAA,MAAAA,GAEF;AAAA,QAACqC;AAAAA,MAAAA,CACH;AAAA,IAAA,CACD,GAEDhG,MAAMQ,WAAW;AAAA,EACnB;AACE3X,WAAOuC,MAAM,iDAAiD;AAElE;AC5GO,MAAM6a,eAAeA,CAACvW,QAAsB0H,KAAa3G,SAAS,OAAkB;AACnFV,QAAAA,SAASL,OAAOK,OAAO;AAEtB,SAAA,GADMU,SAASV,OAAOmW,SAASnW,OAAOyK,GAC/B,IAAIpD,IAAI4K,QAAQ,OAAO,EAAE,CAAC;AAC1C,GCOM5O,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AuCjB,SAASgK,gBAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrD6W,OAAO,UAAU;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAS,EACjC2T,OAAO,QAAQ;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAU,EAChC2T,OAAO,SAAS;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAS,EAChC2T,OAAO,UAAU;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAS,EACjC2T,OAAO,gBAAgB;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAU,EACxC2T,OAAO,UAAU;AAAA,IAAC3T,MAAM;AAAA,EAAU,CAAA,EAAEkB;AACzC;AAEA,MAAM2G,WAAYgC,CAAAA,SACT,IAAI2L,KAAAA,WAAqCC,CAAa,aAAA;AAC3D,MAAIC,iBAAiB,IAAIC,qBAAAA,QAAY9L,IAAG,GACpC+L,UAAU;AAEd,WAASC,QAAQ1d,OAAgB;AAC3Bud,sBACFA,eAAeI,MAGjBnR,GAAAA,QAAM,mBAAmBxM,KAAK,EAAE,GAC5Byd,CAGJH,YAAAA,SAASM,KAAK;AAAA,MAAC/V,MAAM;AAAA,IAAY,CAAA,GACjC0V,iBAAiB,IAAIC,6BAAY9L,IAAG;AAAA,EAAA;AAGtC,WAASmM,eAAe7d,OAAqB;AAC3Cyd,cAAU,IACVF,eAAeI,MACfL,GAAAA,SAAStd,MAAMA,KAAK;AAAA,EAAA;AAGtB,WAAS8d,UAAUC,OAAqB;AACtC,UAAMxR,OAAOxK,KAAK6G,MAAMmV,MAAMxR,IAAI;AAC9BA,SAAKkQ,UAAU,YACjBjQ,QAAM,wBAAwBuR,KAAK,GACnCT,SAAStd,MAAM+d,KAAK,KACXxR,KAAKkQ,UAAU,eACxBjQ,QAAM,2BAA2BuR,KAAK,GACtCC,WAAW,MAEXxR,QAAM,4BAA4BuR,KAAK,GACvCT,SAASM,KAAKrR,IAAI;AAAA,EAAA;AAItB,WAASyR,aAAa;AACLC,mBAAAA,oBAAoB,SAASP,OAAO,GACnDH,eAAeU,oBAAoB,iBAAiBJ,cAAc,GAClEN,eAAeU,oBAAoB,OAAOH,SAAS,GACnDP,eAAeU,oBAAoB,QAAQD,UAAU,GACrDT,eAAeI,MAAAA,GACfL,SAASY,SAAS;AAAA,EAAA;AAGpBX,iBAAeY,iBAAiB,SAAST,OAAO,GAChDH,eAAeY,iBAAiB,iBAAiBN,cAAc,GAC/DN,eAAeY,iBAAiB,OAAOL,SAAS,GAChDP,eAAeY,iBAAiB,QAAQH,UAAU;AACpD,CAAC,GAGGI,iBAAiBA,CACrBC,OACAzX,QACA7G,WACkB;AAClB,MAAIue,kBAAkB;AAEtB,QAAMrd,UAAUlB,OAAOkB,QAAQ,CAAE,CAAA,EAAE3C,MAAM,GACnCigB,YAAYpB,aAAavW,QAAQ,QAAQyX,KAAK,SAAS;AAEvD,SAAA7R,QAAA,gBAAgB+R,SAAS,EAAE,GAE1B,IAAIrb,QAAQ,CAAC7C,SAAS+C,WAAW;AAC7Bmb,aAAAA,SAAS,EAAEC,UAAU;AAAA,MAC5BZ,MAAOG,CAAU,UAAA;AACX,eAAOA,MAAMrO,YAAa,aAC5B4O,kBAAkBP,MAAMrO,WAG1BzO,QAAQ2S,OAAO,qBAAqB0K,eAAe;AAAA,MACrD;AAAA,MACAte,OAAQF,CAAQ,QAAA;AACNsC,gBAAAA,QACRgB,OAAO,IAAI5E,MAAM,GAAGsB,IAAIyM,IAAI,EAAE,CAAC;AAAA,MACjC;AAAA,MACA2R,UAAUA,MAAM;AACNhc,gBAAAA,QAAQ,gBAAgB,GAChC7B,QAAQ;AAAA,MAAA;AAAA,IACV,CACD;AAAA,EAAA,CACF;AACH,GAEMoe,qBAA6D;AAAA,EACjElgB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aACE;AAAA,EACFI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQuL;AAAAA,MAAQ/I,OAAAA;AAAAA,IAAAA,IAAS3C,SAErCM,QAA0B,MAAMoU,gBAAc3U,IAAI,GAClDiH,SAASD,UAAU;AAEzB,QAAIzG,MAAMmN,MAAM;AACRiP,YAAAA,oBAAoBpc,OAAON,OAAO;AACxC;AAAA,IAAA;AAGF,QAAIM,MAAMwe,QAAQ;AAChB,YAAML,QAAQne,MAAMwe;AAEpB,UAAI,CAACL;AACG,cAAA,IAAI7f,MAAM,uBAAuB;AAGnC4f,YAAAA,eAAeC,OAAOzX,QAAQ7G,MAAM;AAC1C;AAAA,IAAA;AAGF,UAAM,CAAC4c,eAAe9B,aAAa,IAAIlb,KAAKqJ,oBACtC2V,oBAAoB/F,CAAQ1Y,CAAAA,MAAM,cAAc,GAEhD4a,YAAY6B,iBAAiBjQ,oBAAoBiQ,aAAa;AAChE7B,QAAAA;AACI,YAAA,IAAItc,MAAMsc,SAAS;AAG3B,UAAM8D,mBAAmB,MAAMhY,OAAOwG,SACnCC,KAAK,EACL4N,KAAM7N,CAAaA,aAAAA,SAASnI,IAAKkW,CAAOA,OAAAA,GAAG5c,IAAI,CAAC,GAE7CsgB,oBAAoB,OAAOlC,iBAC/B5P,qBAAqBzB,QAAQ;AAAA,MAACjJ,SAAS;AAAA,IAAA,CAAuB;AAC5D,QAAA,CAACuc,iBAAiBrG,SAASsG,iBAAiB;AAC9C,YAAM,IAAIrgB,MAAM,mBAAmBqgB,iBAAiB,iBAAiB;AAGvE,UAAMC,oBAAoB,OAAOjE,iBAC/B9N,qBAAqBzB,QAAQ;AAAA,MAACjJ,SAAS;AAAA,IAAA,CAAuB;AAC5Duc,QAAAA,iBAAiBrG,SAASuG,iBAAiB;AAC7C,YAAM,IAAItgB,MAAM,mBAAmBsgB,iBAAiB,kBAAkB;AAGlEhf,UAAAA,MAAM4M,oBAAoBoS,iBAAiB;AAC7Chf,QAAAA;AACI,YAAA,IAAItB,MAAMsB,GAAG;AAGjB,QAAA;AACIuM,YAAAA,WAAW,MAAMzF,OAAOsH,QAA6B;AAAA,QACzDC,QAAQ;AAAA,QACRG,KAAK,aAAauQ,iBAAiB;AAAA,QACnCvS,MAAM;AAAA,UACJuO,eAAeiE;AAAAA,UACfC,aAAaJ;AAAAA,QAAAA;AAAAA,MACf,CACD;AAcD,UAZA5e,OAAOuC,MACL,mBAAmBC,OAAMiM,MAAMqQ,iBAAiB,CAAC,OAAOtc,OAAMiM,MAAMsQ,iBAAiB,CAAC,KACxF,GAEKH,qBACH5e,OAAOuC,MACL,+GACF,GAGFvC,OAAOuC,MAAM,OAAOC,OAAMiM,MAAMnC,SAASgS,KAA
K,CAAC,UAAU,GAErDne,MAAM8e;AACR;AAGF,YAAMZ,eAAe/R,SAASgS,OAAOzX,QAAQ7G,MAAM,GACnDA,OAAOuC,MAAM,OAAOC,OAAMiM,MAAMnC,SAASgS,KAAK,CAAC,YAAY;AAAA,aACpDre,OAAO;AACVA,YAAMmM,aACRpM,OAAOuC,MAAM,GAAGC,OAAMyF,IAAI;AAAA,EAA4BhI,MAAMqM,SAASC,KAAKjK,OAAO,EAAE,CAAC;AAAA,CAAI,IAExFtC,OAAOuC,MAAM,GAAGC,OAAMyF,IAAI;AAAA,EAA4BhI,MAAMqC,OAAO,EAAE,CAAC;AAAA,CAAI;AAAA,IAAA;AAAA,EAE9E;AAEJ,GC7OMiI,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAUX2U,eAAe,CAAC,WAAW,UAAU,QAAQ,GAM7CC,uBAA0D;AAAA,EAC9D3gB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQuL;AAAAA,IAAAA,IAAU1L,SAC9BM,QAAQP,KAAKQ,YACb,CAACkF,OAAO,IAAI1F,KAAKqJ,oBACjBpC,SAASD,UAAU,GAEnBmU,YAAYzV,WAAWqH,oBAAoBrH,OAAO;AACpDyV,QAAAA;AACI,YAAA,IAAItc,MAAMsc,SAAS;AAGrB,UAAA,CAAC1N,UAAU4N,eAAe,IAAI,MAAM9X,QAAQY,IAAI,CACpD8C,OAAOwG,SAASC,OAAO4N,KAAMC,CAAAA,SAASA,KAAKjW,IAAKkW,CAAAA,OAAOA,GAAG5c,IAAI,CAAC,GAC/DqI,OAAOsH,QAAQ;AAAA,MAACI,KAAK;AAAA,IAAY,CAAA,CAAC,CACnC;AAED,QAAIpO,MAAMif,cAAc,CAACF,aAAa1G,SAASrY,MAAMif,UAAU;AAC7D,YAAM,IAAI3gB,MAAM,oBAAoB0B,MAAMif,UAAU,eAAe;AAGrE,UAAMxS,cAAc,OAAOtH,WAAW0H,qBAAqBzB,MAAM;AAC7D8B,QAAAA,SAASmL,SAAS5L,WAAW;AAC/B,YAAM,IAAInO,MAAM,YAAYmO,WAAW,kBAAkB;AAGrDyS,UAAAA,mBAAmBpE,gBAAgBzC,SAAS,gBAAgB;AAC5D/L,YAAA,8BAA8B4S,mBAAmB,QAAQ,QAAQ;AAGjEC,UAAAA,UAAU,QADOD,mBAAmBlf,MAAMif,aAAa,aACpBG,2BAA2BhU,QAAQvL,MAAM;AAE9E,QAAA;AACI6G,YAAAA,OAAOwG,SAASS,OAAOlB,aAAa;AAAA,QAAC0S;AAAAA,MAAAA,CAAQ,GACnDtf,OAAOuC,MAAM,8BAA8B;AAAA,aACpCxC,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA6BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC5D;AAEJ;AAEA,eAAeid,2BAA2BhU,QAAqBvL,QAAsB;AAC7Ewf,QAAAA,OAAO,MAAMjU,OAAOM,OAA6B;AAAA,IACrD/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACToL,SAAS,CACP;AAAA,MACEvF,OAAO;AAAA,MACP3J,MAAM;AAAA,IAAA,GAER;AAAA,MACE2J,OAAO;AAAA,MACP3J,MAAM;AAAA,IACP,CAAA;AAAA,EAAA,CAEJ;AAEGghB,SAAAA,SAAS,aACXxf,OAAOuC,MACL;AAAA,CACF,GAGKid;AACT;AC3FA,IAAe,eAAA;AAAA,EACbhhB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACDA,MAAMmV,2BAAiD;AAAA,EACrDjhB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPF,UAAU;AAAA,EACVH,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtB,CAAC6K,QAAQ0Q,IAAIkE,OAAO,IAAI1f,KAAKqJ,oBAC7BpC,SAASD,UAAU;AAErB,QAAA,CAACC,OAAOwG,SAASqS;AACnB,YAAM,IAAIjhB,MAAM;AAAA,6BAAmE;AAGrF,QAAI,CAACiM;AACG,YAAA,IAAIjM,MAAM,mCAAmC;AAGrD,QAAI,CAAC,CAAC,OAAO,KAAK,EAAE+Z,SAAS9N,MAAM;AAC3B,YAAA,IAAIjM,MAAM,uCAAuC;AAGzD,QAAI,CAAC2c;AACG,YAAA,IAAI3c,MAAM,+BAA+B;AAG7CiM,QAAAA,WAAW,SAAS,CAAC4U;AACjB,YAAA,IAAI7gB,MAAM,mDAAmD;AAGrE,UAAM6G,UAAU,GAAG8V,EAAE,IACfQ,UAAUjP,oBAAoBrH,OAAO;AACvCsW,QAAAA;AACI,YAAA,IAAInd,MAAMmd,OAAO;AAGnB7V,UAAAA,WAAW,MAAMc,OAAOwG,SAASC,KAAAA,GAAQrD,KAAM0V,CAAAA,SAASA,KAAKnhB,SAAS8G,OAAO;AAEnF,QAAI,CAACS;AACG,YAAA,IAAItH,MAAM,mBAAmB;AAGrC,QAAIiM,WAAW,OAAO;AACbnI,aAAAA,MAAMwD,QAAQuZ,OAAO;AAC5B;AAAA,IAAA;AAGEvZ,QAAAA,QAAQuZ,YAAYA,SAAS;AACxB/c,aAAAA,MAAM,uBAAuB+c,OAAO,QAAQ;AACnD;AAAA,IAAA;AAGEA,gBAAY,aACdtf,OAAOuC,MACL;AAAA,CACF,GAGF,MAAMsE,OAAOwG,SAASqS,KAAKpa,SAAS;AAAA,MAACga;AAAAA,IAAAA,CAAyC,GAC9Etf,OAAOuC,MAAM,4BAA4B;AAAA,EAAA;AAE7C,GC5DMgI,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUjB,SAASgK,gBAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EAAE6W,OAAO,SAAS;AAAA,IAAC3T,MAAM;AAAA,EAAU,CAAA,EAAEkB;AAC/F;AAMA,MAAM4W,uBAAiE;AAAA,EACrEphB,MAAM;AAAA,EACNiM,OAAO;AAAA,EAAA,UACPF;AAAAA,EACAH,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW2E;AAAAA,MAAQvL;AAAAA,QAAUH,SAC9B;AAAA,MAAC8b;AAAAA,IAAAA,IAAS,MAAMpH,g
BAAc3U,IAAI,GAClC,CAACwb,EAAE,IAAIxb,KAAKqJ;AAClB,QAAI,CAACmS;AACG,YAAA,IAAI3c,MAAM,+BAA+B;AAGjD,UAAM6G,UAAU,GAAG8V,EAAE,IACfQ,UAAUjP,oBAAoBrH,OAAO;AACvCsW,QAAAA;AACIA,YAAAA;AAGJD,YACF3b,OAAO2L,KAAK,4DAA4DrG,OAAO,GAAG,IAElF,MAAMiG,OAAOM,OAAO;AAAA,MAClB/D,MAAM;AAAA,MACNxF,SACE;AAAA;AAAA,MACF8T,QAAS4F,CAAAA,UAAU,GAAGA,KAAK,GAAGC,KAAK;AAAA,MACnC/O,UAAW8O,CACFA,UAAAA,UAAU1W,WAAW;AAAA,IAAA,CAE/B,GAGH,MAAMsB,UAAU,EAAEyG,SAASwS,OAAOva,OAAO,GACzCtF,OAAOuC,MAAM,8BAA8B;AAAA,EAAA;AAE/C,GClDMud,OAAOA,MAAM,MAEbvV,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAwCjB,SAASwV,aAAWC,UAA0C;AAC5D,QAAM7f,QAA2B,CAAC;AAClC,SAAI6f,SAASC,UACX9f,MAAM8f,QAAQ,GAAGD,SAASC,KAAK,GAAGC,MAAM,GAAG,IAGzCF,SAAS,mBAAmB,MAC9B7f,MAAMggB,mBAAmBC,SAASJ,SAAS,mBAAmB,GAAG,EAAE,IAGjE,OAAOA,SAASK,MAAQ,QAC1BlgB,MAAMkgB,MAAMxH,CAAAA,CAAQmH,SAASK,MAG3B,OAAOL,SAASM,SAAW,QAC7BngB,MAAMmgB,SAASzH,CAAQmH,CAAAA,SAASM,SAG9B,OAAON,SAASO,SAAW,QAC7BpgB,MAAMogB,SAAS1H,CAAQmH,CAAAA,SAASO,SAG9B,OAAOP,SAASQ,WAAa,QAC/BrgB,MAAMqgB,WAAW3H,CAAAA,CAAQmH,SAASQ,WAGhC,OAAOR,SAAS3J,YAAc,QAChClW,MAAMkW,YAAYwC,CAAQmH,CAAAA,SAAS3J,YAGjC,OAAO2J,SAASR,OAAS,QAC3Brf,MAAMqf,OAAOQ,SAASR,OAGjBrf;AACT;AASA,MAAMsgB,uBAA0D;AAAA,EAC9DjiB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,MAAOtC;AAAAA,MAASqL;AAAAA,IAAU1L,IAAAA,SAC9CgH,SAASD,aACT,CAACkU,eAAe4F,iBAAiB,IAAI9gB,KAAKqJ,oBAC1C9I,QAAQ4f,aAAWngB,KAAKQ,UAAU;AAExC,QAAIkF,UAAUwV,gBAAgB,GAAGA,aAAa,KAAK;AAC9CxV,gBACHA,UAAU,MAAM6H,oBAAoBtN,SAAS;AAAA,MAACyC,SAAS;AAAA,IAAA,CAA2B;AAG9EsZ,UAAAA,UAAUjP,oBAAoBrH,OAAO;AACvCsW,QAAAA;AACIA,YAAAA;AAKJ,QAAA,EADa,MAAM/U,OAAOwG,SAASC,KAAAA,GACzBrD,KAAM6J,CAAAA,QAAQA,IAAItV,SAAS8G,OAAO;AAC9C,YAAM,IAAI7G,MAAM,sBAAsB6G,OAAO,aAAa;AAItD,UAAA;AAAA,MAACC;AAAAA,IAAAA,IAAasB,OAAOK,OAAO;AAE3B3E,WAAAA,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,WAAW,CAAC,KAAKtS,OAAMgE,KAAKjB,SAAS,EAAEgB,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,SAAS,CAAC,KAAKtS,OAAMgE,KAAKlB,OAAO,EAAEiB,OAAO,EAAE,CAAC,SAAI,GAC9EvG,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,EAAE;AAEf,QAAIoe,kBAAkBD;AACjBC,wBACHA,kBAAkB,MAAMpV,OAAOM,OAAO;AAAA,MACpC/D,MAAM;AAAA,MACNxF,SAAS;AAAA,MACTuI,SAASlK,cAAKJ,QAAAA,KAAKL,SAAS,GAAGoF,OAAO,SAAS;AAAA,MAC/C8Q,QAAQD,KAAAA;AAAAA,IAAAA,CACT;AAGH,UAAMyK,aAAa,MAAMC,cAAcF,iBAAiBrb,SAASiG,QAAQpL,KAAK;AAC9E,QAAI,CAACygB,YAAY;AACf5gB,aAAOuC,MAAM,WAAW;AACxB;AAAA,IAAA;AAIEqe,mBAAe,OACjB5gB,OAAOuC,MAAM,sBAAsBC,OAAMgE,KAAKlB,OAAO,CAAC,SAAS9C,OAAMgE,KAAKoa,UAAU,CAAC,GAAG;AAG1F,QAAIE,cAAc,0BACd5f,UAAUlB,OAAOkB,QAAQ4f,WAAW,EAAEviB,MAAM;AAChD,UAAMwiB,aAAcpR,CAA4BA,cAAA;AAC1CA,MAAAA,UAAS8D,SAASqN,eACpB5f,QAAQiB,WACRjB,UAAUlB,OAAOkB,QAAQyO,UAAS8D,IAAI,EAAElV,WAC/BoR,UAAS8D,SAASqN,eAAenR,UAASjL,WACnDxD,QAAQ2S,OAAO,GAAGlE,UAAS8D,IAAI,KAAK9D,UAAS5J,OAAO,IAAI4J,UAASiE,KAAK,MAGxEkN,cAAcnR,UAAS8D;AAAAA,IAAAA,GAGnBlV,QAAQqD,KAAKjD,IAAI;AACnB,QAAA;AACF,YAAMqiB,+BAAc;AAAA,QAClBna;AAAAA,QACAvB;AAAAA,QACAsb;AAAAA,QACAG;AAAAA,QACA,GAAG5gB;AAAAA,MAAAA,CACJ,GACDe,QAAQiB,QAAQ;AAAA,aACTpC,KAAK;AACZmB,YAAAA,QAAQmB,QACFtC;AAAAA,IAAAA;AAGDwC,WAAAA,MAAM,oBAAoBoR,kBAAS/R,QAAAA,KAAKjD,QAAQJ,KAAK,CAAC,GAAG;AAAA,EAAA;AAEpE;AAGA,eAAesiB,cACbI,aACA3b,SACAiG,QACApL,OACA;AACA,MAAI8gB,gBAAgB;AACX,WAAA;AAGHC,QAAAA,UAAUvgB,cAAAA,QAAKwgB,WAAWF,WAAW,IACvCA,cACAtgB,cAAAA,QAAKL,QAAQnB,QAAQ4B,IAAI,GAAGkgB,WAAW;AAE3C,MAAIG,WAAW,MAAMxR,sBAAGyR,KAAKH,OAAO,EAAEI,MAAMxB,IAAI;AAC1CyB,QAAAA,gBAAgBH,WAAWA,SAASI,
OAAO,IAAI7gB,cAAAA,QAAKqR,SAASkP,OAAO,EAAEO,QAAQ,GAAG,MAAM;AAE7F,MAAI,CAACL,UAAU;AACb,UAAMM,aAAaH,gBAAgB5gB,cAAAA,QAAKgC,QAAQue,OAAO,IAAIA;AAErDtR,UAAAA,cAAAA,QAAGvO,MAAMqgB,YAAY;AAAA,MAACpgB,WAAW;AAAA,IAAA,CAAK;AAAA,EAAA;AAGxCqgB,QAAAA,YAAYJ,gBAAgBL,UAAUvgB,cAAAA,QAAKJ,KAAK2gB,SAAS,GAAG5b,OAAO,SAAS;AAGlF,SAFA8b,WAAW,MAAMxR,sBAAGyR,KAAKM,SAAS,EAAEL,MAAMxB,IAAI,GAE1C,CAAC3f,MAAMkW,aAAa+K,YAAYA,SAASI,OAAAA,KAOvC,CANoB,MAAMjW,OAAOM,OAAO;AAAA,IAC1C/D,MAAM;AAAA,IACNxF,SAAS,SAASqf,SAAS;AAAA,IAC3B9W,SAAS;AAAA,EAAA,CACV,IAGQ,KAIJ8W;AACT;ACtNA,MAAM5I,SAAU5S,CAAAA,QAAgB,WAAaA,GAAG,YAE1CoE,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA8DjB,SAASqX,YAAYC,MAAoC;AACvD,SAAO,OAAOA,OAAS,MAAchU,SAAYgL,CAAQgJ,CAAAA;AAC3D;AAEA,SAAS9B,WAAWC,UAA0C;AACtD8B,QAAAA,gCAAgCF,YAAY5B,SAAS,mCAAmC,CAAC,GACzF+B,qBAAqBH,YAAY5B,SAAS,sBAAsB,CAAC,GACjEG,mBAAmByB,YAAY5B,SAAS,mBAAmB,CAAC,GAC5DgC,gBAAgBJ,YAAY5B,SAAS,gBAAgB,CAAC,GACtDiC,6BAA6BL,YAAY5B,SAAS,+BAA+B,CAAC,GAClFkC,uBAAuBN,YAAY5B,SAAS,wBAAwB,CAAC,GACrE7G,UAAUyI,YAAY5B,SAAS7G,OAAO,GACtCgJ,UAAUP,YAAY5B,SAASmC,OAAO;AACrC,SAAA;AAAA,IACLL;AAAAA,IACAC;AAAAA,IACA5B;AAAAA,IACA8B;AAAAA,IACAC;AAAAA,IACAF;AAAAA,IACA7I;AAAAA,IACAgJ;AAAAA,EACF;AACF;AAEA,MAAMC,uBAA6C;AAAA,EACjD5jB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA;AAAAA,EAEAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,MAAO6f;AAAAA,QAAmBxiB,SAC9CM,QAAQ4f,WAAWngB,KAAKQ,UAAU,GAClC;AAAA,MACJ0hB;AAAAA,MACAC;AAAAA,MACA5B;AAAAA,MACA8B;AAAAA,MACAC;AAAAA,MACAF;AAAAA,IACE7hB,IAAAA,OAEE+Q,YAAYoR,qBAAqB1iB,KAAKQ,UAAU,GAChDyG,SAASD,UAET,GAAA,CAACuM,MAAMoP,MAAM,IAAI3iB,KAAKqJ;AAC5B,QAAI,CAACkK;AACH,YAAM,IAAI1U,MACR,iFAAiF+D,OAAMsS,KACrF,QACF,CAAC,cACH;AAGF,UAAMgG,gBAAgB,MAAM0H,uBAAuBD,QAAQ1iB,OAAO;AAC5D4M,YAAA,mCAAmCqO,aAAa,GAAG;AAEnD2H,UAAAA,QAAQ,gBAAgB1V,KAAKoG,IAAI;AACnCuP,QAAAA,aACAC,YACAC,iBAAiB;AAEjBH,QAAAA;AACFhW,cAAM,2CAA2C,GACjDiW,cAAc,MAAMG,aAAa1P,IAAI;AAAA,SAChC;AACL,YAAM2P,aAAaniB,cAAAA,QAAKL,QAAQnB,QAAQ4B,OAAOoS,IAAI,GAC7C4P,YAAY,MAAMnT,sBAAGyR,KAAKyB,UAAU,EAAExB,MAAM,MAAM,IAAI;AAC5D,UAAI,CAACyB;AACH,cAAM,IAAItkB,MAAM,GAAGqkB,UAAU,oCAAoC;AAGnEF,uBAAiBG,UAAUC,YAAAA,GACvBJ,iBACFF,cAAcI,cAEdH,aAAahiB,cAAAA,QAAKgC,QAAQmgB,UAAU,GACpCJ,cAAc,MAAMO,KAAAA,iBAAiBH,UAAU;AAAA,IAAA;AAInD,UAAMI,eAAerc,OAAOsc,MAAM,EAAEjc,OAAO;AAAA,MAAC5B,SAASwV;AAAAA,IAAAA,CAAc,GAG7D;AAAA,MAACvV;AAAAA,MAAWD;AAAAA,IAAAA,IAAW4d,aAAahc,OAAO;AAE1C3E,WAAAA,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,WAAW,CAAC,KAAKtS,OAAMgE,KAAKjB,SAAS,EAAEgB,OAAO,EAAE,CAAC,SAAI,GAClFvG,OAAOuC,MAAM,UAAKC,OAAMsS,KAAK,SAAS,CAAC,KAAKtS,OAAMgE,KAAKlB,OAAO,EAAEiB,OAAO,EAAE,CAAC,SAAI,GAC9EvG,OAAOuC,MAAM,6DAAmD,GAChEvC,OAAOuC,MAAM,wSAAmD,GAChEvC,OAAOuC,MAAM,EAAE;AAEf,QAAIue,aACAvC,iBACA6E,WACAC,eAAsD,MACtDC;AAEJ,aAASvC,WAAWpM,MAAqB;AACvC,YAAM4O,mBAAmB5O,KAAKf,OACxB4P,WAAW7O,KAAKlB,QAAQqN;AAU9B,UATAwC,UAAUG,cAAc9O,IAAI,GAExB4O,oBAAoB5O,KAAKf,UAAUe,KAAK5O,YACtCsd,gBACFK,cAAcL,YAAY,GAE5BA,eAAe,OAGbG;AACF;AAIF,YAAMG,WAAW7C,aACX8C,gBAAgBR,aAAaxhB,KAAKjD,IAAI;AAC5CykB,UAAAA,YAAYxhB,KAAKjD,OACjBmiB,cAAcnM,KAAKlB,MAEf8K,mBAAmBA,gBAAgBpc,SAAS;AAC9C,cAAM0hB,YAAYlQ,kBAAAA,QAAS/R,KAAKjD,IAAAA,IAAQilB,eAAe;AAAA,UACrDE,sBAAsB;AAAA,QAAA,CACvB;AACDvF,wBAAgB1K,OAAO,UAAU8P,QAAQ,KAAKE,SAAS,KACvDtF,gBAAgBpc,QAAQ;AAAA,MAAA;AAG1Boc,wBAAkBve,OAAOkB,QAAQ,QAAQyT,KAAKlB,IAAI,UAAU,EAAElV,MAE1D8kB,GAAAA,iBACFK,cAAcL,YAAY,GAC1BA,eAAe,OAGjBA,eAAeU,YAAY,MAAM;AAC/B,cAAMF,YAAYlQ,kBAAAA,QAAS/R,K
AAKjD,IAAAA,IAAQilB,eAAe;AAAA,UACrDE,sBAAsB;AAAA,QAAA,CACvB;AAEGvF,4BACFA,gBAAgB1K,OAAO,GAAGyP,OAAO,GAAG3O,KAAKlB,IAAI,KAAKoQ,SAAS;AAAA,SAE5D,EAAE;AAAA,IAAA;AAGP,aAASG,QAAQ;AAAA,MAAC9Z;AAAAA,IAAAA,GAA8B;AAC1CmZ,UAAAA,gBACFK,cAAcL,YAAY,GAG5BA,eAAe,MAEXnZ,WAAWkZ,aAAa7E,iBAAiB;AAC3C,cAAMsF,YAAYlQ,kBAAAA,QAAS/R,KAAKjD,IAAAA,IAAQykB,WAAW;AAAA,UACjDU,sBAAsB;AAAA,QAAA,CACvB;AACDvF,wBAAgB1K,OAAO,UAAUiN,WAAW,KAAK+C,SAAS,KAC1DtF,gBAAgBpc,QAAQ;AAAA,MAAA,MACfoc,oBACTA,gBAAgBlc,KAAK;AAAA,IAAA;AAKrB,QAAA;AACI,YAAA;AAAA,QAAC4hB;AAAAA,QAASC;AAAAA,MAAAA,IAAY,MAAMC,sBAAAA,QAAazB,aAAa;AAAA,QAC1D7b,QAAQqc;AAAAA,QACRP;AAAAA,QACAzR;AAAAA,QACA6P;AAAAA,QACAgB;AAAAA,QACAD;AAAAA,QACAG;AAAAA,QACAC;AAAAA,QACA/B;AAAAA,QACA6B;AAAAA,MAAAA,CACD;AAEO,cAAA;AAAA,QAAC9X,SAAS;AAAA,MAAA,CAAK,GAEvBlK,OAAOuC,MAAM;AAAA,GAAiD0hB,SAASnJ,aAAa,GACpFsJ,cAAcF,UAAUlkB,MAAM;AAAA,aACvBD,KAAK;AASZ,UARAikB,QAAQ;AAAA,QAAC9Z,SAAS;AAAA,MAAM,CAAA,GAQpB,EALF,CAACmY,mBACDtiB,IAAIuM,YACJvM,IAAIuM,SAASF,eAAe,OAC5BrM,IAAI0T,SAAS;AAGP1T,cAAAA;AAGFuC,YAAAA,UAAU,CACdvC,IAAIuC,SACJ,IACA,6BACA,yDACA,gEACA,EAAE,EACF/B,KAAK;AAAA,CAAI,GAGLN,QAAQ,IAAIxB,MAAM6D,OAAO;AACzB+hB,YAAAA,MAAAA,UAAUtkB,IAAIskB,SACpBpkB,MAAMqM,WAAWvM,IAAIuM,UACrBrM,MAAMqkB,eAAevkB,IAAIukB,cAEnBrkB;AAAAA,IAAAA;AAAAA,EACR;AAEJ;AAEA,eAAeuiB,uBAAuBD,QAAgB1iB,SAA4B;AAC1E,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,IAAQuL;AAAAA,EAAAA,IAAU1L,SAC9BgH,SAASD,UAAU;AAEzB,MAAI2b,QAAQ;AACJ3G,UAAAA,UAAUjP,oBAAoB4V,MAAM;AACtC3G,QAAAA;AACI,YAAA,IAAInd,MAAMmd,OAAO;AAAA,EAAA;AAI3BnP,UAAM,6BAA6B;AAC7BvL,QAAAA,UAAUlB,OAAOkB,QAAQ,6BAA6B,EAAE3C,MAAM,GAC9D8O,WAAW,MAAMxG,OAAOwG,SAASC,KAAK;AAC5CpM,UAAQiB,QAAQ,oCAAoC;AAEpD,MAAI2Y,gBAAgByH,SAAS,GAAGA,MAAM,KAAK;AAC3C,MAAI,CAACzH;AACa,oBAAA,MAAM3N,oBAAoBtN,SAAS;AAAA,MACjDyC,SAAS;AAAA,MACT8K,eAAe;AAAA,IAAA,CAChB;AAAA,WACQ,CAACC,SAASpD,KAAM3E,aAAYA,QAAQ9G,SAASsc,aAAa,GAAG;AAQtE,QAPArO,QAAM,uDAAuD,GAOzD,CANiB,MAAMlB,OAAOM,OAAO;AAAA,MACvC/D,MAAM;AAAA,MACNxF,SAAS,YAAYwY,aAAa;AAAA,MAClCjQ,SAAS;AAAA,IAAA,CACV;AAGC,YAAM,IAAIpM,MAAM,YAAYqc,aAAa,kBAAkB;AAGvDjU,UAAAA,OAAOwG,SAASS,OAAOgN,aAAa;AAAA,EAAA;AAGrCA,SAAAA;AACT;AAEA,SAASwH,qBAAqBniB,OAA0B;AAChD,QAAA;AAAA,IAACgZ;AAAAA,IAASgJ;AAAAA,EAAAA,IAAWhiB;AAC3B,MAAIgZ,WAAWgJ;AACP,UAAA,IAAI1jB,MAAM,yCAAyC;AAG3D,SAAI0B,MAAMgZ,UACD,oBAGLhZ,MAAMgiB,UACD,sBAGF;AACT;AAEA,SAASsB,cAAc9O,MAAqB;AAC1C,MAAI,CAACA,KAAKf,SAAS,OAAOe,KAAK5O,UAAY;AAClC,WAAA;AAGT,QAAMud,UAAUrd,KAAK+N,MAAOW,KAAK5O,UAAU4O,KAAKf,QAAS,GAAG;AAC5D,SAAO,IAAI2Q,kBAAS,QAAA,GAAGjB,OAAO,IAAI,GAAG,GAAG,CAAC;AAC3C;AAEA,SAAST,aAAalR,MAAa;AACjBJ,SAAAA,MAAAA,MAAM,CAACE,mBAAQ;AAAA,IAAC+S,UAAU;AAAA,EAAK,CAAA,CAAC,CAAC,EAClC;AAAA,IAAC7S,KAAAA;AAAAA,IAAKW,QAAQ;AAAA,EAAA,CAAK;AACpC;AAEA,SAAS8R,cAAcF,UAA2BlkB,QAAsB;AACtE,QAAMykB,aAAaP,SAAS9N,OAAQzK,CAAAA,UAASA,MAAK7D,SAAS,OAAO;AAElE,MAAI,CAAC2c,WAAWpc;AACd;AAGF,QAAMsD,QAAQ3L,OAAO2L,QAAQ3L,OAAOuC,OAAOmiB,KAAK1kB,MAAM;AAEjD+Y,OAAAA,OAAO,2CAAsC,GAAG0L,WAAWpc,SAAS,IAAI,WAAW,OAAO,GAE/F6b,SAASzd,QAASwS,CAAY,YAAA;AACvB,SAAA,KAAKA,QAAQtH,GAAG,EAAE;AAAA,EAAA,CACxB;AACH;AC7XagT,MAAAA,qBAAuC,OAAO/kB,MAAMC,YAAY;AACrE,QAAA;AAAA,IAAC+G;AAAAA,IAAW5G;AAAAA,EAAAA,IAAUH,SACtBgH,SAASD,UAAAA,GAEToU,UAAU,MAAMK,YAAwBxU,MAAM;AACpD7G,SAAOuC,MACLyY,QACG9V,IAAK4O,CAAAA,QAAQ,GAAGuG,YAAY,GAAGvG,IAAItV,IAAI,OAAOsV,IAAIlH,eAAe,YAAY,EAAE,EAC/ErM,KAAK;AAAA,CAAI,CACd;AACF,GCXMqkB,sBAA4C;AAAA,EAChDpmB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPF,UAAU;AAAA,EACVH,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SAEtBwN,WAAW,MADFzG,UAAU,EACKyG,SAASC,KAAK;AAC5CtN,WAAOuC,MAAM8K,SAASnI,IAAK4O,SAAQA,IAAItV,IAAI,EAAE+B,KAAK;AAAA,CAAI,CAA
C,GAGvD,MAAMokB,mBAAmB/kB,MAAMC,OAAO;AAAA,EAAA;AAE1C,GCXM0K,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeXsa,gBAAsC;AAAA,EAC1CrmB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,mBAAmC;AAAA,MAEjDgL,QAAQjL,MAAMC,OAAO;AAAA,EAElC0K,UAAAA;AACF,GC9BMA,aAAW;AAAA;AAAA;AAAA,GAKXua,kBAAwC;AAAA,EAC5CtmB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,qBAAqC;AAAA,MAEnDgL,QAAQjL,MAAMC,OAAO;AAAA,EAElC0K,UAAAA;AACF,GCIMA,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GA0BXwa,yBAA4D;AAAA,EAChEvmB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA;AAAA,EAEbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,QAAUH,SACtB;AAAA,MAACsZ;AAAAA,MAASgJ;AAAAA,MAAS6C;AAAAA,MAAO3f;AAAAA,MAAIC;AAAAA,IAAAA,IAAW1F,KAAKQ,YAC9C,CAAC+S,IAAI,IAAIvT,KAAKqJ,oBACdgc,WAAWrlB,KAAKQ,WAAW8kB,OAC3Bre,SAASvB,UAAUsB,YAAYuc,QAAQjc,OAAO;AAAA,MAAC5B;AAAAA,IAAQ,CAAA,IAAIsB,UAAU;AAE3E,QAAIuS,WAAWgJ;AACP,YAAA,IAAI1jB,MAAM,yCAAyC;AAG3D,QAAI4G,MAAM8N;AACF,YAAA,IAAI1U,MAAM,6CAA6C;AAG/D,QAAIyS,YAAmC;AAKvC,SAJIiI,WAAWgJ,aACbjR,YAAYiI,UAAU,oBAAoB,sBAGxChG,MAAM;AACFgS,YAAAA,cAAcxkB,sBAAKL,QAAQnB,QAAQ4B,IAAOoS,GAAAA,IAAI,GAC9C9O,UAAU6gB,eAAAA,QAAMrc,MAAM,MAAM+G,cAAAA,QAAGwV,SAASD,aAAa,MAAM,CAAC,GAC5Dpd,SAAS,MAAMsd,eAAehhB,SAAS6M,WAAWrK,MAAM;AAC9D7G,aAAOuC,MAAM+iB,iBAAiBvd,QAAQmJ,SAAS,CAAC;AAChD;AAAA,IAAA;AAIF,UAAMqU,QAAQlgB,MAAMmgB,KAAAA,KAAK,GACnBC,MAAMR,WAAW,UAAU,QAC3BS,UAAU/kB,cAAKJ,QAAAA,KAAKolB,YAAG1Q,QAAAA,OAAAA,GAAU,cAAc,GAAGsQ,KAAK,IAAIE,GAAG,EAAE,GAChExjB,YAAYgjB,WAAWC,eAAAA,QAAMjjB,YAAYD,KAAKC,WAC9C2jB,eAAgBvgB,MAAO,MAAMwB,OAAOc,YAAYtC,EAAE,KAAO;AAAA,MAACD,KAAKmgB;AAAAA,MAAOzb,OAAO;AAAA,IAAY;AACzF8F,UAAAA,cAAAA,QAAGvO,MAAMV,sBAAKJ,KAAKolB,YAAAA,QAAG1Q,OAAO,GAAG,YAAY,GAAG;AAAA,MAAC3T,WAAW;AAAA,IAAA,CAAK,GACtE,MAAMsO,sBAAG7N,UAAU2jB,SAASzjB,UAAU2jB,cAAc,MAAM,CAAC,GAAG,MAAM;AAEpE,UAAMC,SAASC,UAAU;AACrBd,aAEFe,uBAAuBL,OAAO,GAC9B1lB,OAAOuC,MAAM,eAAemjB,OAAO,EAAE,GACrC1lB,OAAOuC,MAAM,gDAAgD,GAC7DvC,OAAOuC,MAAM,kDAAkD,GAC/DyjB,0BAAShB,MAAMU,OAAO,EAAExW,GAAG,UAAU,OACnClP,OAAOuC,MAAM,EAAE,GACR0jB,8BAA8BP,OAAO,EAC7C,GACDQ,eAAAA,QAAML,OAAOM,KAAKN,OAAOjmB,KAAKwmB,OAAOV,OAAO,GAAG;AAAA,MAACW,OAAO;AAAA,IAAA,CAAU,MAGjEH,eAAAA,QAAMplB,KAAK+kB,OAAOM,KAAKN,OAAOjmB,KAAKwmB,OAAOV,OAAO,GAAG;AAAA,MAACW,OAAO;AAAA,IAAU,CAAA,GACtE,MAAMJ,8BAA8BP,OAAO,GAC3C,MAAM9V,cAAAA,QAAG0W,OAAOZ,OAAO,EAAEpE,MAAMxB,cAAAA,OAAI;AAGrC,mBAAemG,8BAA8BM,UAAkB;AACzDliB,UAAAA;AACA,UAAA;AACFA,kBAAU6gB,uBAAMrc,MAAM,MAAM+G,sBAAGwV,SAASmB,UAAU,MAAM,CAAC;AAAA,eAClDxmB,KAAK;AACZC,eAAOC,MAAM,yBAAyBF,IAAIuC,OAAO,EAAE;AACnD;AAAA,MAAA;AAGEkkB,UAAAA,iBAAAA,QAAQniB,SAASuhB,YAAY,GAAG;AAClC5lB,eAAOuC,MAAM,oCAAoC,GACjDvC,OAAOuC,MAAM,sCAAsC;AACnD;AAAA,MAAA;AAGE,UAAA;AACF,cAAMkkB,cAAc,MAAMpB,eAAehhB,SAAS6M,WAAWrK,MAAM;AACnE7G,eAAOuC,MAAM+iB,iBAAiBmB,aAAavV,SAAS,CAAC;AAAA,eAC9CnR,KAAK;AACZC,eAAOC,MAAM,8BAA8BF,IAAIuC,OAAO,EAAE,GACpDvC,IAAIuC,QAAQkW,SAAS,gBAAgB,KACvCxY,OAAOC,MAAM,qDAAqD;AAAA,MAAA;AAAA,IAEtE;AAAA,EACF;AAEJ;AAEA,SAAS8lB,uBAAuBL,SAAiB;AACvCxW,UAAAA,GAAG,UAAU,YAAY;AACzBU,UAAAA,cAAAA,QAAG0W,OAAOZ,OAAO,EAAEpE,MAAMxB,cAAAA,OAAI,GAEnC3gB,QAAQ2M,KAAK,GAAG;AAAA,EAAA,CACjB;AACH;AAEA,SAASuZ,eACPqB,WACAxV,WACArK,QACA;AACA,QAAM8f,OAAOC,MAAMC,QAAQH,SAAS,IAAIA,YAAY,CAACA,SAAS;AAC9D,MAAIC,KAAKte,WAAW;AACZ,UAAA,IAAI5J,MAAM,uBAAuB;AAGzC,QAAMqoB,YAAYH,KAAKzhB,IAAI,CAACyQ,KAAK3P,UAAoB;AAEnD,QADA+gB,iBAAiBpR,KAAK3P,OAAO2gB,IAAI,GAC7B
zV,cAAc;AACT,aAAA;AAAA,QAACpD,QAAQ6H;AAAAA,MAAG;AAGrB,QAAIzE,cAAc,qBAAqB;AACrC,UAAI8V,2BAA2BrR,GAAG;AACzB,eAAA;AAAA,UAACsR,mBAAmBtR;AAAAA,QAAG;AAGhC,YAAM,IAAIlX,MAAM,sCAAsCyS,SAAS,EAAE;AAAA,IAAA;AAGnE,QAAIA,cAAc,mBAAmB;AACnC,UAAI8V,2BAA2BrR,GAAG;AACzB,eAAA;AAAA,UAAC9L,iBAAiB8L;AAAAA,QAAG;AAG9B,YAAM,IAAIlX,MAAM,sCAAsCyS,SAAS,EAAE;AAAA,IAAA;AAGnE,UAAM,IAAIzS,MAAM,yBAAyByS,SAAS,EAAE;AAAA,EAAA,CACrD;AAED,SAAOrK,OAAO+C,YAAYkd,SAAS,EAAE/c,OAAO;AAC9C;AAEA,SAASgd,iBAAiBpR,KAAc3P,OAAekhB,KAAgB;AAC/DC,QAAAA,WAAWD,IAAI7e,WAAW;AAE5B,MAAA,CAAC+e,+BAAczR,GAAG;AACpB,UAAM,IAAIlX,MAAM4oB,gBAAgB,qBAAqBrhB,OAAOmhB,QAAQ,CAAC;AAGnE,MAAA,CAACG,oBAAoB3R,GAAG;AAC1B,UAAM,IAAIlX,MAAM4oB,gBAAgB,+CAA+CrhB,OAAOmhB,QAAQ,CAAC;AAEnG;AAEA,SAASG,oBAAoB3R,KAAsC;AAE/DA,SAAAA,QAAQ,QACR,OAAOA,OAAQ,YACf,WAAWA,OACX,OAAQA,IAAY7L,SAAU;AAElC;AAEA,SAASkd,2BAA2BrR,KAAmD;AAC9E2R,SAAAA,oBAAoB3R,GAAG,KAAK,SAASA;AAC9C;AAEA,SAAS0R,gBAAgB/kB,SAAiB0D,OAAemhB,UAA2B;AAClF,SAAOA,WAAW,YAAY7kB,OAAO,KAAK,qBAAqB0D,KAAK,IAAI1D,OAAO;AACjF;AAEA,SAASgjB,iBACPvd,QACAmJ,WACQ;AACR,QAAMqW,SAAS;AAAA;AACf,MAAIrW,cAAc;AACT,WAAA;AAAA,MAAkBnJ,OAAOyf,QAAQtiB,IAAKuiB,CAAAA,QAAQA,IAAIpiB,EAAE,EAAE9E,KAAKgnB,MAAM,CAAC;AAG3E,MAAIrW,cAAc;AACT,WAAA;AAAA,MAAiBnJ,OAAOyf,QAAQtiB,IAAKuiB,CAAAA,QAAQA,IAAIpiB,EAAE,EAAE9E,KAAKgnB,MAAM,CAAC;AAI1E,QAAMG,UAAoB,IACpBC,UAAoB,CAAE;AAC5B,aAAWF,OAAO1f,OAAOyf;AACnBC,QAAIvW,cAAc,WACpByW,QAAQnkB,KAAKikB,IAAIpiB,EAAE,IAEnBqiB,QAAQlkB,KAAKikB,IAAIpiB,EAAE;AAIvB,SAAIqiB,QAAQrf,SAAS,KAAKsf,QAAQtf,SAAS,IAClC,CACL;AAAA,MAAiBqf,QAAQnnB,KAAKgnB,MAAM,CAAC,IACrC,4BAA4BA,MAAM,GAAGI,QAAQpnB,KAAKgnB,MAAM,CAAC,EAAE,EAC3DhnB,KAAK;AAAA;AAAA,CAAM,IACJmnB,QAAQrf,SAAS,IACnB;AAAA,MAAiBqf,QAAQnnB,KAAKgnB,MAAM,CAAC,KAGvC;AAAA,MAAkCI,QAAQpnB,KAAKgnB,MAAM,CAAC;AAC/D;AAEA,SAASzB,YAAY;AACnB,QAAM8B,gBAAgB,OAAO7a,KAAK5N,QAAQ0oB,QAAQ,IAAI,YAAY,OAG5DjoB,QADST,QAAQC,IAAI0oB,UAAU3oB,QAAQC,IAAI2oB,UAAUH,eACvC1H,MAAM,KAAK;AAExB,SAAA;AAAA,IAACiG,KADIvmB,KAAKooB,MAAAA,KAAW;AAAA,IACfpoB;AAAAA,EAAI;AACnB;ACpQA,MAAM2K,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAwBX0d,yBAA4D;AAAA,EAChEzpB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAC7B;AAAA,MAACyF;AAAAA,IAAO,IAAI1F,KAAKQ,YACjB8nB,MAAMtoB,KAAKqJ,mBAAmB/D,IAAKiB,CAAAA,QAAQ,GAAGA,GAAG,EAAE;AAEzD,QAAI,CAAC+hB,IAAI7f;AACD,YAAA,IAAI5J,MAAM,+BAA+B;AAGjD,UAAMoI,SAASvB,UAAUsB,UAAYuc,EAAAA,MAAAA,EAAQjc,OAAO;AAAA,MAAC5B;AAAAA,IAAQ,CAAA,IAAIsB,UAE3DgD,GAAAA,cAAcse,IAAItkB,OAAO,CAACukB,KAAK9iB,OAAO8iB,IAAItI,OAAOxa,EAAE,GAAGwB,OAAO+C,aAAa;AAC5E,QAAA;AACI,YAAA;AAAA,QAAC4d;AAAAA,MAAO,IAAI,MAAM5d,YAAYG,OAAO,GACrCqe,UAAUZ,QAAQpR,OAAQqR,CAAAA,QAAQA,IAAIvW,cAAc,QAAQ,EAAEhM,IAAKuiB,CAAQA,QAAAA,IAAIpiB,EAAE,GACjFgjB,WAAWH,IAAI9R,OAAQ/Q,CAAAA,OAAO,CAAC+iB,QAAQ5P,SAASnT,EAAE,CAAC;AACrD+iB,cAAQ/f,SAAS,KACnBrI,OAAOuC,MAAM,WAAW6lB,QAAQ/f,MAAM,IAAIigB,2BAAU,YAAYF,QAAQ/f,MAAM,CAAC,EAAE,GAG/EggB,SAAShgB,SAAS,KACpBrI,OAAOC,MACLuC,OAAMyF,IAAI,GAAGqgB,mBAAAA,QAAU,YAAYD,SAAShgB,MAAM,CAAC,eAAeggB,SAAS9nB,KAAK,IAAI,CAAC,EAAE,CACzF;AAAA,aAEKR,KAAK;AACZ,YAAM,IAAItB,MAAM,oBAAoB6pB,2BAAU,YAAYJ,IAAI7f,MAAM,CAAC;AAAA,EAAMtI,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC1F;AAEJ,GC5DMimB,iBAA4C;AAAA,EAChD/pB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCIMke,WAAYC,CAAwBA,QAAAA;AAE1BC,SAAAA,aAAa1M,OAAgBxZ,QAA2C;AACtF,QAAMmmB,aAA0E;AAAA,IAC9EC,YAAYpmB,OAAMqmB;AAAAA,IAClBC,KAAKtmB,OAAMqmB;AAAAA,IACXE,QAAQvmB,OAAMiM;AAAAA,IACdua,QAAQxmB,OAAMuW;AAAAA,IACdkQ,SAASzmB,OAAMsS;AAAAA,IACfoU,YA
AYV;AAAAA,EAAAA,GAGRlgB,OAAOtG,KAAKC,UAAU+Z,OAAO,MAAM,CAAC;AAE1C,SAAOmN,kBAAAA,QAAS7gB,IAAI,EACjBpD,IAAI,CAAC8I,OAAO1H,GAAG4gB,QAA4B;AAE1C,UAAMkC,YAAY9iB,MAAM,IAAI0H,QAAQkZ,IAAI5gB,IAAI,CAAC;AAE3C0H,WAAAA,MAAMlG,SAAS,YACfshB,UAAUthB,SAAS,gBACnB,UAAUiF,KAAKqc,UAAUjhB,KAAK,IAEvB;AAAA,MAAC,GAAG6F;AAAAA,MAAOlG,MAAM;AAAA,IAAA,IAGnBkG;AAAAA,EACR,CAAA,EACA9I,IAAK8I,CAAAA,WACc2a,WAAW3a,MAAMlG,IAAI,KAAK0gB,UAC3Bxa,MAAMqS,GAAG,CAC3B,EACA9f,KAAK,EAAE;AACZ;ACxCA,MAAMgK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAoBX8e,sBAA8D;AAAA,EAClE7qB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAC7B;AAAA,MAACypB;AAAAA,MAAQhkB;AAAAA,IAAW1F,IAAAA,KAAKQ,YACzB,CAACmlB,KAAK,IAAI3lB,KAAKqJ,mBAAmB/D,IAAKiB,CAAAA,QAAQ,GAAGA,GAAG,EAAE;AAE7D,QAAI,CAACof;AACG,YAAA,IAAI9mB,MAAM,+BAA+B;AAGjD,UAAMoI,SAASvB,UAAUsB,UAAYuc,EAAAA,MAAAA,EAAQjc,OAAO;AAAA,MAAC5B;AAAAA,IAAQ,CAAA,IAAIsB,UAAU;AAEvE,QAAA;AACF,YAAM+O,MAAM,MAAM9O,OAAOc,YAAY4d,KAAK;AAC1C,UAAI,CAAC5P;AACH,cAAM,IAAIlX,MAAM,YAAY8mB,KAAK,YAAY;AAGxChjB,aAAAA,MAAM+mB,SAASZ,aAAa/S,KAAKnT,MAAK,IAAIR,KAAKC,UAAU0T,KAAK,MAAM,CAAC,CAAC;AAAA,aACtE5V,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA8BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EAC7D;AAEJ,GC9CMyJ,oBAAoB,eAEpBxB,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oDAQmCwB,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyBrE,IAAe,wBAAA;AAAA,EACbvN,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,YACkB;AAEZ,UAAA;AAAA,MACJypB;AAAAA,MACAhkB;AAAAA,MACAikB;AAAAA,MACAC;AAAAA,MACA,eAAeviB;AAAAA,IAAAA,IACb,MAAMsN,gBAAc3U,IAAI,GACtB;AAAA,MAACgH;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,MAAOinB;AAAAA,IAAa5pB,IAAAA,SACxC,CAACsQ,KAAK,IAAIvQ,KAAKqJ;AAErB,QAAI,CAACkH;AACG,YAAA,IAAI1R,MAAM,yBAAyB;AAGtCwI,kBACHjH,OAAO2L,KAAKnJ,OAAMuW,OAAO,wCAAwChN,iBAAiB,IAAI,CAAC;AAGzF,UAAM2d,iBAAiB,CAACpkB,SAClByB,iBAAiB,CAACwiB,SAClBziB,cAAc,CAAC0iB;AAEjBziB,QAAAA,kBAAkB,CAAC0iB,WAAWE,KAAKpkB;AAC/B,YAAA,IAAI9G,MACR,qFACF;AAGEirB,QAAAA,kBAAkB,CAACD,WAAWE,KAAKrkB;AAC/B,YAAA,IAAI7G,MACR,qFACF;AAGF,UAAMmrB,aAAahjB,UAAU;AAAA,MAACG;AAAAA,MAAgBD;AAAAA,IAAAA,CAAY,EAAEqc,MAAAA,GACtD;AAAA,MAAC7d,SAASukB;AAAAA,MAAiBtkB,WAAWukB;AAAAA,QAAqBF,WAAW1iB,OAAAA,GAEtEL,SAAS+iB,WAAW1iB,OAAO;AAAA,MAC/B3B,WAAWgkB,WAAWO;AAAAA,MACtBxkB,SAASA,WAAWukB;AAAAA,MACpB5iB,YAAYA,cAAc8E;AAAAA,IAAAA,CAC3B;AAEG,QAAA;AACF,YAAM4a,OAAO,MAAM9f,OAAOgB,MAAMsI,KAAK;AACrC,UAAI,CAACwW;AACG,cAAA,IAAIloB,MAAM,2BAA2B;AAGtC8D,aAAAA,MAAM+mB,SAASZ,aAAa/B,MAAMnkB,MAAK,IAAIR,KAAKC,UAAU0kB,MAAM,MAAM,CAAC,CAAC;AAAA,aACxE5mB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAyBsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EACxD;AAEJ;AAEA,SAASiS,gBAAc3U,MAAiD;AAEhEmqB,QAAAA,qBAAqB5qB,QAAQC,IAAI4qB;AACvC,SAAOxV,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrD6W,OAAO,UAAU;AAAA,IAAC3T,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAM,EAClD4Q,OAAO,WAAW;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAS,EAClC2T,OAAO,WAAW;AAAA,IAAC3T,MAAM;AAAA,EAAA,CAAS,EAClC2T,OAAO,aAAa;AAAA,IAAC3T,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAM,EACrD4Q,OAAO,eAAe;AAAA,IAAC3T,MAAM;AAAA,IAAU+C,SAASkf;AAAAA,EAAmB,CAAA,EAAE/gB;AAC1E;ACnHA,MAAMsB,gBAAc,iEAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAyBX0f,6BAAiD;AAAA,EACrDzrB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,sBAAyC;AAAA,EAAA,CAAA
,GAEvDgL,QAAQjL,MAAMC,OAAO;AAEpC,GCtCM0K,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAuBJ2f,cAAoC;AAAA,EAC/C1rB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EAAA,UACbC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,iBAA+B;AAAA,EAAA,CAAA,GAE7CgL,QAAQjL,MAAMC,OAAO;AAEpC,GC3BM0K,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeX4f,0BAAgD;AAAA,EACpD3rB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAkDC,aACnD,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,sBAAuC;AAAA,MAErDgL,QAAQjL,MAAMC,OAAO;AAAA,EAElC0K,UAAAA;AACF,GChCMA,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAgCX6f,0BAAgD;AAAA,EACpD5rB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXK,OAAO;AAAA,EACPH,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAyBC,aAC1B,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,sBAAuC;AAAA,MAErDgL,QAAQjL,MAAMC,OAAO;AAAA,EAElC0K,UAAAA;AACF,GC3CM8f,eAA0C;AAAA,EAC9C7rB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCDMC,aAAW;AAAA;AAAA;AAAA,GAKX+f,yBAA+C;AAAA,EACnD9rB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXK,OAAO;AAAA,EACPH,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEY,MAAM,QAAO,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAA,qBAAsC;AAAA,MAEpDgL,QAAQjL,MAAMC,OAAO;AAAA,EAElC0K,UAAAA;AACF,GCtBMggB,oBAA0C;AAAA,EAC9C/rB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtBgH,SAASD,aAET;AAAA,MAACrB;AAAAA,IAAAA,IAAasB,OAAOK,OAAO;AAClC,QAAI,CAAC3B;AACG,YAAA,IAAI9G,MAAM,qBAAqB;AAKvC,UAAM+rB,YAAY,wCAFG,MAAM3jB,OAAO4jB,SAASC,QAAQnlB,SAAS,KAAM,CAAA,GAC/BolB,kBAAkB,UACkB,YAAYplB,SAAS;AAE5FvF,WAAOuC,MAAM,WAAWioB,SAAS,EAAE,GACnCI,sBAAKJ,SAAS;AAAA,EAAA;AAElB,GCrBMK,oBAA0C;AAAA,EAC9CrsB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,IAAa/G,IAAAA,SACd,CAACrB,IAAI,IAAIoB,KAAKqJ,oBACdpC,SAASD,UAAAA,GAETkkB,SAAS,MAAMC,gBAAcvsB,MAAMqB,OAAO;AAC5C,QAAA;AACIgH,YAAAA,OACHsc,MAAM,EACNjc,OAAO;AAAA,QAACD,YAAY;AAAA,MAAa,CAAA,EACjCkH,QAAQ;AAAA,QAACC,QAAQ;AAAA,QAAUG,KAAK,UAAUuc,MAAM;AAAA,MAAA,CAAG;AAAA,aAC/C/qB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAA0BsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAAA,EACzD;AAEJ;AAEA,eAAeyoB,gBAAchR,WAA+Bla,SAA4B;AACtF,QAAMmrB,gBAAgBjR,aAAaA,UAAUlN,YAAAA,GACvC;AAAA,IAACtB;AAAAA,IAAQ3E;AAAAA,EAAAA,IAAa/G,SAGtBorB,QAAQ,MAFCrkB,YAGZuc,QACAjc,OAAO;AAAA,IAACD,YAAY;AAAA,EAAa,CAAA,EACjCkH,QAAgB;AAAA,IAACI,KAAK;AAAA,IAAUjG,MAAM;AAAA,EAAA,CAAK;AAE9C,MAAI0iB,eAAe;AACXvd,UAAAA,WAAWwd,MAAM7U,OAAQ8U,CAASA,SAAAA,KAAK1sB,KAAKqO,YAAY,MAAMme,aAAa,EAAE,CAAC;AACpF,QAAI,CAACvd;AACH,YAAM,IAAIhP,MAAM,mBAAmBsb,SAAS,aAAa;AAG3D,WAAOtM,SAASpI;AAAAA,EAAAA;AAGZqI,QAAAA,UAAUud,MAAM/lB,IAAKgmB,CAAU,UAAA;AAAA,IAAC/iB,OAAO+iB,KAAK7lB;AAAAA,IAAI7G,MAAM0sB,KAAK1sB;AAAAA,EAAAA,EAAM;AACvE,SAAO+M,OAAOM,OAAO;AAAA,IACnBvJ,SAAS;AAAA,IACTwF,MAAM;AAAA,IACN4F;AAAAA,EAAAA,CACD;AACH;AClDA,MAAMyd,YAAuC;AAAA,EAC3C3sB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GCHM8gB,0BAAgD;AAAA,EACpD5sB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtB,CAACwrB,SAAS,IAAIzrB,KAAKqJ,oBACnBpC,SAASD,UAAU;AAErB0kB,QAAAA;AACA,QAAA;AACQ,gBAAA,MAAMzkB,OAAOsH,QAAyB;AAAA,QAACI,KAAK,mBAAmB8c,SAAS;AAAA,MAAA,CAAG;AAAA,aAC9EtrB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAmCsB,IAAIuC,OAAO,
EAAE;AAAA,IAAA;AAG5D,UAAA;AAAA,MAACX;AAAAA,MAAW4pB;AAAAA,MAAYC;AAAAA,MAAYC;AAAAA,MAAeC;AAAAA,IAAAA,IAAcJ;AAUvE,QARAtrB,OAAOuC,MAAM,SAASZ,SAAS,EAAE,GACjC3B,OAAOuC,MAAM,WAAWopB,UAAUL,OAAO,CAAC,EAAE,GAC5CtrB,OAAOuC,MAAM,gBAAgBgpB,UAAU,EAAE,GAErCD,QAAQM,aACV5rB,OAAOuC,MAAM,YAAYspB,cAAcP,OAAO,CAAC,EAAE,GAG/C,CAACI,eAAe,CAACD,iBAAiBA,kBAAkB,SAAS;AAC/D,YAAMlf,OAAOif,aAAa;AAAA;AAAA,EAAUA,UAAU;AAAA;AAAA,IAAY;AACnDjpB,aAAAA,MAAM,kBAAkBgK,IAAI,EAAE;AAAA,IAAA;AAAA,EACvC;AAEJ;AAIO,SAASsf,cACdP,SACAre,UAAmC,IAC3B;AACF,QAAA;AAAA,IAAC6e;AAAAA,MAAe7e,SAChB;AAAA,IAAC5H;AAAAA,IAAIomB;AAAAA,IAAeF;AAAAA,EAAAA,IAAcD,SAClCS,OAAOD,cAAc,8BAA8BzmB,EAAE,oBAAoB;AAC/E,UAAQomB,eAAa;AAAA,IACnB,KAAK;AACI,aAAA,QAAQF,UAAU,IAAIQ,IAAI;AAAA,IACnC,KAAK;AACI,aAAA;AAAA,IACT,KAAK;AACI,aAAA;AAAA,EAET;AAGK,SAAA;AACT;AAEO,SAASJ,UAAUL,SAAkC;AAC1D,SAAIA,QAAQM,YACH,WAGLN,QAAQI,aACH,gBAGF;AACT;AC5DA,MAAMM,sBAA2D;AAAA,EAC/DxtB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,IAAAA,IAAa/G,SACdM,QAAQP,KAAKQ,YACb,CAAC5B,IAAI,IAAIoB,KAAKqJ,oBACdpC,SAASD,UAAU,GAEnBkkB,SAAS,MAAMC,cAAcvsB,MAAMqB,OAAO;AAChD,QAAIosB,UACAC;AACA,QAAA;AACS,iBAAA,MAAMrlB,OAAOsH,QAAuB;AAAA,QAACI,KAAK,UAAUuc,MAAM;AAAA,MAAY,CAAA,GACjFoB,WAAW,MAAMrlB,OAAOsH,QAA2B;AAAA,QAACI,KAAK,UAAUuc,MAAM;AAAA,MAAA,CAAY;AAAA,aAC9E/qB,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAgCsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAGzD6pB,UAAAA,kBAAkBC,yBAAQF,UAAU,WAAW,GAC/CG,YAAYJ,SAAS/mB,IAAKwG,CAAsD,SAAA;AAAA,MACpF,GAAGA;AAAAA,MACHwgB,UAAUC,gBAAgBzgB,IAAIrG,EAAE;AAAA,IAChC,EAAA,GAEIinB,gBAAgBL,SAAS5jB,SAAS;AAC9B5B,cAAAA,QAAQ,CAACnE,SAASgE,MAAM;AAChCimB,mBAAajqB,SAASzC,SAAS;AAAA,QAAC2sB,UAAUrsB,MAAMqsB;AAAAA,MAAS,CAAA,GACzDC,eAAe5sB,SAASysB,kBAAkBhmB,CAAC;AAAA,IAAA,CAC5C;AAAA,EAAA;AAEL;AAIA,eAAeykB,cAAchR,WAA+Bla,SAA4B;AACtF,QAAMmrB,gBAAgBjR,aAAaA,UAAUlN,YAAAA,GACvC;AAAA,IAACtB;AAAAA,IAAQ3E;AAAAA,EAAAA,IAAa/G,SAGtBorB,QAAQ,MAFCrkB,YAGZuc,QACAjc,OAAO;AAAA,IAACD,YAAY;AAAA,EAAa,CAAA,EACjCkH,QAAgB;AAAA,IAACI,KAAK;AAAA,IAAUjG,MAAM;AAAA,EAAA,CAAK;AAE9C,MAAI0iB,eAAe;AACXvd,UAAAA,WAAWwd,MAAM7U,OAAQ8U,CAASA,SAAAA,KAAK1sB,KAAKqO,YAAY,MAAMme,aAAa,EAAE,CAAC;AACpF,QAAI,CAACvd;AACH,YAAM,IAAIhP,MAAM,mBAAmBsb,SAAS,aAAa;AAG3D,WAAOtM,SAASpI;AAAAA,EAAAA;AAGlB,MAAI4lB,MAAM5iB,WAAW;AACb,UAAA,IAAI5J,MAAM,+BAA+B;AAGjD,MAAIwsB,MAAM5iB,WAAW;AACZ4iB,WAAAA,MAAM,CAAC,EAAE5lB;AAGZqI,QAAAA,UAAUud,MAAM/lB,IAAKgmB,CAAU,UAAA;AAAA,IAAC/iB,OAAO+iB,KAAK7lB;AAAAA,IAAI7G,MAAM0sB,KAAK1sB;AAAAA,EAAAA,EAAM;AACvE,SAAO+M,OAAOM,OAAO;AAAA,IACnBvJ,SAAS;AAAA,IACTwF,MAAM;AAAA,IACN4F;AAAAA,EAAAA,CACD;AACH;AAEA,SAAS+e,eAAe5sB,SAA4B6sB,MAAe;AAC5DA,UACH7sB,QAAQG,OAAOuC,MAAM;AAAA,CAAO;AAEhC;AAEA,SAASgqB,aACPjqB,SACAzC,SACAoN,SACA;AACM,QAAA;AAAA,IAACuf;AAAAA,MAAYvf,SACb;AAAA,IAACjN;AAAAA,IAAQwC,OAAAA;AAAAA,EAAAA,IAAS3C;AAExBG,SAAOuC,MAAM,SAASD,QAAQX,SAAS,EAAE,GACzC3B,OAAOuC,MAAM,WAAWD,QAAQ0F,MAAM,EAAE,GACxChI,OAAOuC,MAAM,gBAAgBD,QAAQipB,UAAU,EAAE,GAE7CjpB,QAAQqqB,eAAe,KACzB3sB,OAAOuC,MAAM,aAAaD,QAAQqqB,YAAY,EAAE,GAG9CH,aACFxsB,OAAOuC,MAAM,UAAU,GACvBvC,OAAOuC,MAAMqqB,kBAAQ5qB,KAAK6G,MAAMvG,QAAQuqB,OAAO,GAAG;AAAA,IAACC,QAAQ;AAAA,EAAK,CAAA,CAAC,IAG/DN,YAAYlqB,QAAQ4pB,aACtBlsB,OAAOuC,MAAM,WAAW,GACxBD,QAAQ4pB,SAASzlB,QAAS6kB,CAAY,YAAA;AAEpC,UAAMyB,SAAS,MADFzB,QAAQ3pB,UAAUwX,QAAQ,WAAW,GAAG,CAC5B;AAEzB,QAAImS,QAAQI;AACHnpB,aAAAA,MAAM,GAAGwqB,MAAM,IAAIvqB,OAAMuW,OAAO,SAAS,CAAC,EAAE;AAAA,aAC1CuS,QAAQM,WAAW;AACtBoB,YAAAA,UAAUnB,cAAcP,SAAS;AAAA,QAACQ,aAAa;AAAA,MAAA,CAAK;AACnDvpB,aAAAA,MAAM,GAAGwqB,MAAM,IAAIvqB,OAAMuW,OAAO,YAAYiU,OAAO,EAAE,CAAC,EAAE;AAAA,IACjE;AACSzqB,aAAAA,MAAM,GAAGwqB,MAAM,kBAAk
BzB,QAAQC,UAAU,KAAKD,QAAQ2B,QAAQ,KAAK;AAAA,EAEvF,CAAA,IAIHjtB,OAAOuC,MAAM,EAAE;AACjB;AC/HA,MAAM2qB,mBAAyC;AAAA,EAC7C1uB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG,UAAU;AAAA,EACVD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,IAAAA,IAAUH,SACtBgH,SAASD,UAAU;AAErBqkB,QAAAA;AACA,QAAA;AACFA,cAAQ,MAAMpkB,OACXsc,MAAM,EACNjc,OAAO;AAAA,QAACD,YAAY;AAAA,MAAa,CAAA,EACjCkH,QAAgB;AAAA,QAACI,KAAK;AAAA,MAAA,CAAS;AAAA,aAC3BxO,KAAK;AACZ,YAAM,IAAItB,MAAM;AAAA,EAAgCsB,IAAIuC,OAAO,EAAE;AAAA,IAAA;AAG/D2oB,UAAMxkB,QAASykB,CAAS,SAAA;AACtBlrB,aAAOuC,MAAM,SAAS2oB,KAAK1sB,IAAI,EAAE,GACjCwB,OAAOuC,MAAM,YAAY2oB,KAAK5lB,OAAO,EAAE,GACvCtF,OAAOuC,MAAM,QAAQ2oB,KAAKvZ,GAAG,EAAE,GAE3BuZ,KAAKpjB,SAAS,eAChB9H,OAAOuC,MAAM,gBAAgB2oB,KAAKiC,UAAU,EAAE,GAE1CjC,KAAK5gB,eACPtK,OAAOuC,MAAM,gBAAgB2oB,KAAK5gB,WAAW,EAAE,IAInDtK,OAAOuC,MAAM,EAAE;AAAA,IAAA,CAChB;AAAA,EAAA;AAEL,GCtCM+H,gBAAc,yEAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcX6iB,yBAA+C;AAAA,EACnD5uB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACF,qBAAAA;AAAAA,IAAAA,IAAuB,MAAM;;QAC9B0tB,eAAe,MAAM1tB,qBAAoBC,MAAMC,OAAO;AACxDwtB,QAAAA;AACIA,YAAAA;AAEDA,WAAAA;AAAAA,EAAAA;AAEX;AChCA,IAAe,gBAAA;AAAA,EACb7uB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACLO,MAAMgjB,uBAAuB,cACvBC,8BAA8B,CAAC,OAAO,MAAM,MAAM,KAAK,GCDvDC,kBAAkBA,CAAC;AAAA,EAC9BC;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAOMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcxoB,IAAKyoB,CAAAA,MAAM3rB,KAAKC,UAAU0rB,CAAC,CAAC,EAAEptB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GCjBKqtB,gBAAgBA,CAAC;AAAA,EAC5BH;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA,YAGMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcxoB,IAAKyoB,CAAAA,MAAM3rB,KAAKC,UAAU0rB,CAAC,CAAC,EAAEptB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GCbKstB,cAAcA,CAAC;AAAA,EAC1BJ;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcxoB,IAAKyoB,CAAAA,MAAM3rB,KAAKC,UAAU0rB,CAAC,CAAC,EAAEptB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GChBKutB,aAAaA,CAAC;AAAA,EACzBL;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcxoB,IAAKyoB,CAAAA,MAAM3rB,KAAKC,UAAU0rB,CAAC,CAAC,EAAEptB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GChBKwtB,cAAcA,CAAC;AAAA,EAC1BN;AAAAA,EACAC;AAIF,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMMD,aAAa;AAAA,EAEvBC,cAAcrlB,SAAS,IACnB,qBAAqBqlB,cAAcxoB,IAAKyoB,CAAAA,MAAM3rB,KAAKC,UAAU0rB,CAAC,CAAC,EAAEptB,KAAK,IAAI,CAAC;AAAA,IAC3E,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GCFFgK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAYXyjB,YAAY,CAChB;AAAA,EAACxvB,MAAM;AAAA,EAA6CyvB,UAAUL;AAAa,GAC3E;AAAA,EAACpvB,MAAM;AAAA,EAAyByvB,UAAUH;AAAU,GACpD;AAAA,EAACtvB,MAAM;AAAA,EAAkByvB,UAAUJ;AAAW,GAC9C;AAAA,EAACrvB,MAAM;AAAA,EAAyCyvB,UAAUF;AAAW,GACrE;AAAA,EACEvvB,MAAM;AAAA,EACNyvB,UAAUT;AACZ,CAAC,GAGGU,yBAAqE;AAAA,EACzE1vB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAACG;AAAAA,MAAQuL;AAAAA,MAAQrL;AAAA
A,MAASsC,OAAAA;AAAAA,IAAAA,IAAS3C;AAErC,QAAA,CAACyX,KAAK,IAAI1X,KAAKqJ;AAEZ,WAAA,CAACqO,OAAO2E,KAAK;AACV,cAAA,MAAM1Q,OAAOM,OAAO;AAAA,QAC1B/D,MAAM;AAAA,QACNqmB,QAAQ;AAAA,QACR7rB,SAAS;AAAA,MAAA,CACV,GACIgV,MAAM2E,UACTjc,OAAOC,MAAMuC,OAAMyF,IAAI,sBAAsB,CAAC;AAG5CgY,UAAAA,SAAQ,MAAM1U,OAAOM,OAAO;AAAA,MAChC/D,MAAM;AAAA,MACNqmB,QAAQ;AAAA,MACR7rB,SAAS;AAAA,IAAA,CACV,GAEK8rB,kBAAkBC,OAAOC,YAAYN,UAAU9oB,IAAKyoB,CAAM,MAAA,CAACA,EAAEnvB,MAAMmvB,CAAC,CAAC,CAAC,GACtEM,WAAW,MAAM1iB,OAAOM,OAAO;AAAA,MACnC/D,MAAM;AAAA,MACNxF,SAAS;AAAA,MACToL,SAASsgB,UAAU9oB,IAAKqpB,CAAqB,qBAAA;AAAA,QAC3C/vB,MAAM+vB,gBAAgB/vB;AAAAA,QACtB2J,OAAOomB,gBAAgB/vB;AAAAA,MAAAA,EACvB;AAAA,IAAA,CACH,GAEKgwB,cAAcC,gBAAAA,QAAOnX,MAAMzK,YAAY,CAAC,EAC3CsM,QAAQ,QAAQ,GAAG,EACnBA,QAAQ,eAAe,EAAE,GAEtBuV,UAAU/tB,cAAKJ,QAAAA,KAAKL,SAASotB,sBAAsBkB,WAAW;AACpE,QAAIlY,gBAAWoY,OAAO,KAElB,CAAE,MAAMnjB,OAAOM,OAAO;AAAA,MACpB/D,MAAM;AAAA,MACNxF,SAAS,uBAAuBE,OAAMgE,KAAKkoB,OAAO,CAAC;AAAA,MACnD7jB,SAAS;AAAA,IAAA,CACV;AAED;AAGJsK,SAAAA,UAAUuZ,SAAS;AAAA,MAACptB,WAAW;AAAA,IAAA,CAAK;AAEpC,UAAMqtB,oBAAoBP,gBAAgBH,QAAQ,EAAEA,YAAYL,eAAe;AAAA,MAC7EH,eAAenW;AAAAA,MACfoW,eAAezN,OACZC,MAAM,GAAG,EACThb,IAAKyoB,CAAMA,MAAAA,EAAE1R,MAAM,EACnB7F,OAAOyC,OAAO;AAAA,IAClB,CAAA,GAEK+V,iBAAiBjuB,cAAAA,QAAKJ,KAAKmuB,SAAS,UAAU;AAEpD,UAAM3sB,GAAAA,UAAU6sB,gBAAgBD,gBAAgB,GAEhD3uB,OAAOuC,MAAM,GACbvC,OAAOuC,MAAM,GAAGC,OAAMiM,MAAM,QAAG,CAAC,qBAAqB,GACrDzO,OAAOuC,MAAAA,GACPvC,OAAOuC,MAAM,aAAa,GAC1BvC,OAAOuC,MACL,QAAQC,OAAMsS,KACZ8Z,cACF,CAAC,6DACH,GACA5uB,OAAOuC,MACL;AAAA,IAAkCC,OAAMsS,KACtC,wBAAwB0Z,WAAW,6CACrC,CAAC,IACH,GACAxuB,OAAOuC,MACL;AAAA,KAAiDC,OAAMsS,KACrD,wBAAwB0Z,WAAW,yDACrC,CAAC,IACH,GACAxuB,OAAOuC,SACPvC,OAAOuC,MACL,+DAAwDC,OAAMsS,KAC5D,0DACF,CAAC,EACH;AAAA,EAAA;AAEJ;AC3FgB+Z,SAAAA,uBACd3uB,SACAutB,eAC2B;AAC3B,SAAO,CAACA,eAAe9sB,cAAAA,QAAKJ,KAAKktB,eAAe,OAAO,CAAC,EAAEqB,QAASC,CAAAA,aACjExB,4BAA4BroB,IAAKugB,CAAQ,QAAA;AACvC,UAAMuJ,eAAeruB,cAAAA,QAAKJ,KAAK+sB,sBAAsB,GAAGyB,QAAQ,IAAItJ,GAAG,EAAE,GACnEwJ,eAAetuB,cAAKL,QAAAA,QAAQJ,SAAS8uB,YAAY;AACnDE,QAAAA;AACA,QAAA;AAEFA,YAAMxgB,QAAQugB,YAAY;AAAA,aACnBlvB,KAAK;AACZ,UAAIA,IAAIoM,SAAS;AACf,cAAM,IAAI1N,MAAM,UAAUsB,IAAIuC,OAAO,GAAG;AAAA,IAAA;AAGrC,WAAA;AAAA,MAAC0sB;AAAAA,MAAcC;AAAAA,MAAcC;AAAAA,IAAG;AAAA,EAAA,CACxC,CACH;AACF;AASO,SAASC,0BACdC,QAC6C;AACzC,MAAA,OAAOA,OAAOF,MAAQ,OAAe,CAAC9H,uBAAAA,QAAcgI,OAAOF,IAAIrkB,OAAO;AACjE,WAAA;AAGHqkB,QAAAA,MAAME,OAAOF,IAAIrkB;AACvB,SAAO,OAAOqkB,IAAI5X,SAAU,YAAY4X,IAAIG,YAAYxhB;AAC1D;AClEA,MAAMtD,aAAW,IAEX+kB,uBAA6C;AAAA,EACjD9wB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO6kB,GAAG1vB,YAAY;AACtB,UAAA;AAAA,MAACK;AAAAA,MAASF;AAAAA,MAAQwC,OAAAA;AAAAA,IAAAA,IAAS3C;AAC7B,QAAA;AACI2vB,YAAAA,aAAa,MAAMC,kBAAkBvvB,OAAO;AAE9CsvB,UAAAA,WAAWnnB,WAAW,GAAG;AAC3BrI,eAAOuC,MAAM,yDAAyD,GACtEvC,OAAOuC,MACL;AAAA,MAASC,OAAMiM,MAAM,kCAAoC,CAAC,4BAC5D;AACA;AAAA,MAAA;AAGI0I,YAAAA,QAAQ,IAAIC,0BAAM;AAAA,QACtBE,OAAO,SAASkY,WAAWnnB,MAAM;AAAA,QACjCgP,SAAS,CACP;AAAA,UAAC7Y,MAAM;AAAA,UAAM8Y,OAAO;AAAA,UAAMC,WAAW;AAAA,QAAA,GACrC;AAAA,UAAC/Y,MAAM;AAAA,UAAS8Y,OAAO;AAAA,UAASC,WAAW;AAAA,QAAO,CAAA;AAAA,MAAA,CAErD;AAEDiY,iBAAW/oB,QAASipB,CAAqB,qBAAA;AACvCvY,cAAMK,OAAO;AAAA,UAACnS,IAAIqqB,iBAAiBrqB;AAAAA,UAAIiS,OAAOoY,iBAAiBC,UAAUrY;AAAAA,QAAAA,CAAM;AAAA,MAAA,CAChF,GACDH,MAAMQ,cACN3X,OAAOuC,MAAM,sDAAsD;AAAA,aAC5DtC,OAAO;AACVA,UAAAA,MAAMkM,SAAS,UAAU;AAC3BnM,eAAOuC,MAAM,2CAA2C,GACxDvC,OAAOuC,MACL;AAAA,MAASC,OAAMiM,MAAM,kCAAoC,CAAC,4BAC5D;AACA;AAAA,MAAA;AAEF,YAAM,IAAIhQ,MAAM,+CAA+CwB,MAAMqC,OAAO,EAAE;AAAA,IAAA;AAAA,EAChF;AAEJ;AAmBA,eAAsBmtB,kBAAkBvvB,SAA+C;AACjF0vB,MAAAA;AAEFA,eAAaC,KAA
AA,SAAS;AAAA,IACpBtN,QAAQ,OAAOpjB,QAAQuC,QAAQkD,MAAM,CAAC,CAAC;AAAA,IACvCkrB,WAAW;AAAA,MAAC,kBAAkB;AAAA,IAAA;AAAA,EAC/B,CAAA,EAAEF;AAGCG,QAAAA,gBAAgBpvB,sBAAKJ,KAAKL,SAASotB,oBAAoB,GACvD0C,mBAAmB,MAAMC,GAAAA,QAAQF,eAAe;AAAA,IAACG,eAAe;AAAA,EAAA,CAAK,GAErEV,aAAkC,CAAC;AACzC,aAAWW,SAASH,kBAAkB;AACpC,UAAMI,YAAYD,MAAMnN,YAAgBmN,IAAAA,MAAM3xB,OAAO6xB,+BAA+BF,MAAM3xB,IAAI,GACxF8xB,aAAazB,uBAAuB3uB,SAASkwB,SAAS,EAAEha,OAAO+Y,yBAAyB;AAE9F,eAAWoB,aAAaD;AACtBd,iBAAWhsB,KAAK;AAAA,QACd6B,IAAI+qB;AAAAA,QACJT,WAAWY,UAAUrB,IAAIrkB;AAAAA,MAAAA,CAC1B;AAAA,EAAA;AAID+kB,SAAAA,cACFA,cAGKJ;AACT;AAEA,SAASa,+BAA+Bze,UAAkB;AAExD,SAAO2b,4BAA4B3pB,OACjC,CAACpF,MAAMinB,QAASjnB,KAAKgyB,SAAS,IAAI/K,GAAG,EAAE,IAAI9kB,cAAAA,QAAKqR,SAASxT,MAAM,IAAIinB,GAAG,EAAE,IAAIjnB,MAC5EoT,QACF;AACF;ACjHA,IAAe,iBAAA;AAAA,EACbpT,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACDA,SAASmmB,aAAa9vB,OAAoB;AACpC,MAAA,CAACimB,MAAMC,QAAQlmB,KAAI;AACf,UAAA,IAAIlC,MAAM,sBAAsB;AAGxC,SAAOkC,MAAKiD,OAAe,CAAC2e,QAAQmO,SAASpqB,MAAM;AACjD,QAAIqqB,MAAAA,eAAeD,OAAO;AACjB,aAAA,GAAGnO,MAAM,IAAImO,OAAO;AAGzBE,QAAAA,mBAAaF,OAAO,KAAKA,QAAQG;AACnC,aAAO,GAAGtO,MAAM,WAAWmO,QAAQG,IAAI;AAGrCC,QAAAA,MAAAA,aAAaJ,OAAO,GAAG;AACnB,YAAA,CAACK,MAAMC,EAAE,IAAIN;AACnB,aAAO,GAAGnO,MAAM,IAAIwO,IAAI,IAAIC,EAAE;AAAA,IAAA;AAGhC,QAAI,OAAON,WAAY;AAEd,aAAA,GAAGnO,MAAM,GADEjc,MAAM,IAAI,KAAK,GACL,GAAGoqB,OAAO;AAGxC,UAAM,IAAIjyB,MAAM,8BAA8BuD,KAAKC,UAAUyuB,OAAO,CAAC,IAAI;AAAA,KACxE,EAAE;AACP;AAgBO,MAAMO,eAAeA,CAACC,WAA2C,CAAIC,GAAAA,QAAQ,MAC3E9C,OAAO+C,QAAQF,QAAQ,EAC3BhsB,IAAI,CAAC,CAAC4jB,KAAKuI,KAAK,MACfprB,KAAKJ,IAAIijB,IAAIzgB,SAAS8oB,QAAQ,GAAGF,aAAaI,MAAMH,UAAUC,QAAQ,CAAC,CAAC,CAC1E,EACCvtB,OAAO,CAACiC,KAAKgY,SAAUA,OAAOhY,MAAMgY,OAAOhY,KAAM,CAAC,GAc1CyrB,aAAaA,CAAwB;AAAA,EAChDC,MAAAA,QAAO,CAAC;AAAA,EACRC;AAAAA,EACAC,QAAAA,UAAS;AAAA,EACTC,UAAUC,YAAYA,CAAC;AAAA,IAACC;AAAAA,EAAAA,MAAWA;AAAAA,EACnCC;AACa,MAAc;AACrBT,QAAAA,UAAU/C,OAAO+C,QAAQG,KAAI;AAEnC,SAAOH,QACJlsB,IAAI,CAAC,CAAC4jB,KAAKuI,KAAK,GAAGrrB,UAAU;AAC5B,UAAM8rB,SAAS9rB,UAAUorB,QAAQ/oB,SAAS,GACpC0pB,aAAa,GAAGN,OAAM,GAAGK,SAAS,OAAO,SAAI,IAC7CE,SAASL,UAAUN,KAAK,GAExBY,SAASX,WAAW;AAAA,MACxBC,MAAMF,MAAMH;AAAAA,MACZM;AAAAA,MACAC,QAAQM;AAAAA,MACRL,UAAUC;AAAAA,MACVE;AAAAA,IAAAA,CACD;AAED,QAAI,CAACG,QAAQ3pB;AAEX,aAAO,CADS,GAAGopB,OAAM,GAAGK,SAAS,WAAM,QAAG,UAAKhJ,GAAG,IACrCmJ,MAAM,EAAE7b,OAAOyC,OAAO,EAAEtY,KAAK;AAAA,CAAI;AAG9C,UAAA,CAAC2xB,OAAO,GAAGC,IAAI,IAAIH,QACnBI,eAAe,IAAIC,OAAOb,gBAAgBC,QAAOppB,SAASygB,IAAIzgB,MAAM,GACpEiqB,QAAQR,SAAS,WAAM,UACvBS,oBAAoB,IAAIF,OAAOb,gBAAgBC,QAAOppB,SAAS,CAAC,GAEhEmqB,eAAe,GAAGf,OAAM,GAAGa,KAAK,UAAKxJ,GAAG,IAAIsJ,YAAY,IAAIP,WAAWK,KAAK,CAAC,IAC7EO,qBAAqBN,KACxBjtB,IAAKwtB,CAAAA,WAAW,GAAGX,UAAU,GAAGQ,iBAAiB,IAAIV,WAAWa,MAAM,CAAC,EAAE,EACzEnyB,KAAK;AAAA,CAAI;AAGL,WAAA,CADS,CAACiyB,cAAcC,kBAAkB,EAAErc,OAAOyC,OAAO,EAAEtY,KAAK;AAAA,CAAI,GAC3D0xB,MAAM,EAAE7b,OAAOyC,OAAO,EAAEtY,KAAK;AAAA,CAAI;AAAA,EACnD,CAAA,EACAA,KAAK;AAAA,CAAI;AACd;AAMO,SAASoyB,cAA2Cf,OAA2B;AACpF,QAAMgB,OAAmB,CAAC;AAGjBC,WAAAA,QAAQtB,OAAYuB,OAAmBF,MAAM;AAEhD,QAAA,CAACrB,MAAK5wB,KAAK0H,QAAQ;AAChByqB,WAAKlB,UAAOkB,KAAKlB,QAAQ,CAAA,IAG9BkB,KAAKlB,MAAMpuB,KAAK+tB,KAAI;AACpB;AAAA,IAAA;AAGI,UAAA,CAACxrB,SAAS,GAAGosB,IAAI,IAAIZ,MAAK5wB,MAC1BmoB,MAAM2H,aAAa,CAAC1qB,OAAO,CAAC;AAG7B+sB,SAAK5B,aAAU4B,KAAK5B,WAAW,CAC9BpI,IAAAA,OAAOgK,KAAK5B,aAAW4B,KAAK5B,SAASpI,GAAG,IAAI,KAElD+J,QAAQ;AAAA,MAAC,GAAGtB;AAAAA,MAAM5wB,MAAMwxB;AAAAA,IAAAA,GAAOW,KAAK5B,SAASpI,GAAG,CAAC;AAAA,EAAA;AAGxCyI,aAAAA,SAAQK,MAAOiB,SAAQtB,KAAI;AAC/BqB,SAAAA;AACT;AC/HA,MAAMG,QAAQC,gBAAO,CAAC;AASf,SAASC,aAAa;AAAA,EAC3BzwB,OAAAA;AAAAA,EACA0wB;AAAAA,EACAvD;AAAAA,EACAw
D,aAAa;AACwD,GAAW;AAChF,UAAQvM,MAAMC,QAAQqM,OAAO,IAAIA,UAAU,CAACA,OAAO,GAChDhuB,IAAKkuB,CAAAA,iBACAA,aAAatrB,SAAS,gBACjB,CACL,CACEurB,MAAM,eAAe,QAAQ7wB,MAAK,GAClC,OAAO4wB,aAAa/tB,KAAO,MAAc,OAAO7C,OAAM0W,UAAUka,aAAa/tB,EAAE,CAAC,EAE/E+Q,OAAOyC,OAAO,EACdtY,KAAK,GAAG,GACXkxB,OACEwB,aAAa;AAAA,IACXzwB,OAAAA;AAAAA,IACA0wB,SAASE,aAAatM;AAAAA,IACtB6I;AAAAA,IACAwD;AAAAA,EAAAA,CACD,CACH,CAAC,EACD5yB,KAAK;AAAA;AAAA,CAAM,IAER+yB,qBAAqB;AAAA,IAC1B9wB,OAAAA;AAAAA,IACA0wB,SAASE;AAAAA,IACTzD;AAAAA,IACAwD;AAAAA,EAAAA,CACD,CACF,EACA5yB,KAAK;AAAA;AAAA,CAAM;AAChB;AAEA,SAASgzB,cAAcC,KAAqC;AAC1D,SAAO,OAAOA,OAAQ,WAAWA,MAAMA,IAAI3C;AAC7C;AAEA,SAAS4C,WAAWjxB,QAAckxB,SAAyB;AAClB,SAAA;AAAA,IACrCC,MAAMnxB,OAAMoxB,QAAQC;AAAAA,IACpBC,aAAatxB,OAAMuxB,QAAQF,MAAM/e;AAAAA,IACjCkf,kBAAkBxxB,OAAMyxB,SAASJ,MAAM/e;AAAAA,IACvCof,aAAa1xB,OAAM2xB,MAAMN,MAAM/e;AAAAA,IAGnB4e,OAAO;AACvB;AAEA,SAASL,MAAMe,OAAeV,SAAkBlxB,QAAsB;AAC/DuwB,SAAAA,QAIEU,WAAWjxB,QAAOkxB,OAAO,EAAE,IAAIU,KAAK,GAAG,IAHrC,IAAIA,KAAK;AAIpB;AAEA,MAAMC,iBAAmD;AAAA,EACvDvmB,QAAQ;AAAA,EACRmZ,mBAAmB;AAAA,EACnBpd,iBAAiB;AAAA,EACjBgW,QAAQ;AAAA,EACRyU,OAAO;AACT;AAEA,SAASC,WAAWC,UAAwC;AAC1D,MAAI,QAAQA;AACV,WAAOA,SAASnvB;AAGlB,MAAI,cAAcmvB;AAChB,WAAOA,SAASC,SAASrvB;AAI7B;AAEA,MAAMsvB,gBAAgB,IAAIC,KAAKC,WAAW,SAAS;AAAA,EACjD9sB,MAAM;AACR,CAAC;AAED,SAAS+sB,eAAeryB,QAAcgyB,UAAoB7E,WAA8B;AACtF,QAAMmF,eAAezB,MAAMmB,SAAS1sB,MAAMusB,eAAeG,SAAS1sB,IAAI,GAAGtF,MAAK,GAExEuyB,eACJ,cAAcP,YAAY7E,UAAUjC,gBAChC2F,MACE,cAAcmB,WACVA,SAASC,SAAS3qB,QAClB4qB,cAAcM,OAAOrF,UAAUjC,iBAAiB,CAAE,CAAA,GACtD,QACAlrB,MACF,IACA;AAGN,SAAO,CAACsyB,cAAcC,cAAcvyB,OAAM0W,UAAUqb,WAAWC,QAAQ,CAAC,CAAC,EACtEpe,OAAOyC,OAAO,EACdtY,KAAK,GAAG;AACb;AAEO,SAAS+yB,qBAAqB;AAAA,EACnC9wB,OAAAA;AAAAA,EACA0wB;AAAAA,EACAvD;AAAAA,EACAwD,aAAa;AACa,GAAW;AAC/B8B,QAAAA,OACJ,aAAa/B,UAAU1wB,OAAMgE,KAAK,iBAAiB0sB,QAAQjmB,SAASioB,UAAU,GAAG,IAAI,IACjFC,SAAS,CAACN,eAAeryB,QAAO0wB,SAASvD,SAAS,GAAGsF,IAAI,EAAE10B,KAAK,GAAG,GACnE60B,UAAU,IAAI/C,OAAOc,UAAU;AAErC,MACED,QAAQprB,SAAS,YACjBorB,QAAQprB,SAAS,uBACjBorB,QAAQprB,SAAS;AAEjB,WAAO,CAACqtB,QAAQ;AAAA,GAAM1D,OAAOzvB,KAAKC,UAAUixB,QAAQuB,UAAU,MAAM,CAAC,GAAGtB,UAAU,CAAC,EAAE5yB,KAAK,EAAE;AAG1F2yB,MAAAA,QAAQprB,SAAS,SAAS;AAC5B,UAAMgrB,OAAOH,cAAyBO,QAAQmC,QAAQjtB,KAAM,CAAA,GACtDopB,gBAAgBvrB,KAAKJ,IAAIorB,aAAa6B,KAAK5B,QAAQ,IAAI,GAAG,EAAE;AAElE,WAAO,CACLiE,QACA;AAAA,GACA7D,WAAsB;AAAA,MACpBC,MAAMuB,KAAK5B;AAAAA,MACXM;AAAAA,MACAC,QAAQ2D;AAAAA,MACRvD,YAAayC,CAAAA,UAAUgB,oBAAoB9yB,QAAO8xB,KAAK;AAAA,IAAA,CACxD,CAAC,EACF/zB,KAAK,EAAE;AAAA,EAAA;AAGJ40B,SAAAA;AACT;AAEA,SAASG,oBAAoB9yB,QAAc8xB,OAA0B;AAC7D,QAAA;AAAA,IAACiB;AAAAA,EAAAA,IAAMjB,OACPkB,gBAAgBhzB,OAAMsS,KAAKygB,GAAGztB,IAAI;AACxC,MAAIytB,GAAGztB,SAAS;AACd,WAAO,GAAGtF,OAAMyF,IAAIutB,aAAa,CAAC;AAEpC,MAAID,GAAGztB,SAAS;AACd,WAAO,GAAGtF,OAAMuW,OAAOyc,aAAa,CAAC,IAAID,GAAGptB,KAAK;AAEnD,MAAIotB,GAAGztB,SAAS,SAASytB,GAAGztB,SAAS;AACnC,WAAO,GAAGtF,OAAMuW,OAAOyc,aAAa,CAAC,IAAID,GAAGE,MAAM;AAEpD,MAAIF,GAAGztB,SAAS;AACP,WAAA,GAAGtF,OAAMuW,OAAOyc,aAAa,CAAC,IAAIxzB,KAAKC,UAAUszB,GAAGptB,KAAK,CAAC;AAEnE,MAAIotB,GAAGztB,SAAS;AACP,WAAA,GAAGtF,OAAMiM,MAAM+mB,aAAa,CAAC,IAAIxzB,KAAKC,UAAUszB,GAAGptB,KAAK,CAAC;AAElE,MAAIotB,GAAGztB,SAAS;AACd,WAAO,GAAGtF,OAAMiM,MAAM+mB,aAAa,CAAC,IAAID,GAAGG,QAAQ,KAAKnC,cACtDgC,GAAGI,aACL,CAAC,KAAK3zB,KAAKC,UAAUszB,GAAGK,KAAK,CAAC;AAEhC,MAAIL,GAAGztB,SAAS;AACd,WAAO,GAAGtF,OAAMuW,OAAOyc,aAAa,CAAC,IAAIjC,cAAcgC,GAAGI,aAAa,CAAC,KAAK3zB,KAAKC,UAChFszB,GAAGK,KACL,CAAC;AAEH,MAAIL,GAAGztB,SAAS;AACP,WAAA,GAAGtF,OAAMyF,IAAIutB,aAAa,CAAC,IAAID,GAAGM,UAAU,KAAKN,GAAGO,QAAQ;AAGrE,QAAM,IAAIr3B,MAAM,2BAA2B82B,GAAGztB,IAAI,EAAE;AACtD;AAEA,SAAS2pB
,OAAOyB,SAAiBhtB,QAAO,GAAW;AAC3CkvB,QAAAA,UAAU,IAAI/C,OAAOnsB,KAAI;AAE/B,SAAOgtB,QACJhT,MAAM;AAAA,CAAI,EACVhb,IAAK6wB,CAAAA,SAASX,UAAUW,IAAI,EAC5Bx1B,KAAK;AAAA,CAAI;AACd;ACtLA,MAAMgK,aAAW;AAAA;AAAA;AAAA,oGAGmFyrB,QAAwB,wBAAA,cAAcC,oCAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6BtK,SAAS1hB,cAAc3U,MAAyB;AAC9C,SAAO4U,uBAAMC,QAAAA,QAAQ7U,KAAKoJ,QAAQ7J,QAAQ6J,IAAI,EAAEpE,MAAM,CAAC,CAAC,EACrDqI,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAK,EACnDoC,QAAQ,eAAe;AAAA,IAACnF,MAAM;AAAA,IAAU+C,SAASorB,QAAAA;AAAAA,EAAAA,CAA6B,EAC9EhpB,QAAQ,YAAY;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAA,CAAK,EACpDoC,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACnCmF,QAAQ,eAAe;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACvCmF,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,EAAA,CAAS,EACnCmF,QAAQ,WAAW;AAAA,IAACnF,MAAM;AAAA,IAAW+C,SAAS;AAAA,EAAK,CAAA,EAAE7B;AAC1D;AAEA,MAAMktB,sBAAyD;AAAA,EAC7D13B,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA;AAAA,EAEbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQuL;AAAAA,MAAQ/I,OAAAA;AAAAA,MAAOtC;AAAAA,IAAWL,IAAAA,SAC9C,CAACwF,EAAE,IAAIzF,KAAKqJ,oBACZktB,0BAA0Bx1B,cAAKJ,QAAAA,KAAKL,SAASotB,oBAAoB,GAEjEntB,QAAQ,MAAMoU,cAAc3U,IAAI,GAEhCw2B,aAAaj2B,MAAMi2B,YACnBC,MAAMl2B,MAAMm2B,QACZhxB,UAAUnF,MAAMmF,SAChBikB,UAAUppB,MAAMopB;AAEtB,QAAKjkB,WAAW,CAACikB,WAAaA,WAAW,CAACjkB;AAClC,YAAA,IAAI7G,MAAM,qEAAqE;AAGvF,QAAI,CAAC4G,IAAI;AACPrF,aAAOC,MAAMuC,OAAMyF,IAAI,sCAAsC,CAAC;AAC9D,YAAMunB,aAAa,MAAMC,kBAAkBvvB,OAAO,GAC5CiX,QAAQ,IAAIC,0BAAM;AAAA,QACtBE,OAAO;AAAA,QACPD,SAAS,CACP;AAAA,UAAC7Y,MAAM;AAAA,UAAM8Y,OAAO;AAAA,UAAMC,WAAW;AAAA,QAAA,GACrC;AAAA,UAAC/Y,MAAM;AAAA,UAAS8Y,OAAO;AAAA,UAASC,WAAW;AAAA,QAAO,CAAA;AAAA,MAAA,CAErD;AAEDiY,iBAAW/oB,QAASipB,CAAqB,qBAAA;AACvCvY,cAAMK,OAAO;AAAA,UAACnS,IAAIqqB,iBAAiBrqB;AAAAA,UAAIiS,OAAOoY,iBAAiBC,UAAUrY;AAAAA,QAAAA,CAAM;AAAA,MAAA,CAChF,GACDH,MAAMQ,cACN3X,OAAOuC,MAAM,sDAAsD;AAEnE;AAAA,IAAA;AAISstB,kBAAA;AAAA,MACPtN,QAAQ,OAAOpjB,QAAQuC,QAAQkD,MAAM,CAAC,CAAC;AAAA,MACvCkrB,WAAW;AAAA,QAAC,kBAAkB;AAAA,MAAA;AAAA,IAAI,CACnC;AAGGQ,UAAAA,aAAazB,uBAAuB3uB,SAASmF,EAAE,GAC/CkxB,kBAAkBjG,WAAWla,OAAO+Y,yBAAyB;AAEnE,QAAIoH,gBAAgBluB,SAAS;AAErB,YAAA,IAAI5J,MACR,kCAAkC4G,EAAE,QAAQ7C,OAAMgE,KAAK2vB,uBAAuB,CAAC;AAAA,KAAU7F,WACtFprB,IAAKqrB,CAAAA,cAAc5vB,cAAK61B,QAAAA,SAASL,yBAAyB5F,UAAUtB,YAAY,CAAC,EACjF1uB,KAAK;AAAA,IAAO,CAAC,EAClB;AAGI6uB,UAAAA,SAASmH,gBAAgB,CAAC;AAChC,QAAI,CAACnH;AACG,YAAA,IAAI3wB,MACR,2BAA2B4G,EAAE,QAAQ7C,OAAMgE,KAAKhE,OAAMgE,KAAK2vB,uBAAuB,CAAC,CAAC;AAAA;AAAA;AAAA,KAC1D7F,WAC9BprB,IAAKqrB,CAAAA,cAAc5vB,cAAK61B,QAAAA,SAASL,yBAAyB5F,UAAUtB,YAAY,CAAC,EACjF1uB,KAAK;AAAA,IAAO,CAAC,EACX;AAGF,UAAM2uB,MAAME,OAAOF;AACf,QAAA,QAAQA,OAAO,UAAUA;AAGrB,YAAA,IAAIzwB,MACR,8EACF;AAGF,UAAMkxB,YAAuBT,IAAIrkB;AAEjC,QAAIurB,cAAc,CAACC;AACX,YAAA,IAAI53B,MAAM,wDAAwD;AAG1E,UAAMqX,cAAc3V,MAAM2V;AAC1B,QAAIA,gBAAgBjI,QAAW;AAC7B,UAAIiI,cAAckgB,QAAAA;AAChB,cAAM,IAAIv3B,MACR,oDAAoDu3B,QAAAA,wBAAwB,EAC9E;AAGF,UAAIlgB,gBAAgB;AAClB,cAAM,IAAIrX,MAAM,8CAA8CqX,WAAW,EAAE;AAAA,IAAA;AAI/E,UAAM2gB,gBAAgB7vB,UAAU;AAAA,MAC9BE,aAAa;AAAA,MACbC,gBAAgB;AAAA,IACjB,CAAA,EAAEG,OAAO;AAEN,QAAA,CAACqiB,WAAW,CAACkN,cAAclxB;AACvB,YAAA,IAAI9G,MACR,6GACF;AAGF,UAAMi4B,YAAY;AAAA,MAChBpxB,SAASA,WAAWmxB,cAAcnxB;AAAAA,MAClCC,WAAWgkB,WAAWkN,cAAclxB;AAAAA,MACpCoxB,SAASF,cAAcE;AAAAA,MACvB3oB,OAAOyoB,cAAczoB;AAAAA,MACrB/G,YAAY;AAAA,IACd;AACA,QAAIovB,KAAK;AACO,oBAAA;AACd;AAAA,IAAA;AAUF,QAPAr2B,OAAOuC,MACL;AAAA,EAAKC,OAAMuW,OAAOvW,OAAMsS,KAAK,qDAAqD,CAAC,CAAC,EACtF,GACA9U,OAAOuC,MACL,wDAAwDC,OAAMgE,KAAK,eAAe,CAAC;AAAA,CACrF,GAEIrG,MAAMy2B,WAQJ,CA
Pa,MAAMrrB,OAAOM,OAAgB;AAAA,MAC5CvJ,SAAS,kCAAkCE,OAAMuW,OAC/CvW,OAAMsS,KAAK4hB,UAAUpxB,OAAO,CAC9B,CAAC,eAAe9C,OAAMuW,OAAOvW,OAAMsS,KAAK4hB,UAAUnxB,SAAS,CAAC,CAAC;AAAA,MAC7DuC,MAAM;AAAA,IAAA,CACP,GAEc;AACb2E,cAAM,wBAAwB;AAC9B;AAAA,IAAA;AAIJ,UAAMvL,UAAUlB,OAAOkB,QAAQ,sBAAsBmE,EAAE,GAAG,EAAE9G,MAAM;AAClE,UAAMs4B,YAAI;AAAA,MAAClN,KAAK+M;AAAAA,MAAW5gB;AAAAA,MAAaiL,YAAY+V,eAAe51B,OAAO;AAAA,IAAA,GAAIyuB,SAAS,GACvFzuB,QAAQ61B,KAAK;AAEb,aAASD,eAAe/hB,iBAAoD;AAC1E,aAAO,SAAoBpF,WAA6B;AAClD,YAAA,CAACxP,MAAMwP,UAAU;AACnBoF,0BAAgBgiB,KAAK;AACrB;AAAA,QAAA;AAEF,YAAIpnB,UAASqnB,MAAM;AACDnjB,0BAAAA,OAAO,cAAcxO,EAAE;AAAA;AAAA,iBAEhC7C,OAAMsS,KAAK4hB,UAAUnxB,SAAS,CAAC;AAAA,iBAC/B/C,OAAMsS,KAAK4hB,UAAUpxB,OAAO,CAAC;AAAA;AAAA,IAE1CqK,UAAS+W,SAAS;AAAA,IAClB/W,UAASmX,SAAS;AAAA,IAClBtkB,OAAMiM,MAAMkB,UAASsnB,sBAAsB5uB,MAAM,CAAC,4BAC5C0M,gBAAgBmiB,eAAe;AAAA,YAACC,QAAQ30B,OAAMiM,MAAM,QAAG;AAAA,UAAA,CAAE;AACzD;AAAA,QAAA;AAGD,SAAC,MAAM,GAAGkB,UAASynB,mBAAmB,EAAE3wB,QAASmD,CAAgB,gBAAA;AAChEmL,0BAAgBlB,OAAO,sBAAsBxO,EAAE,KAAKgxB,MAAM,mBAAmB,KAAK;AAAA;AAAA,oBAExE7zB,OAAMsS,KAAK4hB,UAAUnxB,SAAS,CAAC;AAAA,oBAC/B/C,OAAMsS,KAAK4hB,UAAUpxB,OAAO,CAAC;AAAA,oBAC7B9C,OAAMsS,KAAK6a,UAAUjC,eAAentB,KAAK,GAAG,CAAC,CAAC;AAAA;AAAA,IAE9DoP,UAAS+W,SAAS;AAAA,IAClB/W,UAASmX,SAAS;AAAA,IAClBtkB,OAAM60B,KAAK1nB,UAAS2nB,OAAO,CAAC;AAAA,IAC5B90B,OAAMiM,MAAMkB,UAASsnB,sBAAsB5uB,MAAM,CAAC;AAAA;AAAA,IAGlDuB,eAAe,CAAC+F,UAASqnB,OACrB,QAAK/D,aAAa;AAAA,YAACzwB,OAAAA;AAAAA,YAAO0wB,SAAStpB;AAAAA,YAAa+lB;AAAAA,YAAWwD,YAAY;AAAA,UAAA,CAAE,CAAC,KAC1E,EAAC;AAAA,QAAA,CAEA;AAAA,MACH;AAAA,IAAA;AAGF,mBAAeoE,gBAAgB;AAC7Bv3B,aAAOuC,MAAM,sBAAsB8C,EAAE,eAAe,GAEhD+wB,cACFp2B,OAAOuC,MAAM,gBAAgBC,OAAMgE,KAAK4vB,UAAU,CAAC,EAAE,GAGvDp2B,OAAOuC,MAAM,GACbvC,OAAOuC,MAAM,gBAAgBC,OAAMsS,KAAK4hB,UAAUnxB,SAAS,CAAC,EAAE,GAC9DvF,OAAOuC,MAAM,gBAAgBC,OAAMsS,KAAK4hB,UAAUpxB,OAAO,CAAC,EAAE;AAE5D,uBAAiBkvB,YAAY8B,eAAO;AAAA,QAAC3M,KAAK+M;AAAAA,QAAWc,YAAYpB;AAAAA,SAAazG,SAAS;AAChF6E,qBACLx0B,OAAOuC,MAAAA,GACPvC,OAAOuC,MACL0wB,aAAa;AAAA,UACXzwB,OAAAA;AAAAA,UACA0wB,SAASsB;AAAAA,UACT7E;AAAAA,QACD,CAAA,CACH;AAAA,IAAA;AAAA,EAEJ;AAEJ,GC7QMplB,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXktB,iBAAuC;AAAA,EAC3Cj5B,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,aAEsB,MAAM4L,mBAAiB,GAExB7L,MAAMC,OAAO;AAAA,EAEpC0K,UAAAA;AACF;AAEA,eAAekB,qBAAmB;AAUpB,UAAA,MAAM;mBAAO,oBAAqC;AAAA,EAAA,CAAA,GAEnDZ;AACb;AC9CA,MAAMP,gBAAc,gCAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAgBXmtB,sBAAsB;AAAA,EAC1Bl5B,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,yBAAyC;AAAA,EAAA,CAAA,GAEvDgL,QAAQjL,MAA2DC,OAAO;AAEzF,GC/BMyK,gBAAc,8EAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXotB,uBAA6C;AAAA,EACjDn5B,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,oBAAoC;AAAA,EAAA,CAAA,GAElDgL,QAAQjL,MAAMC,OAAO;AAEpC;AC7BA,IAAe,cAAA;AAAA,EACbrB,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf;ACDA,MAAMA,gBAAc,6CAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAgBXqtB,qBAAqB;AAAA,EACzBp5B,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,aACP,MAAM;;MAEPgL,QAAQjL,MAAyDC,OAAO;AAEvF,GC7BMyK,gBAAc,0CAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAiBXstB,qBAAqB;AAAA,EACzBr5B,MAAM;AAAA,EACNiM,OAAO;AAAA,EAC
PL,WAAW;AAAA,EAAA,aACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,YAAY;AAC/B,UAAMqvB,MAAM,MAAM;;QAEZ4I,eAAe;AAAA,MACnB,GAAGl4B;AAAAA,MACHQ,YAAY;AAAA,QACV,GAAGR,KAAKQ;AAAAA,QACR,mBAAmB;AAAA,MAAA;AAAA,IAEvB;AAEO8uB,WAAAA,IAAIrkB,QACTitB,cACAj4B,OACF;AAAA,EAAA;AAEJ,GC3CMyK,cAAc,wDAEdC,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAiBX0f,2BAAiD;AAAA,EACrDzrB,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXE;AAAAA,EAAAA,UACAC;AAAAA,EACAG,QAAQ,OAAO9K,MAAMC,aACP,MAAM,QAAA,QAAA,EAAA,KAAA,WAAA;AAAA,WAAA,QAAO,qBAAqC;AAAA,EAAA,CAAA,GAEnDgL,QAAQjL,MAAMC,OAAO;AAEpC,GCtBM0K,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcXwtB,eAAqC;AAAA,EACzCv5B,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXE,aAAa;AAAA,EACbI,QAAQ,OACN9K,MACAC,YACG;AACG,UAAA;AAAA,MAACG;AAAAA,MAAQwC,OAAAA;AAAAA,MAAO+I;AAAAA,IAAM,IAAI1L,SAC1B2L,gBAAgB,MAAMC,oBAEtBE,OAAQD,CAAAA,QAAgB1L,OAAO2L,KAAKnJ,OAAMuW,OAAOnN,QAAQF,GAAG,CAAC,GAC7DzL,QAASyL,CAAgB1L,QAAAA,OAAO2L,KAAKnJ,OAAMyF,IAAI2D,QAAQF,GAAG,CAAC;AACjEC,SAAK,gXAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,oEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,oEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,yEAA+D,GACpEA,KAAK,gXAA+D,GACpEA,KAAK,EAAE;AAEH,QAAA;AACIH,YAAAA,cAAc5L,MAAMC,OAAO;AAAA,aAC1BE,KAAK;AACZ,UAAIA,IAAIvB,SAAS;AACTuB,cAAAA;AAGFA,YAAAA,IAAIuC,OAAO,GACjBrC,MAAM;AAAA,CAAI,GAGRgL,iBACC,MAAMM,OAAOM,OAAO;AAAA,QACnBvJ,SAAS;AAAA,QACTwF,MAAM;AAAA,MAAA,CACP,IAID,OADkB,MAAMkD,gBACRpL,MAAMC,OAAO,IAI7BV,QAAQ2M,KAAK,CAAC;AAAA,IAAA;AAAA,EAGpB;AAAA,EACAvB,UAAAA;AACF;AAEA,eAAekB,mBAAmB;AAUpB,UAAA,MAAM;mBAAO,oBAAqC;AAAA,EAAA,CAAA,GAEnDZ;AACb;AC5FO,SAASmtB,mBAAmB11B,SAAiB;AAClD,SAAQvC,CAAsE,QAAA;AAC5E,UAAIA,IAAIqM,eAAe,QACrBrM,IAAIuC,UAAUA,UACRvC;AAAAA,EAIV;AACF;ACJA,MAAMwK,aAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAmBX0tB,oBAAuD;AAAA,EAC3Dz5B,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EAAA,UACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQuL;AAAAA,IAAU1L,IAAAA,SAC9B,CAACq4B,aAAa,IAAIt4B,KAAKqJ,oBACvB9I,QAAQP,KAAKQ,YAEbyG,SAASD,UAAYuc,EAAAA,MAAAA,EAAQjc,OAAO;AAAA,MAACixB,oBAAoB;AAAA,MAAOlxB,YAAY;AAAA,IAAA,CAAa,GACzF;AAAA,MAAC1B;AAAAA,IAAAA,IAAasB,OAAOK,UACrBkxB,SAAS,MAAMvxB,OAAOsH,QAAgB;AAAA,MAACI,KAAK,aAAahJ,SAAS;AAAA,IAAA,CAAS,GAAG6Q,OACjFiiB,CAAAA,UAASA,MAAKC,cACjB,GACMC,QAAQL,iBAAkB,MAAMM,eAAejtB,MAAM,GACrDktB,eAAet4B,MAAMk4B,QAAS,MAAMK,cAAcntB,QAAQ6sB,KAAK,GAC/DC,OAAOD,MAAMnuB,KAAK,CAAC;AAAA,MAACzL;AAAAA,UAAUA,KAAKqO,YAAAA,MAAkB4rB,aAAa5rB,aAAa;AACrF,QAAI,CAACwrB;AACH,YAAM,IAAI55B,MAAM,cAAcg6B,YAAY,aAAa;AAGnD5xB,UAAAA,OACHsc,MAAM,EACNhV,QAAQ;AAAA,MACPC,QAAQ;AAAA,MACRG,KAAK,wBAAwBhJ,SAAS;AAAA,MACtCgH,MAAM;AAAA,QAACgsB;AAAAA,QAAOF,MAAMA,KAAK75B;AAAAA,MAAI;AAAA,MAC7Bm6B,cAAc;AAAA,MACdxmB,cAAc;AAAA,IACf,CAAA,EACAmP,MACC0W,mBACE,yGACF,CACF,GAEFh4B,OAAOuC,MAAM,sBAAsBg2B,KAAK,EAAE;AAAA,EAAA;AAE9C;AAIA,SAASC,eAAejtB,QAAsC;AAC5D,SAAOA,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACT8T,QAASwiB,CAAQA,QAAAA,IAAI3c,KAAK;AAAA,IAC1B/O,UAAW1O,UACL,CAACA,QAAQ,CAACA,KAAKga,SAAS,GAAG,IACtB,kBAGF;AAAA,EAAA,CAEV;AACH;AAEA,SAASkgB,cAAcntB,QAAqB6sB,OAAgC;AAC1E,SAAO7sB,OAAOM,OAAO;AAAA,IACnB/D,MAAM;AAAA,IACNxF,SAAS;AAAA,IACToL,SAAS0qB,MAAMlzB,IAAKmzB,CAAU,UAAA;AAAA,MAC5BlwB,OAAOkwB,KAAK75B;AAAAA,MACZA,MAAM,GAAG65B,KAAK/gB,KAAK,KAAK+gB,KAAK/tB,WAAW;AAAA,IAAA,EACxC;AAAA,EAAA,CACH;AACH;ACvFA,MAAMuuB,aAAa,CAAC,MAAM,QAAQ,QAAQ,MAAM,GAE1CtuB,WAAW;AAAA;AAAA;AAAA;AAAA,mDAIkCsuB,WAAWt4B,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAclEu4B,mBAAyC;AAAA,EAC7C
t6B,MAAM;AAAA,EACNiM,OAAO;AAAA,EACPL,WAAW;AAAA,EACXG;AAAAA,EACAD,aAAa;AAAA,EACbI,QAAQ,OAAO9K,MAAMC,YAAY;AACzB,UAAA;AAAA,MAAC+G;AAAAA,MAAW5G;AAAAA,MAAQwC,OAAAA;AAAAA,QAAS3C,SAC7B;AAAA,MAACk5B;AAAAA,MAAMC;AAAAA,MAAOC;AAAAA,MAAQC;AAAAA,IAAAA,IAAe;AAAA,MACzCH,MAAM;AAAA,MACNC,OAAO;AAAA,MACPC,QAAQ;AAAA,MACRC,aAAa;AAAA,MACb,GAAGt5B,KAAKQ;AAAAA,IACV;AAEI,QAAA,CAACy4B,WAAWrgB,SAASugB,IAAI;AACrB,YAAA,IAAIt6B,MAAM,wBAAwBs6B,IAAI,qBAAqBF,WAAWt4B,KAAK,IAAI,CAAC,EAAE;AAGtFy4B,QAAAA,UAAU,SAASA,UAAU;AAC/B,YAAM,IAAIv6B,MAAM,uBAAuBu6B,KAAK,mCAAmC;AAGjF,UAAMnyB,SAASD,aACTuyB,eAAetyB,OAAOsc,QAAQjc,OAAO;AAAA,MAACixB,oBAAoB;AAAA,IAAA,CAAM,GAChE;AAAA,MAAC5yB;AAAAA,QAAasB,OAAOK,OAAAA,GAErByxB,eAAe,IACf,CAACS,oBAAoB7P,OAAO,IAAI,MAAMpmB,QAAQY,IAAI,CACtDm1B,cACIC,aACGhrB,QAAkB;AAAA,MAACI,KAAK,wBAAwBhJ,SAAS;AAAA,MAAIozB;AAAAA,IAAAA,CAAa,EAC1Ezd,KAAKme,qBAAqB,IAC7B,CAAA,GACJF,aAAahrB,QAAgC;AAAA,MAACI,KAAK,aAAahJ,SAAS;AAAA,MAAIozB;AAAAA,IAAa,CAAA,CAAC,CAC5F,GAEKW,YAAY/P,QAAQgQ,QAAQr0B,IAAKs0B,CAAAA,WAAWA,OAAOn0B,EAAE,GACrDo0B,QAAQ,MAAMN,aACjBhrB,QAAuB;AAAA,MAACI,KAAK,UAAU+qB,UAAU/4B,KAAK,GAAG,CAAC;AAAA,MAAIo4B;AAAAA,IAAAA,CAAa,EAC3Ezd,KAAMwe,UAAU9S,MAAMC,QAAQ6S,IAAI,IAAIA,OAAO,CAACA,IAAI,CAAE,GASjDH,UAAU,CAAC,GAPMhQ,QAAQgQ,QAC5Br0B,IAAKs0B,CAAY,YAAA;AAAA,MAChB,GAAGA;AAAAA,MACH,GAAGG,aAAaF,MAAMxvB,KAAMsmB,eAAcA,UAAUlrB,OAAOm0B,OAAOn0B,EAAE,CAAC;AAAA,IAAA,EACrE,EACD+Q,OAAQojB,CAAW,WAAA,CAACA,OAAOI,WAAWX,MAAM,GAEX,GAAGG,kBAAkB,GAEnDp0B,UAAUC,gBACds0B,QAAAA,QAAQr0B,IAAI,CAAC;AAAA,MAACG;AAAAA,MAAI7G;AAAAA,MAAM65B;AAAAA,MAAMzgB;AAAAA,IAAAA,MAAU,CAACvS,IAAI7G,MAAM65B,MAAMzgB,IAAI,CAAC,GAC9D,CAACihB,WAAWpX,QAAQsX,IAAI,CAAC,CAC3B,GAEMrzB,OAAOszB,UAAU,QAAQh0B,UAAUA,QAAQW,QAAQ,GAEnDC,YAAYF,KAAK9B,OACrB,CAACiC,KAAKC,QAAQA,IAAIZ,IAAI,CAACa,SAASC,UAAUC,KAAKJ,IAAIK,cAAAA,QAAKH,OAAO,GAAGF,IAAIG,KAAK,CAAC,CAAC,GAC7E6yB,WAAW3zB,IAAKiB,CAAQD,QAAAA,cAAAA,QAAKC,GAAG,CAAC,CACnC,GAEMC,WAAYN,CAAkB,QAAA;AAC5B+zB,YAAAA,WAAW/zB,IAAI,CAAC,MAAM,aACtBg0B,UAAUh0B,IAAIZ,IAAI,CAACmB,KAAKC,MAAM,GAAGD,GAAG,GAAGE,OAAOX,UAAUU,CAAC,CAAC,CAAC,EAAE/F,KAAK,KAAK;AAC7E,aAAOs5B,WAAWr3B,OAAMu3B,IAAID,OAAO,IAAIA;AAAAA,IACzC;AAEA95B,WAAOuC,MAAMC,OAAMgE,KAAKJ,SAASyyB,UAAU,CAAC,CAAC,GAC7CnzB,KAAKe,QAASX,SAAQ9F,OAAOuC,MAAM6D,SAASN,GAAG,CAAC,CAAC;AAAA,EAAA;AAErD;AAEA,SAAS6zB,aAAaD,MAAwB;AACtC,QAAA;AAAA,IAACM,aAAax7B;AAAAA,IAAMmD,WAAWiW;AAAAA,EAAI,IAAI8hB,QAAQ,CAAC;AAC/C,SAAA;AAAA,IAACl7B,MAAMA,QAAQ;AAAA,IAAIoZ,MAAMA,QAAQ;AAAA,EAAE;AAC5C;AAEA,SAASyhB,sBAAsBH,aAAuB;AACpD,SAAOA,YACJ9iB,OAAQ6jB,CAAW,WAAA,CAACA,OAAOC,cAAc,CAACD,OAAOE,aAAa,CAACF,OAAOG,gBAAgB,EACtFl1B,IAAK+0B,CAAY,YAAA;AAAA,IAChB50B,IAAI;AAAA,IACJ7G,MAAMy7B,OAAO1B;AAAAA,IACbF,MAAM4B,OAAO5B;AAAAA,IACbzgB,MAAMqiB,OAAOt4B;AAAAA,EAAAA,EACb;AACN;AChHO,MAAM04B,aAAwC;AAAA,EACnD77B,MAAM;AAAA,EACN4L,WAAW;AAAA,EACXC,aAAa;AAAA,EACbC,aAAa;AACf,GC2DMgwB,eAAqE,CACzEnwB,UACAW,kBACAC,eACAP,iBACAc,iBACAyM,cACAwiB,cACA1V,eACAC,iBACAF,qBACAzF,sBACAM,0BACAgB,sBACA2B,sBACAxC,sBACAlB,oBACA8b,cACAC,oBACAC,0BACAhmB,uBACAimB,6BACAC,4BACAhhB,WACAM,wBACAP,sBACAE,yBACAwgB,YACApC,mBACAa,kBACA3N,WACA+B,kBACA3C,mBACAsQ,gBACA3M,wBACAgI,qBACA4E,sBACAjQ,mBACAmB,qBACAZ,yBACA7C,gBACAc,qBACA0R,uBACA9S,wBACAlD,wBACAkF,4BACAI,cACAC,wBACAF,yBACAD,yBACA9e,YACA0sB,cACAiD,aACAC,0BACAtD,sBACAF,gBACAvN,aACAgR,eACA9N,sBAAsB,GAIlB+N,yBAAyB,CAACvD,oBAAoBC,oBAAoBH,mBAAmB,GAGrF0D,WAAiE,CACrE,GAAGd,cACH,GAAI3zB,uBAAuBw0B,yBAAyB,EAAG,GAO5CE,qBAAqB;AAAA,EAChCC,yBAAyB;AAAA,EACzBF;AACF;;;;;;;;;;;;;;"}