sanity 5.0.0-next-major.14 → 5.0.0-next-major.20251210134624
This diff shows the contents of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two published versions as they appear in that registry.
- package/bin/sanity +2 -1
- package/lib/_chunks-es/LiveQueries.js +1 -1
- package/lib/_chunks-es/LiveQueries.js.map +1 -1
- package/lib/_chunks-es/MenuGroup.js +3 -3
- package/lib/_chunks-es/MenuGroup.js.map +1 -1
- package/lib/_chunks-es/PostMessagePreviewSnapshots.js +2 -2
- package/lib/_chunks-es/PostMessagePreviewSnapshots.js.map +1 -1
- package/lib/_chunks-es/PresentationToolGrantsCheck.js +54 -54
- package/lib/_chunks-es/PresentationToolGrantsCheck.js.map +1 -1
- package/lib/_chunks-es/QRCodeSVG.js +1 -1
- package/lib/_chunks-es/QRCodeSVG.js.map +1 -1
- package/lib/_chunks-es/StructureToolProvider.js +1 -1
- package/lib/_chunks-es/StructureToolProvider.js.map +1 -1
- package/lib/_chunks-es/VideoPlayer.js +1 -1
- package/lib/_chunks-es/VideoPlayer.js.map +1 -1
- package/lib/_chunks-es/ViteDevServerStopped.js +6 -6
- package/lib/_chunks-es/ViteDevServerStopped.js.map +1 -1
- package/lib/_chunks-es/_internal.js +1 -1
- package/lib/_chunks-es/_internal.js.map +1 -1
- package/lib/_chunks-es/deployApiAction.js +1 -1
- package/lib/_chunks-es/deployApiAction.js.map +1 -1
- package/lib/_chunks-es/execScript.js +2 -1
- package/lib/_chunks-es/execScript.js.map +1 -1
- package/lib/_chunks-es/exportAssetsAction.js +1 -1
- package/lib/_chunks-es/exportAssetsAction.js.map +1 -1
- package/lib/_chunks-es/extractAction.js +2 -1
- package/lib/_chunks-es/extractAction.js.map +1 -1
- package/lib/_chunks-es/extractManifestAction.js +2 -1
- package/lib/_chunks-es/extractManifestAction.js.map +1 -1
- package/lib/_chunks-es/generate-help-url.esm.js +1 -1
- package/lib/_chunks-es/generate-help-url.esm.js.map +1 -1
- package/lib/_chunks-es/getGraphQLAPIs.js +2 -1
- package/lib/_chunks-es/getGraphQLAPIs.js.map +1 -1
- package/lib/_chunks-es/helpers.js +2 -1
- package/lib/_chunks-es/helpers.js.map +1 -1
- package/lib/_chunks-es/index2.js +1 -1
- package/lib/_chunks-es/index2.js.map +1 -1
- package/lib/_chunks-es/index3.js +8 -8
- package/lib/_chunks-es/index3.js.map +1 -1
- package/lib/_chunks-es/package.js +1 -1
- package/lib/_chunks-es/pane.js +140 -140
- package/lib/_chunks-es/pane.js.map +1 -1
- package/lib/_chunks-es/presentation.js +4 -4
- package/lib/_chunks-es/presentation.js.map +1 -1
- package/lib/_chunks-es/previewServer.js +2 -1
- package/lib/_chunks-es/previewServer.js.map +1 -1
- package/lib/_chunks-es/runtime.js +14 -13
- package/lib/_chunks-es/runtime.js.map +1 -1
- package/lib/_chunks-es/validateAction.js +2 -1
- package/lib/_chunks-es/validateAction.js.map +1 -1
- package/lib/_chunks-es/validateAction2.js +2 -1
- package/lib/_chunks-es/validateAction2.js.map +1 -1
- package/lib/_chunks-es/version.js +1 -1
- package/lib/_internal/cli/threads/validateDocuments.cjs +1 -1
- package/lib/_internal/cli/threads/validateDocuments.cjs.map +1 -1
- package/lib/_singletons.d.ts +82 -2
- package/lib/_singletons.js +6 -2
- package/lib/_singletons.js.map +1 -1
- package/lib/desk.d.ts +28 -2
- package/lib/index.d.ts +30 -1
- package/lib/index.js +3739 -3306
- package/lib/index.js.map +1 -1
- package/lib/presentation.d.ts +14 -1
- package/lib/structure.d.ts +14 -1
- package/package.json +34 -34
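For reference, a changed-file listing like the one above can be reproduced locally. Below is a minimal sketch, assuming the pacote package (npm's registry/tarball client) is installed: it extracts both published versions into temp directories and reports added, removed, and byte-changed files. Producing the per-file +/- line counts shown above would additionally require a line-level diff; npm's built-in `npm diff --diff=sanity@5.0.0-next-major.14 --diff=sanity@5.0.0-next-major.20251210134624` prints the full unified diff directly.

import {mkdtempSync, readdirSync, readFileSync, statSync} from 'node:fs'
import {tmpdir} from 'node:os'
import {join, relative} from 'node:path'
import pacote from 'pacote'

// Recursively collect file paths relative to the extraction root.
function walk(dir: string, root = dir): string[] {
  return readdirSync(dir).flatMap((name) => {
    const full = join(dir, name)
    return statSync(full).isDirectory() ? walk(full, root) : [relative(root, full)]
  })
}

async function main() {
  const before = mkdtempSync(join(tmpdir(), 'sanity-before-'))
  const after = mkdtempSync(join(tmpdir(), 'sanity-after-'))
  await pacote.extract('sanity@5.0.0-next-major.14', before)
  await pacote.extract('sanity@5.0.0-next-major.20251210134624', after)

  const beforeFiles = new Set(walk(before))
  const afterFiles = new Set(walk(after))
  for (const file of [...new Set([...beforeFiles, ...afterFiles])].sort()) {
    if (!beforeFiles.has(file)) console.log(`added:   ${file}`)
    else if (!afterFiles.has(file)) console.log(`removed: ${file}`)
    else if (!readFileSync(join(before, file)).equals(readFileSync(join(after, file))))
      console.log(`changed: ${file}`)
  }
}

main().catch(console.error)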
package/lib/_chunks-es/validateAction2.js.map
@@ -1 +1 @@
- {"version":3,"file":"validateAction2.js","sources":["../../src/_internal/cli/actions/validation/reporters/jsonReporter.ts","../../src/_internal/cli/actions/validation/reporters/ndjsonReporter.ts","../../src/_internal/cli/actions/validation/reporters/prettyReporter/util.ts","../../src/_internal/cli/actions/validation/reporters/prettyReporter/formatDocumentValidation.ts","../../src/_internal/cli/actions/validation/reporters/prettyReporter/prettyReporter.ts","../../src/_internal/cli/actions/validation/reporters/index.ts","../../src/_internal/cli/util/workerChannels.ts","../../src/_internal/cli/actions/validation/validateDocuments.ts","../../src/_internal/cli/actions/validation/validateAction.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
+ {"version":3,"file":"validateAction2.js","sources":["../../src/_internal/cli/actions/validation/reporters/jsonReporter.ts","../../src/_internal/cli/actions/validation/reporters/ndjsonReporter.ts","../../src/_internal/cli/actions/validation/reporters/prettyReporter/util.ts","../../src/_internal/cli/actions/validation/reporters/prettyReporter/formatDocumentValidation.ts","../../src/_internal/cli/actions/validation/reporters/prettyReporter/prettyReporter.ts","../../src/_internal/cli/actions/validation/reporters/index.ts","../../src/_internal/cli/util/workerChannels.ts","../../src/_internal/cli/actions/validation/validateDocuments.ts","../../src/_internal/cli/actions/validation/validateAction.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n }\n}\n","import path from 'node:path'\nimport {fileURLToPath} from 'node:url'\nimport {Worker} from 'node:worker_threads'\n\nimport {type ClientConfig} from '@sanity/client'\nimport {type ValidationMarker} from '@sanity/types'\nimport readPkgUp from 'read-pkg-up'\n\nimport {\n type ValidateDocumentsWorkerData,\n type ValidationWorkerChannel,\n} from '../../threads/validateDocuments'\nimport {createReceiver, type WorkerChannelReceiver} from '../../util/workerChannels'\n\nconst __dirname = path.dirname(fileURLToPath(import.meta.url))\n\nexport interface 
ValidateDocumentsOptions<TReturn = unknown> {\n level?: 'error' | 'warning' | 'info'\n workspace?: string\n workDir?: string\n configPath?: string\n clientConfig?: Partial<ClientConfig>\n projectId?: string // override\n dataset?: string // override\n ndjsonFilePath?: string\n maxCustomValidationConcurrency?: number\n maxFetchConcurrency?: number\n reporter?: (worker: WorkerChannelReceiver<ValidationWorkerChannel>) => TReturn\n studioHost?: string\n}\n\nexport interface DocumentValidationResult {\n documentId: string\n documentType: string\n revision: string\n level: ValidationMarker['level']\n markers: ValidationMarker[]\n}\n\nconst defaultReporter = ({stream, dispose}: WorkerChannelReceiver<ValidationWorkerChannel>) => {\n async function* createValidationGenerator() {\n for await (const {documentId, documentType, markers, revision, level} of stream.validation()) {\n const result: DocumentValidationResult = {\n documentId,\n documentType,\n revision,\n level,\n markers,\n }\n\n yield result\n }\n\n await dispose()\n }\n\n return createValidationGenerator()\n}\n\nexport function validateDocuments<TReturn>(\n options: ValidateDocumentsOptions<TReturn> &\n Required<Pick<ValidateDocumentsOptions<TReturn>, 'reporter'>>,\n): TReturn\nexport function validateDocuments(\n options: ValidateDocumentsOptions,\n): AsyncIterable<DocumentValidationResult>\nexport function validateDocuments(options: ValidateDocumentsOptions): unknown {\n const {\n workspace,\n clientConfig,\n configPath,\n dataset,\n projectId,\n workDir = process.cwd(),\n reporter = defaultReporter,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n ndjsonFilePath,\n } = options\n\n const rootPkgPath = readPkgUp.sync({cwd: __dirname})?.path\n if (!rootPkgPath) {\n throw new Error('Could not find root directory for `sanity` package')\n }\n\n const workerPath = path.join(\n path.dirname(rootPkgPath),\n 'lib',\n '_internal',\n 'cli',\n 'threads',\n 'validateDocuments.cjs',\n )\n\n const worker = new Worker(workerPath, {\n workerData: {\n workDir,\n // removes props in the config that make this object fail to serialize\n clientConfig: JSON.parse(JSON.stringify(clientConfig)),\n configPath,\n workspace,\n dataset,\n projectId,\n level,\n ndjsonFilePath,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n studioHost: options.studioHost,\n } satisfies ValidateDocumentsWorkerData,\n env: process.env,\n })\n\n return reporter(createReceiver<ValidationWorkerChannel>(worker))\n}\n","import fs from 'node:fs'\nimport path from 'node:path'\n\nimport {\n type CliCommandArguments,\n type CliCommandContext,\n type CliConfig,\n type CliOutputter,\n} from '@sanity/cli'\nimport {type ClientConfig} from '@sanity/client'\nimport chalk from 'chalk'\nimport logSymbols from 'log-symbols'\n\nimport {type ValidationWorkerChannel} from '../../threads/validateDocuments'\nimport {type WorkerChannelReceiver} from '../../util/workerChannels'\nimport {reporters} from './reporters'\nimport {validateDocuments} from './validateDocuments'\n\ninterface ValidateFlags {\n 'workspace'?: string\n 'format'?: string\n 'dataset'?: string\n 'file'?: string\n 'level'?: 'error' | 'warning' | 'info'\n 'max-custom-validation-concurrency'?: number\n 'max-fetch-concurrency'?: number\n 'yes'?: boolean\n 'y'?: boolean\n}\n\nexport type BuiltInValidationReporter = (options: {\n output: CliOutputter\n worker: WorkerChannelReceiver<ValidationWorkerChannel>\n flags: ValidateFlags\n}) => Promise<'error' | 'warning' | 'info'>\n\nexport default async function 
validateAction(\n args: CliCommandArguments<ValidateFlags>,\n {apiClient, workDir, output, cliConfig, prompt}: CliCommandContext,\n): Promise<void> {\n const flags = args.extOptions\n const unattendedMode = Boolean(flags.yes || flags.y)\n\n if (!unattendedMode) {\n output.print(\n `${chalk.yellow(`${logSymbols.warning} Warning:`)} This command ${\n flags.file\n ? 'reads all documents from your input file'\n : 'downloads all documents from your dataset'\n } and processes them through your local schema within a ` +\n `simulated browser environment.\\n`,\n )\n output.print(`Potential pitfalls:\\n`)\n output.print(\n `- Processes all documents locally (excluding assets). Large datasets may require more resources.`,\n )\n output.print(\n `- Executes all custom validation functions. Some functions may need to be refactored for compatibility.`,\n )\n output.print(\n `- Not all standard browser features are available and may cause issues while loading your Studio.`,\n )\n output.print(\n `- Adheres to document permissions. Ensure this account can see all desired documents.`,\n )\n if (flags.file) {\n output.print(\n `- Checks for missing document references against the live dataset if not found in your file.`,\n )\n }\n\n const confirmed = await prompt.single<boolean>({\n type: 'confirm',\n message: `Are you sure you want to continue?`,\n default: true,\n })\n\n if (!confirmed) {\n output.print('User aborted')\n process.exitCode = 1\n return\n }\n }\n\n if (flags.format && !(flags.format in reporters)) {\n const formatter = new Intl.ListFormat('en-US', {\n style: 'long',\n type: 'conjunction',\n })\n throw new Error(\n `Did not recognize format '${flags.format}'. Available formats are ${formatter.format(\n Object.keys(reporters).map((key) => `'${key}'`),\n )}`,\n )\n }\n\n const level = flags.level || 'warning'\n\n if (level !== 'error' && level !== 'warning' && level !== 'info') {\n throw new Error(`Invalid level. 
Available levels are 'error', 'warning', and 'info'.`)\n }\n\n const maxCustomValidationConcurrency = flags['max-custom-validation-concurrency']\n if (\n maxCustomValidationConcurrency &&\n typeof maxCustomValidationConcurrency !== 'number' &&\n !Number.isInteger(maxCustomValidationConcurrency)\n ) {\n throw new Error(`'--max-custom-validation-concurrency' must be an integer.`)\n }\n\n const maxFetchConcurrency = flags['max-fetch-concurrency']\n if (\n maxFetchConcurrency &&\n typeof maxFetchConcurrency !== 'number' &&\n !Number.isInteger(maxFetchConcurrency)\n ) {\n throw new Error(`'--max-fetch-concurrency' must be an integer.`)\n }\n\n const clientConfig: Partial<ClientConfig> = {\n ...apiClient({\n requireUser: true,\n requireProject: false, // we'll get this from the workspace\n }).config(),\n // we set this explictly to true because the default client configuration\n // from the CLI comes configured with `useProjectHostname: false` when\n // `requireProject` is set to false\n useProjectHostname: true,\n // we set this explictly to true because we pass in a token via the\n // `clientConfiguration` object and also mock a browser environment in\n // this worker which triggers the browser warning\n ignoreBrowserTokenWarning: true,\n }\n\n let ndjsonFilePath\n if (flags.file) {\n if (typeof flags.file !== 'string') {\n throw new Error(`'--file' must be a string`)\n }\n const filePath = path.resolve(workDir, flags.file)\n\n const stat = await fs.promises.stat(filePath)\n if (!stat.isFile()) {\n throw new Error(`'--file' must point to a valid ndjson file or tarball`)\n }\n\n ndjsonFilePath = filePath\n }\n\n const overallLevel = await validateDocuments({\n workspace: flags.workspace,\n dataset: flags.dataset,\n clientConfig,\n workDir,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n ndjsonFilePath,\n reporter: (worker) => {\n const reporter =\n flags.format && flags.format in reporters\n ? reporters[flags.format as keyof typeof reporters]\n : reporters.pretty\n\n return reporter({output, worker, flags})\n },\n studioHost: (cliConfig as CliConfig)?.studioHost,\n })\n\n process.exitCode = overallLevel === 'error' ? 
1 : 0\n}\n"],"names":["arrayFromAsync","iterable","results","item","push","json","output","worker","formatted","stream","validation","filter","markers","length","map","validatedCount","result","dispose","print","JSON","stringify","overallLevel","level","ndjson","isTty","isatty","levelValues","error","warning","info","count","amount","subject","toLocaleString","slice","Math","max","percentageFormatter","Intl","NumberFormat","style","minimumFractionDigits","maximumFractionDigits","percent","value","format","min","secondFormatter","seconds","startTime","endTime","Date","now","summary","errors","infos","valid","warnings","levelValue","logSymbols","success","documents","Boolean","join","levelHeaders","chalk","bold","bgRed","black","red","bgYellow","yellow","cyan","link","text","url","underline","compareLevels","a","b","formatRootErrors","root","hasChildren","paddingLength","nodes","first","rest","sort","firstElbow","firstPadding","repeat","firstLine","message","subsequentPadding","subsequentElbow","restOfLines","marker","formatDocumentValidation","documentId","documentType","intentUrl","tree","convertToTree","documentTypeHeader","bgWhite","header","maxKeyLength","children","childErrors","formatTree","node","getNodes","getMessage","rootErrors","pretty","flags","workspaceLoadStart","spinner","workspace","start","event","loadedWorkspace","succeed","name","projectId","dataset","file","documentCount","loadedDocumentCount","downloadStart","downloadedCount","exportProgress","percentage","totalDocumentsToValidate","exportFinished","referenceIntegrityStart","loadedReferenceIntegrity","validationStart","totals","localeCompare","reporters","MessageQueue","resolver","queue","done","next","Promise","resolve","shift","end","undefined","isWorkerChannelMessage","type","includes","createReceiver","_events","Map","_streams","eventQueue","get","has","set","streamQueue","handleMessage","handleError","addListener","Proxy","target","race","payload","prop","streamReceiver","removeListener","terminate","__dirname","path","dirname","fileURLToPath","import","defaultReporter","createValidationGenerator","revision","validateDocuments","options","clientConfig","configPath","workDir","process","cwd","reporter","maxCustomValidationConcurrency","maxFetchConcurrency","ndjsonFilePath","rootPkgPath","readPkgUp","sync","Error","workerPath","Worker","workerData","parse","studioHost","env","validateAction","args","apiClient","cliConfig","prompt","extOptions","yes","y","single","default","exitCode","formatter","ListFormat","Object","keys","key","Number","isInteger","requireUser","requireProject","config","useProjectHostname","ignoreBrowserTokenWarning","filePath","fs","promises","stat","isFile"],"mappings":";;;;;;;;;AAGA,eAAeA,eAAkBC,UAA4B;AAC3D,QAAMC,UAAe,CAAA;AACrB,mBAAiBC,QAAQF,SAAUC,SAAQE,KAAKD,IAAI;AACpD,SAAOD;AACT;AAEO,MAAMG,OAAkC,OAAO;AAAA,EAACC;AAAAA,EAAQC;AAAM,MAAM;AAEzE,QAAMC,aADU,MAAMR,eAAeO,OAAOE,OAAOC,WAAAA,CAAY,GAG5DC,OAAO,CAAC;AAAA,IAACC;AAAAA,EAAAA,MAAaA,QAAQC,MAAM,EAEpCC,IAAI,CAAC;AAAA,IAACC;AAAAA,IAAgB,GAAGC;AAAAA,EAAAA,MAAYA,MAAM;AAE9C,QAAMT,OAAOU,WAEbX,OAAOY,MAAMC,KAAKC,UAAUZ,SAAS,CAAC;AAEtC,MAAIa,eAA6C;AAEjD,aAAW;AAAA,IAACC;AAAAA,EAAAA,KAAUd;AAChBc,cAAU,YAASD,eAAe,UAClCC,UAAU,aAAaD,iBAAiB,YAASA,eAAe;AAGtE,SAAOA;AACT,GC3BaE,SAAoC,OAAO;AAAA,EAACjB;AAAAA,EAAQC;AAAM,MAAM;AAC3E,MAAIc,eAA6C;AAEjD,mBAAiB;AAAA,IAACN;AAAAA,IAAgB,GAAGC;AAAAA,EAAAA,KAAWT,OAAOE,OAAOC,WAAAA;AACxDM,WAAOM,UAAU,YAASD,eAAe,UACzCL,OAAOM,UAAU,aAAaD,iBAAiB,YAASA,eAAe,YAEvEL,OAAOJ,QAAQC,UACjBP,OAAOY,MAAMC,KAAKC,UAAUJ,MAAM,CAAC;AAIvC,SAAA,MAAMT,OAAOU,
WAENI;AACT,GCDaG,QAAQC,OAAO,CAAC,GAChBC,cAAc;AAAA,EAACC,OAAO;AAAA,EAAGC,SAAS;AAAA,EAAGC,MAAM;AAAC,GAM5CC,QAAQA,CAACC,QAAgBC,YACpC,GAAGD,OAAOE,eAAe,OAAO,CAAC,IAC/BF,WAAW,IAAIC,QAAQE,MAAM,GAAGC,KAAKC,IAAI,GAAGJ,QAAQnB,SAAS,CAAC,CAAC,IAAImB,OAAO,IAGxEK,sBAAsB,IAAIC,KAAKC,aAAa,SAAS;AAAA,EACzDC,OAAO;AAAA,EACPC,uBAAuB;AAAA,EACvBC,uBAAuB;AACzB,CAAC,GAKYC,UAAWC,WAA0BP,oBAAoBQ,OAAOV,KAAKW,IAAIF,OAAO,CAAC,CAAC,GAEzFG,kBAAkB,IAAIT,KAAKC,aAAa,SAAS;AAAA,EACrDE,uBAAuB;AAAA,EACvBC,uBAAuB;AACzB,CAAC,GAMYM,UAAWC,CAAAA,cAA8B;AACpD,QAAMC,UAAUC,KAAKC,IAAAA;AACrB,SAAO,IAAIL,gBAAgBF,QAAQK,UAAUD,aAAa,GAAI,CAAC;AACjE,GAaaI,UAAUA,CACrB;AAAA,EAACC;AAAAA,EAAQC;AAAAA,EAAOC;AAAAA,EAAOC;AAA0B,GACjDnC,QAAe,cACJ;AACX,QAAMoC,aAAahC,YAAYJ,KAAK;AAEpC,SAAO,CACL,GAAGqC,WAAWC,OAAO,cAAc9B,MAAM0B,MAAMK,WAAW,WAAW,CAAC,IACtE,GAAGF,WAAWhC,KAAK,cAAcG,MAAMwB,OAAOO,WAAW,WAAW,CAAC,KAAK/B,MACxEwB,OAAO1C,SACP,QACF,CAAC,IACD8C,cAAchC,YAAYE,WACxB,GAAG+B,WAAW/B,OAAO,cAAcE,MAAM2B,SAASI,WAAW,WAAW,CAAC,KAAK/B,MAC5E2B,SAAS7C,SACT,UACF,CAAC,IACH8C,cAAchC,YAAYG,QACxB,GAAG8B,WAAW9B,IAAI,cAAcC,MAAMyB,MAAMM,WAAW,WAAW,CAAC,KAAK/B,MACtEyB,MAAMM,WACN,SACF,CAAC,EAAE,EAEJlD,OAAOmD,OAAO,EACdC,KAAK;AAAA,CAAI;AACd,GCvEMC,eAAe;AAAA,EACnBrC,OAAOH,QAAQyC,MAAMC,KAAKD,MAAME,MAAMF,MAAMG,MAAM,SAAS,CAAC,CAAC,IAAIH,MAAMI,IAAI,SAAS;AAAA,EACpFzC,SAASJ,QAAQyC,MAAMC,KAAKD,MAAMK,SAASL,MAAMG,MAAM,QAAQ,CAAC,CAAC,IAAIH,MAAMM,OAAO,QAAQ;AAAA,EAC1F1C,MAAML,QAAQyC,MAAMC,KAAKD,MAAMO,KAAKP,MAAMG,MAAM,QAAQ,CAAC,CAAC,IAAIH,MAAMO,KAAK,QAAQ;AACnF,GAKMC,OAAOA,CAACC,MAAcC,QAC1BnD,QAAQ,WAAamD,GAAG,OAASD,IAAI,iBAAqBT,MAAMW,UAAUF,IAAI,GAK1EG,gBAAgBA,CAA4CC,GAAMC,MACtErD,YAAYoD,EAAExD,KAAK,IAAII,YAAYqD,EAAEzD,KAAK,GAKtC0D,mBAAmBA,CAACC,MAAsBC,aAAsBC,kBAA0B;AAC9F,MAAI,CAACF,KAAKG,MAAO,QAAO;AAExB,QAAM,CAACC,OAAO,GAAGC,IAAI,IAAIL,KAAKG,MAAMlD,MAAAA,EAAQqD,KAAKV,aAAa;AAC9D,MAAI,CAACQ,MAAO,QAAO;AAEnB,QAAMG,aAAaN,cAAc,YAAO,gBAClCO,eAAe,IAAIC,OAAOP,gBAAgB,CAAC,GAC3CQ,YAAY,GAAGH,UAAU,WAAWC,YAAY,IAAI9B,WAAW0B,MAAM/D,KAAK,CAAC,IAC/E+D,MAAMO,OAAO,IAETC,oBAAoB,IAAIH,OAAOP,gBAAgB,CAAC,GAChDW,kBAAkBZ,cAAc,YAAO,MAEvCa,cAAcT,KACjBxE,IACEkF,CAAAA,WACC,GAAGF,eAAe,GAAGD,iBAAiB,IAAIlC,WAAWqC,OAAO1E,KAAK,CAAC,IAAI0E,OAAOJ,OAAO,EACxF,EACC7B,KAAK;AAAA,CAAI;AACZ,SAAO,CAAC4B,WAAWI,WAAW,EAAEpF,OAAOmD,OAAO,EAAEC,KAAK;AAAA,CAAI;AAC3D;AAKO,SAASkC,yBAAyB;AAAA,EACvCC;AAAAA,EACAC;AAAAA,EACA7E;AAAAA,EACAV;AAAAA,EACAwF;AAC+B,GAAW;AAC1C,QAAMC,OAAOC,cAAsB1F,OAAO,GAEpC2F,qBAAqB/E,QACvByC,MAAMuC,QAAQvC,MAAMG,MAAM,IAAI+B,YAAY,GAAG,CAAC,IAC9C,IAAIA,YAAY,KAEdM,SAAS,GAAGzC,aAAa1C,KAAK,CAAC,IAAIiF,kBAAkB,IACzDH,YAAY3B,KAAKyB,YAAYE,SAAS,IAAInC,MAAMW,UAAUsB,UAAU,CAAC,IAGjEf,gBAAgBhD,KAAKC,IAAIsE,aAAaL,KAAKM,QAAQ,IAAI,GAAG,EAAE,GAE5DC,cAAcC,WAAmB;AAAA,IACrCC,MAAMT,KAAKM;AAAAA,IACXxB;AAAAA,IACA4B,UAAUA,CAAC;AAAA,MAAC3B;AAAAA,IAAAA,OAAYA,SAAS,CAAA,GAAIlD,MAAAA,EAAQqD,KAAKV,aAAa;AAAA,IAC/DmC,YAAahB,CAAAA,WAAW,CAACrC,WAAWqC,OAAO1E,KAAK,GAAG0E,OAAOJ,OAAO,EAAE7B,KAAK,GAAG;AAAA,EAAA,CAC5E,GAEKkD,aAAajC,iBAAiBqB,MAAMO,YAAY/F,SAAS,GAAGsE,aAAa;AAE/E,SAAO,CAACsB,QAAQQ,YAAYL,WAAW,EAAEjG,OAAOmD,OAAO,EAAEC,KAAK;AAAA,CAAI;AACpE;AC9EO,MAAMmD,SAAoC,OAAO;AAAA,EAAC5G;AAAAA,EAAQC;AAAAA,EAAQ4G;AAAK,MAAM;AAClF,QAAMC,qBAAqBjE,KAAKC,OAE1BiE,UAAU/G,OACb+G,QAAQF,MAAMG,YAAY,sBAAsBH,MAAMG,SAAS,YAAO,yBAAoB,EAC1FC,SAEGD,YAAY,MAAM/G,OAAOiH,MAAMC,gBAAAA;AAOrC,MANAJ,QAAQK,QACN,qBAAqBJ,UAAUK,IAAI,oBAAoBL,UAAUM,SAAS,kBACxET,MAAMU,WAAWP,UAAUO,OAAO,KAC/B7E,QAAQoE,kBAAkB,CAAC,EAClC,GAEI,CAACD,MAAMW,MAAM;AAEfT,YAAQE,MAAM,6CAAwC;AACtD,UAAM;AAAA,MAACQ;AAAAA,IAAAA,IAAiB,MAAMxH,OAAOiH,MAAMQ,uBAGrCC,gBAAgB9E,KAAKC,IAAAA;AAC3BiE,YAAQ3C,OAAO,eAAe5C,MAAMiG,eAAe,WAAW,CAAC;AAC/D,qBAA
iB;AAAA,MAACG;AAAAA,IAAAA,KAAoB3H,OAAOE,OAAO0H,kBAAkB;AACpE,YAAMC,aAAazF,QAAQuF,kBAAkBH,aAAa;AAC1DV,cAAQ3C,OAAO,eAAe5C,MAAMiG,eAAe,WAAW,CAAC,UAAKK,UAAU;AAAA,IAChF;AACAf,YAAQK,QAAQ,cAAc5F,MAAMiG,eAAe,WAAW,CAAC,IAAI/E,QAAQiF,aAAa,CAAC,EAAE;AAAA,EAC7F;AAEA,QAAM;AAAA,IAACI;AAAAA,EAAAA,IAA4B,MAAM9H,OAAOiH,MAAMc,kBAEhDC,0BAA0BpF,KAAKC,IAAAA;AACrCiE,UAAQE,MAAM,oCAA+B,GAC7C,MAAMhH,OAAOiH,MAAMgB,yBAAAA,GACnBnB,QAAQK,QAAQ,0BAA0B1E,QAAQuF,uBAAuB,CAAC,EAAE;AAG5E,QAAME,kBAAkBtF,KAAKC,IAAAA;AAC7BiE,UAAQE,MAAM,cAAczF,MAAMuG,0BAA0B,WAAW,CAAC,QAAG;AAE3E,QAAMnI,UAAsC,CAAA,GAEtCwI,SAAS;AAAA,IACblF,OAAO;AAAA,MAACK,WAAW;AAAA,IAAA;AAAA,IACnBP,QAAQ;AAAA,MAACO,WAAW;AAAA,MAAGjD,SAAS;AAAA,IAAA;AAAA,IAChC6C,UAAU;AAAA,MAACI,WAAW;AAAA,MAAGjD,SAAS;AAAA,IAAA;AAAA,IAClC2C,OAAO;AAAA,MAACM,WAAW;AAAA,MAAGjD,SAAS;AAAA,IAAA;AAAA,EAAC;AAGlC,mBAAiB;AAAA,IAACG;AAAAA,IAAgB,GAAGC;AAAAA,EAAAA,KAAWT,OAAOE,OAAOC,cAAc;AAC1E,UAAM;AAAA,MAACE;AAAAA,IAAAA,IAAWI;AAEdJ,YAAQC,UACVX,QAAQE,KAAKY,MAAM;AAGrB,UAAMsC,SAAS1C,QAAQD,OAAQqF,CAAAA,WAAWA,OAAO1E,UAAU,OAAO,GAC5DmC,WAAW7C,QAAQD,OAAQqF,YAAWA,OAAO1E,UAAU,SAAS,GAChEiC,QAAQ3C,QAAQD,OAAQqF,CAAAA,WAAWA,OAAO1E,UAAU,MAAM;AAE3DV,YAAQC,WACX6H,OAAOlF,MAAMK,aAAa,IAGxBP,OAAOzC,WACT6H,OAAOpF,OAAOO,aAAa,GAC3B6E,OAAOpF,OAAO1C,WAAW0C,OAAOzC,SAG9B4C,SAAS5C,WACX6H,OAAOjF,SAASI,aAAa,GAC7B6E,OAAOjF,SAAS7C,WAAW6C,SAAS5C,SAGlC0C,MAAM1C,WACR6H,OAAOnF,MAAMM,aAAa,GAC1B6E,OAAOnF,MAAM3C,WAAW2C,MAAM1C,SAGhCwG,QAAQ3C,OACN,cAAc5C,MAAMuG,0BAA0B,WAAW,CAAC;AAAA;AAAA,YAC7CvG,MAAMf,gBAAgB,WAAW,CAAC,KAAK4B,QAClD5B,iBAAiBsH,wBACnB,CAAC;AAAA,EAAOhF,QAAQqF,QAAQvB,MAAM7F,KAAK,CAAC;AAAA,EACxC;AAEA+F,UAAQK,QACN,aAAa5F,MAAMuG,0BAA0B,WAAW,CAAC,IAAIrF,QAAQyF,eAAe,CAAC,EACvF,GACAnI,OAAOY,MAAM;AAAA;AAAA,EAA0BmC,QAAQqF,QAAQvB,MAAM7F,KAAK,CAAC,EAAE,GAErEpB,QAAQqF,KAAK,CAACT,GAAGC,MACXD,EAAExD,UAAUyD,EAAEzD,QAAcwD,EAAEqB,aAAawC,cAAc5D,EAAEoB,YAAY,IACpEzE,YAAYoD,EAAExD,KAAK,IAAII,YAAYqD,EAAEzD,KAAK,CAClD;AAED,MAAID,eAAsB;AAE1B,aAAWL,UAAUd;AACfc,WAAOM,UAAU,YAASD,eAAe,UACzCL,OAAOM,UAAU,aAAaD,iBAAiB,YAASA,eAAe,YAE3Ef,OAAOY,MAAM,GAAG+E,yBAAyBjF,MAAM,CAAC;AAAA,CAAI;AAGtD,SAAA,MAAMT,OAAOU,WAENI;AACT,GCxHauH,YAAY;AAAA,EAAC1B;AAAAA,EAAQ3F;AAAAA,EAAQlB;AAAI;ACmE9C,MAAMwI,aAAgB;AAAA,EACpBC,WAAyD;AAAA,EACzDC,QAAa,CAAA;AAAA,EAEb3I,KAAKwF,SAAY;AACX,SAAKkD,YACP,KAAKA,SAAS;AAAA,MAAClG,OAAOgD;AAAAA,MAASoD,MAAM;AAAA,IAAA,CAAM,GAC3C,KAAKF,WAAW,QAEhB,KAAKC,MAAM3I,KAAKwF,OAAO;AAAA,EAE3B;AAAA,EAEAqD,OAAmC;AACjC,WAAI,KAAKF,MAAMlI,SACNqI,QAAQC,QAAQ;AAAA,MAACvG,OAAO,KAAKmG,MAAMK,MAAAA;AAAAA,MAAUJ,MAAM;AAAA,IAAA,CAAM,IAG3D,IAAIE,QAASC,CAAAA,YAAa,KAAKL,WAAWK,OAAQ;AAAA,EAC3D;AAAA,EAEAE,MAAM;AACA,SAAKP,YACP,KAAKA,SAAS;AAAA,MAAClG,OAAO0G;AAAAA,MAAWN,MAAM;AAAA,IAAA,CAAK;AAAA,EAEhD;AACF;AAEA,SAASO,uBAAuB3D,SAAmD;AAIjF,SAHI,OAAOA,WAAY,YACnB,CAACA,WACD,EAAE,UAAUA,YACZ,OAAOA,QAAQ4D,QAAS,WAAiB,KACrB,CAAC,SAAS,YAAY,KAAK,EACtCC,SAAS7D,QAAQ4D,IAAI;AACpC;AAOO,SAASE,eACdnJ,QACuC;AACvC,QAAMoJ,UAAU,oBAAIC,IAAAA,GACdC,WAAW,oBAAID,OACftG,SAAS,IAAIuF,aAAAA,GAEbiB,aAAcnC,CAAAA,SAAiB;AACnC,UAAMoB,QAAQY,QAAQI,IAAIpC,IAAI,KAAK,IAAIkB,aAAAA;AACvC,WAAKc,QAAQK,IAAIrC,IAAI,KAAGgC,QAAQM,IAAItC,MAAMoB,KAAK,GACxCA;AAAAA,EACT,GAEMmB,cAAevC,CAAAA,SAAiB;AACpC,UAAMoB,QAAQc,SAASE,IAAIpC,IAAI,KAAK,IAAIkB,aAAAA;AACxC,WAAKgB,SAASG,IAAIrC,IAAI,KAAGkC,SAASI,IAAItC,MAAMoB,KAAK,GAC1CA;AAAAA,EACT,GAEMoB,gBAAiBvE,CAAAA,YAAqB;AACrC2D,2BAAuB3D,OAAO,MAC/BA,QAAQ4D,SAAS,WAASM,WAAWlE,QAAQ+B,IAAI,EAAEvH,KAAKwF,OAAO,GAC/DA,QAAQ4D,SAAS,cAAYU,YAAYtE,QAAQ+B,IAAI,EAAEvH,KAAKwF,OAAO,GACnEA,QAAQ4D,SAAS,SAAOU,YAAYtE,QAAQ+B,IAAI,EAAE0B,IAAAA;AAAAA,EACxD,GAEMe,cAAezI,CAAAA,UAAmB;AACtC2B,WAAOlD,KAAK;AAAA,MAACoJ,MAAM;AAAA,MAAS7H
;AAAAA,IAAAA,CAAM;AAAA,EACpC;AAEApB,SAAAA,OAAO8J,YAAY,WAAWF,aAAa,GAC3C5J,OAAO8J,YAAY,SAASD,WAAW,GAEhC;AAAA,IACL5C,OAAO,IAAI8C,MAAM,IAAsD;AAAA,MACrEP,KAAKA,CAACQ,QAAQ5C,SACR,OAAOA,QAAS,WAAiB4C,OAAO5C,IAAI,IAEX,YAAY;AAC/C,cAAM;AAAA,UAAC/E;AAAAA,QAAAA,IAAS,MAAMsG,QAAQsB,KAAK,CAACV,WAAWnC,IAAI,EAAEsB,QAAQ3F,OAAO2F,KAAAA,CAAM,CAAC;AAC3E,YAAIrG,MAAM4G,SAAS,QAAS,OAAM5G,MAAMjB;AACxC,eAAOiB,MAAM6H;AAAAA,MACf;AAAA,IAAA,CAIH;AAAA,IACDhK,QAAQ,IAAI6J,MAAM,IAAuD;AAAA,MACvEP,KAAKA,CAACQ,QAAQG,SAAS;AACrB,YAAI,OAAOA,QAAS,SAAU,QAAOH,OAAOG,IAAI;AAChD,cAAM/C,OAAO+C;AAEb,wBAAgBC,iBAAiB;AAC/B,qBAAa;AACX,kBAAM;AAAA,cAAC/H;AAAAA,cAAOoG;AAAAA,YAAAA,IAAQ,MAAME,QAAQsB,KAAK,CAACN,YAAYvC,IAAI,EAAEsB,QAAQ3F,OAAO2F,KAAAA,CAAM,CAAC;AAClF,gBAAID,KAAM;AACV,gBAAIpG,MAAM4G,SAAS,QAAS,OAAM5G,MAAMjB;AACxC,kBAAMiB,MAAM6H;AAAAA,UACd;AAAA,QACF;AAEA,eAAOE;AAAAA,MACT;AAAA,IAAA,CACD;AAAA,IACD1J,SAASA,OACPV,OAAOqK,eAAe,WAAWT,aAAa,GAC9C5J,OAAOqK,eAAe,SAASR,WAAW,GACnC7J,OAAOsK,UAAAA;AAAAA,EAAU;AAG9B;ACzKA,MAAMC,cAAYC,KAAKC,QAAQC,cAAcC,YAAYvG,GAAG,CAAC,GAyBvDwG,kBAAkBA,CAAC;AAAA,EAAC1K;AAAAA,EAAQQ;AAAuD,MAAM;AAC7F,kBAAgBmK,4BAA4B;AAC1C,qBAAiB;AAAA,MAAClF;AAAAA,MAAYC;AAAAA,MAAcvF;AAAAA,MAASyK;AAAAA,MAAU/J;AAAAA,IAAAA,KAAUb,OAAOC,WAAAA;AAS9E,YARyC;AAAA,QACvCwF;AAAAA,QACAC;AAAAA,QACAkF;AAAAA,QACA/J;AAAAA,QACAV;AAAAA,MAAAA;AAMJ,UAAMK,QAAAA;AAAAA,EACR;AAEA,SAAOmK,0BAAAA;AACT;AASO,SAASE,kBAAkBC,SAA4C;AAC5E,QAAM;AAAA,IACJjE;AAAAA,IACAkE;AAAAA,IACAC;AAAAA,IACA5D;AAAAA,IACAD;AAAAA,IACA8D,UAAUC,QAAQC,IAAAA;AAAAA,IAClBC,WAAWV;AAAAA,IACX7J;AAAAA,IACAwK;AAAAA,IACAC;AAAAA,IACAC;AAAAA,EAAAA,IACET,SAEEU,cAAcC,UAAUC,KAAK;AAAA,IAACP,KAAKd;AAAAA,EAAAA,CAAU,GAAGC;AACtD,MAAI,CAACkB;AACH,UAAM,IAAIG,MAAM,oDAAoD;AAGtE,QAAMC,aAAatB,KAAKhH,KACtBgH,KAAKC,QAAQiB,WAAW,GACxB,OACA,aACA,OACA,WACA,uBACF,GAEM1L,SAAS,IAAI+L,OAAOD,YAAY;AAAA,IACpCE,YAAY;AAAA,MACVb;AAAAA;AAAAA,MAEAF,cAAcrK,KAAKqL,MAAMrL,KAAKC,UAAUoK,YAAY,CAAC;AAAA,MACrDC;AAAAA,MACAnE;AAAAA,MACAO;AAAAA,MACAD;AAAAA,MACAtG;AAAAA,MACA0K;AAAAA,MACAF;AAAAA,MACAC;AAAAA,MACAU,YAAYlB,QAAQkB;AAAAA,IAAAA;AAAAA,IAEtBC,KAAKf,QAAQe;AAAAA,EAAAA,CACd;AAED,SAAOb,SAASnC,eAAwCnJ,MAAM,CAAC;AACjE;AC9EA,eAA8BoM,eAC5BC,MACA;AAAA,EAACC;AAAAA,EAAWnB;AAAAA,EAASpL;AAAAA,EAAQwM;AAAAA,EAAWC;AAAyB,GAClD;AACf,QAAM5F,QAAQyF,KAAKI;AAGnB,MAAI,EAF2B7F,MAAM8F,OAAO9F,MAAM+F,OAGhD5M,OAAOY,MACL,GAAG+C,MAAMM,OAAO,GAAGZ,WAAW/B,OAAO,WAAW,CAAC,iBAC/CuF,MAAMW,OACF,6CACA,2CAA2C;AAAA,CAGnD,GACAxH,OAAOY,MAAM;AAAA,CAAuB,GACpCZ,OAAOY,MACL,kGACF,GACAZ,OAAOY,MACL,yGACF,GACAZ,OAAOY,MACL,mGACF,GACAZ,OAAOY,MACL,uFACF,GACIiG,MAAMW,QACRxH,OAAOY,MACL,8FACF,GASE,CANc,MAAM6L,OAAOI,OAAgB;AAAA,IAC7C3D,MAAM;AAAA,IACN5D,SAAS;AAAA,IACTwH,SAAS;AAAA,EAAA,CACV,IAEe;AACd9M,WAAOY,MAAM,cAAc,GAC3ByK,QAAQ0B,WAAW;AACnB;AAAA,EACF;AAGF,MAAIlG,MAAMtE,UAAU,EAAEsE,MAAMtE,UAAU+F,YAAY;AAChD,UAAM0E,YAAY,IAAIhL,KAAKiL,WAAW,SAAS;AAAA,MAC7C/K,OAAO;AAAA,MACPgH,MAAM;AAAA,IAAA,CACP;AACD,UAAM,IAAI4C,MACR,6BAA6BjF,MAAMtE,MAAM,4BAA4ByK,UAAUzK,OAC7E2K,OAAOC,KAAK7E,SAAS,EAAE9H,IAAK4M,CAAAA,QAAQ,IAAIA,GAAG,GAAG,CAChD,CAAC,EACH;AAAA,EACF;AAEA,QAAMpM,QAAQ6F,MAAM7F,SAAS;AAE7B,MAAIA,UAAU,WAAWA,UAAU,aAAaA,UAAU;AACxD,UAAM,IAAI8K,MAAM,qEAAqE;AAGvF,QAAMN,iCAAiC3E,MAAM,mCAAmC;AAChF,MACE2E,kCACA,OAAOA,kCAAmC,YAC1C,CAAC6B,OAAOC,UAAU9B,8BAA8B;AAEhD,UAAM,IAAIM,MAAM,2DAA2D;AAG7E,QAAML,sBAAsB5E,MAAM,uBAAuB;AACzD,MACE4E,uBACA,OAAOA,uBAAwB,YAC/B,CAAC4B,OAAOC,UAAU7B,mBAAmB;AAErC,UAAM,IAAIK,MAAM,+CAA+C;AAGjE,QAAMZ,eAAsC;AAAA,IAC1C,GAAGqB,UAAU;AAAA,MACXgB,aAAa;AAAA,MACbC,gBAAgB;AAAA;AAAA,IAAA,CACjB,EAAEC,OAAAA;AAAAA;AAAAA;AAAAA;AAAAA,IAIHC,oBAAoB;AAAA;AAAA;AAAA;AAAA,IAIpBC,2BAA2B;AAAA,EAAA;AAG7B,MAAIjC;AACJ,MAAI7E,MAAMW
,MAAM;AACd,QAAI,OAAOX,MAAMW,QAAS;AACxB,YAAM,IAAIsE,MAAM,2BAA2B;AAE7C,UAAM8B,WAAWnD,KAAK5B,QAAQuC,SAASvE,MAAMW,IAAI;AAGjD,QAAI,EADS,MAAMqG,GAAGC,SAASC,KAAKH,QAAQ,GAClCI,OAAAA;AACR,YAAM,IAAIlC,MAAM,uDAAuD;AAGzEJ,qBAAiBkC;AAAAA,EACnB;AAEA,QAAM7M,eAAe,MAAMiK,kBAAkB;AAAA,IAC3ChE,WAAWH,MAAMG;AAAAA,IACjBO,SAASV,MAAMU;AAAAA,IACf2D;AAAAA,IACAE;AAAAA,IACApK;AAAAA,IACAwK;AAAAA,IACAC;AAAAA,IACAC;AAAAA,IACAH,UAAWtL,CAAAA,YAEP4G,MAAMtE,UAAUsE,MAAMtE,UAAU+F,YAC5BA,UAAUzB,MAAMtE,MAAM,IACtB+F,UAAU1B,QAEA;AAAA,MAAC5G;AAAAA,MAAQC;AAAAA,MAAQ4G;AAAAA,IAAAA,CAAM;AAAA,IAEzCsF,YAAaK,WAAyBL;AAAAA,EAAAA,CACvC;AAEDd,UAAQ0B,WAAWhM,iBAAiB,UAAU,IAAI;AACpD;"}
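The old map above inlines the full `workerChannels.ts` source, which defines the event/stream channel the CLI uses to keep the parent process's control flow in lockstep with its worker thread. A minimal sketch of the pattern, assuming the `createReporter`/`createReceiver` helpers embedded above; the channel shape, payloads, and import path are illustrative only:

import {Worker, parentPort} from 'node:worker_threads'
import {
  createReceiver,
  createReporter,
  type WorkerChannel,
  type WorkerChannelEvent,
  type WorkerChannelStream,
} from './workerChannels' // illustrative path; the real module lives under _internal/cli/util

type ExampleChannel = WorkerChannel<{
  loaded: WorkerChannelEvent<{count: number}>
  progress: WorkerChannelStream<{done: number}>
}>

// Worker side: report a one-shot event, then a stream that must be ended explicitly.
function workerMain() {
  const report = createReporter<ExampleChannel>(parentPort)
  report.event.loaded({count: 3})
  for (const done of [1, 2, 3]) report.stream.progress.emit({done})
  report.stream.progress.end()
}

// Parent side: awaiting the event and iterating the stream mirrors the worker's
// control flow 1-to-1; MessageQueue buffers emissions if the worker runs ahead.
async function parentMain(worker: Worker) {
  const channel = createReceiver<ExampleChannel>(worker)
  const {count} = await channel.event.loaded()
  for await (const {done} of channel.stream.progress()) {
    console.log(`progress: ${done}/${count}`)
  }
  await channel.dispose() // removes listeners and terminates the worker
}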
@@ -7,7 +7,7 @@ try {
 try {
   buildVersion = buildVersion || // This is replaced by `@sanity/pkg-utils` at build time
   // and must always be references by its full static name, e.g. no optional chaining, no `if (process && process.env)` etc.
-  "5.0.0-next-major.
+  "5.0.0-next-major.20251210134624+f29c4bb975";
 } catch {
 }
 const SANITY_VERSION = buildVersion || `${version}-dev`;
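For context on the hunk above: per the inline comment, `@sanity/pkg-utils` injects the version string through a static build-time replacement, which is why the expression cannot be hidden behind optional chaining or guards. A hypothetical sketch of such a replace step (not pkg-utils' actual implementation; the placeholder token is invented):

// Build-time define step, sketched: every verbatim occurrence of the token is
// swapped for a string literal before bundling.
const define = {
  'process.env.PKG_VERSION': JSON.stringify('5.0.0-next-major.20251210134624+f29c4bb975'),
}

// Replaced correctly: the full static name appears in the source text.
buildVersion = buildVersion || process.env.PKG_VERSION

// Not replaced: aliasing hides the token, so the placeholder would survive
// into the published build and evaluate to undefined at runtime.
const env = process.env
buildVersion = buildVersion || env.PKG_VERSION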
@@ -307,7 +307,7 @@ async function main() {
   const validate = async (document) => {
     let markers;
     try {
-      const timeout = Symbol("timeout"), result = await Promise.race([sanity.validateDocument({
+      const timeout = /* @__PURE__ */ Symbol("timeout"), result = await Promise.race([sanity.validateDocument({
        document,
        workspace,
        getClient,
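The only change in this hunk is the `/* @__PURE__ */` annotation. Bundlers and minifiers such as Rollup, esbuild, and Terser treat an annotated call expression as side-effect-free, so it can be removed during tree-shaking when its result is unused; without the annotation an arbitrary call is conservatively retained. Illustrative before/after, not this package's actual build output:

// input to the minifier
const timeout = /* @__PURE__ */ Symbol("timeout")
export const used = 42

// if `timeout` is dead code, the annotated call can be dropped entirely:
export const used = 42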
@@ -1 +1 @@
-
{"version":3,"file":"validateDocuments.cjs","sources":["../../../../src/_internal/cli/util/extractDocumentsFromNdjsonOrTarball.ts","../../../../src/_internal/cli/util/workerChannels.ts","../../../../src/_internal/cli/threads/validateDocuments.ts"],"sourcesContent":["import path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable, type Writable} from 'node:stream'\nimport zlib from 'node:zlib'\n\nimport {type SanityDocument} from '@sanity/types'\nimport tar from 'tar-stream'\n\nconst HEADER_SIZE = 300\n\n// https://github.com/kevva/is-gzip/blob/13dab7c877787bd5cff9de5482b1736f00df99c6/index.js\nconst isGzip = (buf: Buffer) =>\n buf.length >= 3 && buf[0] === 0x1f && buf[1] === 0x8b && buf[2] === 0x08\n\n// https://github.com/watson/is-deflate/blob/f9e8f0c7814eed715e13e29e97c69acee319686a/index.js\nconst isDeflate = (buf: Buffer) =>\n buf.length >= 2 && buf[0] === 0x78 && (buf[1] === 1 || buf[1] === 0x9c || buf[1] === 0xda)\n\n// https://github.com/kevva/is-tar/blob/d295ffa2002a5d415946fc3d49f024ace8c28bd3/index.js\nconst isTar = (buf: Buffer) =>\n buf.length >= 262 &&\n buf[257] === 0x75 &&\n buf[258] === 0x73 &&\n buf[259] === 0x74 &&\n buf[260] === 0x61 &&\n buf[261] === 0x72\n\nasync function* extract<TReturn>(\n stream: AsyncIterable<Buffer>,\n extractor: Writable & AsyncIterable<TReturn>,\n) {\n // set up a task to drain the input iterable into the extractor asynchronously\n // before this function delegates to the extractor's iterable (containing the\n // result of the extraction)\n const drained = new Promise<void>((resolve, reject) => {\n // setTimeout is used here to ensure draining occurs after delegation\n setTimeout(async () => {\n try {\n for await (const chunk of stream) extractor.write(chunk)\n extractor.end()\n resolve()\n } catch (err) {\n reject(err)\n }\n })\n })\n\n // have this function delegate the results of the extractor\n yield* extractor\n await drained\n extractor.destroy()\n}\n\n/**\n * Given a async iterable of buffers, looks at the header of the file in the\n * first few bytes to see the file type then extracts the contents tries again.\n * If the given iterable of buffers is a tarball then it looks for an ndjson\n * files and returns another iterable of buffers with the contents of the\n * ndjson file\n */\nasync function* maybeExtractNdjson(stream: AsyncIterable<Buffer>): AsyncIterable<Buffer> {\n let buffer = Buffer.alloc(0)\n\n for await (const chunk of stream) {\n buffer = Buffer.concat([buffer, chunk])\n if (buffer.length < HEADER_SIZE) continue\n\n const fileHeader = buffer\n const restOfStream = async function* restOfStream() {\n yield fileHeader\n yield* stream\n }\n\n if (isGzip(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createGunzip()))\n return\n }\n\n if (isDeflate(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createDeflate()))\n return\n }\n\n if (isTar(fileHeader)) {\n for await (const entry of extract(restOfStream(), tar.extract())) {\n const filename = path.basename(entry.header.name)\n const extname = path.extname(filename).toLowerCase()\n // ignore hidden and non-ndjson files\n if (extname !== '.ndjson' || filename.startsWith('.')) continue\n\n for await (const ndjsonChunk of entry) yield ndjsonChunk\n return\n }\n }\n\n yield* restOfStream()\n }\n}\n\n/**\n * Takes in an async iterable of buffers from an ndjson file or tarball and\n * returns an async iterable of sanity documents.\n */\nexport async function* extractDocumentsFromNdjsonOrTarball(\n file: 
AsyncIterable<Buffer>,\n): AsyncIterable<SanityDocument> {\n const lines = readline.createInterface({\n input: Readable.from(maybeExtractNdjson(file)),\n })\n\n for await (const line of lines) {\n const trimmed = line.trim()\n if (trimmed) yield JSON.parse(trimmed) as SanityDocument\n }\n lines.close()\n}\n","import {type MessagePort, type Worker} from 'node:worker_threads'\n\ntype StreamReporter<TPayload = unknown> = {emit: (payload: TPayload) => void; end: () => void}\ntype EventReporter<TPayload = unknown> = (payload: TPayload) => void\ntype EventReceiver<TPayload = unknown> = () => Promise<TPayload>\ntype StreamReceiver<TPayload = unknown> = () => AsyncIterable<TPayload>\n\ntype EventKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelEvent<any> ? K : never\n}[keyof TWorkerChannel]\ntype StreamKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelStream<any> ? K : never\n}[keyof TWorkerChannel]\n\ntype EventMessage<TPayload = unknown> = {type: 'event'; name: string; payload: TPayload}\ntype StreamEmissionMessage<TPayload = unknown> = {type: 'emission'; name: string; payload: TPayload}\ntype StreamEndMessage = {type: 'end'; name: string}\ntype WorkerChannelMessage = EventMessage | StreamEmissionMessage | StreamEndMessage\n\n/**\n * Represents the definition of a \"worker channel\" to report progress from the\n * worker to the parent. Worker channels can define named events or streams and\n * the worker will report events and streams while the parent will await them.\n * This allows the control flow of the parent to follow the control flow of the\n * worker 1-to-1.\n */\nexport type WorkerChannel<\n TWorkerChannel extends Record<\n string,\n WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>\n > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,\n> = TWorkerChannel\n\nexport type WorkerChannelEvent<TPayload = void> = {type: 'event'; payload: TPayload}\nexport type WorkerChannelStream<TPayload = void> = {type: 'stream'; payload: TPayload}\n\nexport interface WorkerChannelReporter<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReporter<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReporter<TPayload>\n : void\n }\n}\n\nexport interface WorkerChannelReceiver<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReceiver<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReceiver<TPayload>\n : void\n }\n // TODO: good candidate for [Symbol.asyncDispose] when our tooling better supports it\n dispose: () => Promise<number>\n}\n\n/**\n * A simple queue that has two primary methods: `push(message)` and\n * `await next()`. 
This message queue is used by the \"receiver\" of the worker\n * channel and this class handles buffering incoming messages if the worker is\n * producing faster than the parent as well as returning a promise if there is\n * no message yet in the queue when the parent awaits `next()`.\n */\nclass MessageQueue<T> {\n resolver: ((result: IteratorResult<T>) => void) | null = null\n queue: T[] = []\n\n push(message: T) {\n if (this.resolver) {\n this.resolver({value: message, done: false})\n this.resolver = null\n } else {\n this.queue.push(message)\n }\n }\n\n next(): Promise<IteratorResult<T>> {\n if (this.queue.length) {\n return Promise.resolve({value: this.queue.shift()!, done: false})\n }\n\n return new Promise((resolve) => (this.resolver = resolve))\n }\n\n end() {\n if (this.resolver) {\n this.resolver({value: undefined, done: true})\n }\n }\n}\n\nfunction isWorkerChannelMessage(message: unknown): message is WorkerChannelMessage {\n if (typeof message !== 'object') return false\n if (!message) return false\n if (!('type' in message)) return false\n if (typeof message.type !== 'string') return false\n const types: string[] = ['event', 'emission', 'end'] satisfies WorkerChannelMessage['type'][]\n return types.includes(message.type)\n}\n\n/**\n * Creates a \"worker channel receiver\" that subscribes to incoming messages\n * from the given worker and returns promises for worker channel events and\n * async iterators for worker channel streams.\n */\nexport function createReceiver<TWorkerChannel extends WorkerChannel>(\n worker: Worker,\n): WorkerChannelReceiver<TWorkerChannel> {\n const _events = new Map<string, MessageQueue<EventMessage>>()\n const _streams = new Map<string, MessageQueue<StreamEmissionMessage>>()\n const errors = new MessageQueue<{type: 'error'; error: unknown}>()\n\n const eventQueue = (name: string) => {\n const queue = _events.get(name) ?? new MessageQueue()\n if (!_events.has(name)) _events.set(name, queue)\n return queue\n }\n\n const streamQueue = (name: string) => {\n const queue = _streams.get(name) ?? 
new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n }\n}\n","import fs from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable} from 'node:stream'\nimport {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n type ClientConfig,\n createClient,\n type SanityClient,\n type SanityDocument,\n} from '@sanity/client'\nimport {isReference, type ValidationContext, type ValidationMarker} from '@sanity/types'\nimport {isRecord, validateDocument} from 'sanity'\n\nimport {extractDocumentsFromNdjsonOrTarball} from 
'../util/extractDocumentsFromNdjsonOrTarball'\nimport {getStudioWorkspaces} from '../util/getStudioWorkspaces'\nimport {mockBrowserEnvironment} from '../util/mockBrowserEnvironment'\nimport {\n createReporter,\n type WorkerChannel,\n type WorkerChannelEvent,\n type WorkerChannelStream,\n} from '../util/workerChannels'\n\nconst MAX_VALIDATION_CONCURRENCY = 100\nconst DOCUMENT_VALIDATION_TIMEOUT = 30000\nconst REFERENCE_INTEGRITY_BATCH_SIZE = 100\n\ninterface AvailabilityResponse {\n omitted: {id: string; reason: 'existence' | 'permission'}[]\n}\n\n/** @internal */\nexport interface ValidateDocumentsWorkerData {\n workDir: string\n configPath?: string\n workspace?: string\n clientConfig?: Partial<ClientConfig>\n projectId?: string\n dataset?: string\n ndjsonFilePath?: string\n level?: ValidationMarker['level']\n maxCustomValidationConcurrency?: number\n maxFetchConcurrency?: number\n studioHost?: string\n}\n\n/** @internal */\nexport type ValidationWorkerChannel = WorkerChannel<{\n loadedWorkspace: WorkerChannelEvent<{\n name: string\n projectId: string\n dataset: string\n basePath: string\n }>\n loadedDocumentCount: WorkerChannelEvent<{documentCount: number}>\n exportProgress: WorkerChannelStream<{downloadedCount: number; documentCount: number}>\n exportFinished: WorkerChannelEvent<{totalDocumentsToValidate: number}>\n loadedReferenceIntegrity: WorkerChannelEvent\n validation: WorkerChannelStream<{\n validatedCount: number\n documentId: string\n documentType: string\n intentUrl?: string\n revision: string\n level: ValidationMarker['level']\n markers: ValidationMarker[]\n }>\n}>\n\nconst {\n clientConfig,\n workDir,\n workspace: workspaceName,\n configPath,\n dataset,\n ndjsonFilePath,\n projectId,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n studioHost,\n} = _workerData as ValidateDocumentsWorkerData\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst levelValues = {error: 0, warning: 1, info: 2} as const\n\nconst report = createReporter<ValidationWorkerChannel>(parentPort)\n\nconst getReferenceIds = (value: unknown) => {\n const ids = new Set<string>()\n\n function traverse(node: unknown) {\n if (isReference(node)) {\n ids.add(node._ref)\n return\n }\n\n if (typeof node === 'object' && node) {\n // Note: this works for arrays too\n for (const item of Object.values(node)) traverse(item)\n }\n }\n\n traverse(value)\n\n return ids\n}\n\nconst idRegex = /^[^-][A-Z0-9._-]*$/i\n\n// during testing, the `doc` endpoint 502'ed if given an invalid ID\nconst isValidId = (id: unknown) => typeof id === 'string' && idRegex.test(id)\nconst shouldIncludeDocument = (document: SanityDocument) => {\n // Filter out system documents and sanity documents\n return !document._type.startsWith('system.') && !document._type.startsWith('sanity.')\n}\n\nasync function* readerToGenerator(reader: ReadableStreamDefaultReader<Uint8Array>) {\n while (true) {\n const {value, done} = await reader.read()\n if (value) yield value\n if (done) return\n }\n}\n\nvoid main().then(() => process.exit())\n\nasync function loadWorkspace() {\n const workspaces = await getStudioWorkspaces({basePath: workDir, configPath})\n\n if (!workspaces.length) {\n throw new Error(`Configuration did not return any workspaces.`)\n }\n\n let _workspace\n if (workspaceName) {\n _workspace = workspaces.find((w) => w.name === workspaceName)\n if (!_workspace) {\n throw new Error(`Could not find any workspaces with name \\`${workspaceName}\\``)\n }\n } else {\n if 
(workspaces.length !== 1) {\n throw new Error(\n \"Multiple workspaces found. Please specify which workspace to use with '--workspace'.\",\n )\n }\n _workspace = workspaces[0]\n }\n const workspace = _workspace\n\n const client = createClient({\n ...clientConfig,\n dataset: dataset || workspace.dataset,\n projectId: projectId || workspace.projectId,\n requestTagPrefix: 'sanity.cli.validate',\n }).config({apiVersion: 'v2021-03-25'})\n\n report.event.loadedWorkspace({\n projectId: workspace.projectId,\n dataset: workspace.dataset,\n name: workspace.name,\n basePath: workspace.basePath,\n })\n\n return {workspace, client}\n}\n\nasync function downloadFromExport(client: SanityClient) {\n const exportUrl = new URL(client.getUrl(`/data/export/${client.config().dataset}`, false))\n\n const documentCount = await client.fetch('length(*)')\n report.event.loadedDocumentCount({documentCount})\n\n const {token} = client.config()\n const response = await fetch(exportUrl, {\n headers: new Headers({...(token && {Authorization: `Bearer ${token}`})}),\n })\n\n const reader = response.body?.getReader()\n if (!reader) throw new Error('Could not get reader from response body.')\n\n let downloadedCount = 0\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const lines = readline.createInterface({input: Readable.from(readerToGenerator(reader))})\n\n // Note: we stream the export to a file and then re-read from that file to\n // make this less memory intensive.\n // this is a similar pattern to the import/export CLI commands\n const slugDate = new Date()\n .toISOString()\n .replace(/[^a-z0-9]/gi, '-')\n .toLowerCase()\n const tempOutputFile = path.join(os.tmpdir(), `sanity-validate-${slugDate}.ndjson`)\n const outputStream = fs.createWriteStream(tempOutputFile)\n\n for await (const line of lines) {\n const document = JSON.parse(line) as SanityDocument\n\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n\n outputStream.write(`${line}\\n`)\n }\n\n downloadedCount++\n report.stream.exportProgress.emit({downloadedCount, documentCount})\n }\n\n await new Promise<void>((resolve, reject) =>\n outputStream.close((err) => (err ? 
reject(err) : resolve())),\n )\n\n report.stream.exportProgress.end()\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n const getDocuments = () =>\n extractDocumentsFromNdjsonOrTarball(fs.createReadStream(tempOutputFile))\n\n return {documentIds, referencedIds, getDocuments, cleanup: () => fs.promises.rm(tempOutputFile)}\n}\n\nasync function downloadFromFile(filePath: string) {\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const getDocuments = () => extractDocumentsFromNdjsonOrTarball(fs.createReadStream(filePath))\n\n for await (const document of getDocuments()) {\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n }\n }\n\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n return {documentIds, referencedIds, getDocuments, cleanup: undefined}\n}\n\ninterface CheckReferenceExistenceOptions {\n client: SanityClient\n referencedIds: Set<string>\n documentIds: Set<string>\n}\n\nasync function checkReferenceExistence({\n client,\n documentIds,\n referencedIds: _referencedIds,\n}: CheckReferenceExistenceOptions) {\n const existingIds = new Set(documentIds)\n const idsToCheck = Array.from(_referencedIds)\n .filter((id) => !existingIds.has(id) && isValidId(id))\n .sort()\n\n const batches = idsToCheck.reduce<string[][]>(\n (acc, next, index) => {\n const batchIndex = Math.floor(index / REFERENCE_INTEGRITY_BATCH_SIZE)\n const batch = acc[batchIndex]\n batch.push(next)\n return acc\n },\n Array.from<string[]>({\n length: Math.ceil(idsToCheck.length / REFERENCE_INTEGRITY_BATCH_SIZE),\n }).map(() => []),\n )\n\n for (const batch of batches) {\n const {omitted} = await client.request<AvailabilityResponse>({\n uri: client.getDataUrl('doc', batch.join(',')),\n json: true,\n query: {excludeContent: 'true'},\n tag: 'documents-availability',\n })\n\n const omittedIds = omitted.reduce<Record<string, 'existence' | 'permission'>>((acc, next) => {\n acc[next.id] = next.reason\n return acc\n }, {})\n\n for (const id of batch) {\n // unless the document ID is in the `omitted` object explictly due to\n // the reason `'existence'`, then it should exist\n if (omittedIds[id] !== 'existence') {\n existingIds.add(id)\n }\n }\n }\n report.event.loadedReferenceIntegrity()\n\n return {existingIds}\n}\n\nasync function main() {\n // note: this is dynamically imported because this module is ESM only and this\n // file gets compiled to CJS at this time\n const {default: pMap} = await import('p-map')\n\n const cleanupBrowserEnvironment = mockBrowserEnvironment(workDir)\n\n let cleanupDownloadedDocuments: (() => Promise<void>) | undefined\n\n try {\n const {client, workspace} = await loadWorkspace()\n const {documentIds, referencedIds, getDocuments, cleanup} = ndjsonFilePath\n ? 
await downloadFromFile(ndjsonFilePath)\n : await downloadFromExport(client)\n cleanupDownloadedDocuments = cleanup\n const {existingIds} = await checkReferenceExistence({client, referencedIds, documentIds})\n\n const getClient = <TOptions extends Partial<ClientConfig>>(options: TOptions) =>\n client.withConfig(options)\n\n const getDocumentExists: ValidationContext['getDocumentExists'] = ({id}) =>\n Promise.resolve(existingIds.has(id))\n\n const getLevel = (markers: ValidationMarker[]) => {\n let foundWarning = false\n for (const marker of markers) {\n if (marker.level === 'error') return 'error'\n if (marker.level === 'warning') foundWarning = true\n }\n\n if (foundWarning) return 'warning'\n return 'info'\n }\n\n let validatedCount = 0\n\n const validate = async (document: SanityDocument) => {\n let markers: ValidationMarker[]\n\n try {\n const timeout = Symbol('timeout')\n\n const result = await Promise.race([\n validateDocument({\n document,\n workspace,\n getClient,\n getDocumentExists,\n environment: 'cli',\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n }),\n new Promise<typeof timeout>((resolve) =>\n setTimeout(() => resolve(timeout), DOCUMENT_VALIDATION_TIMEOUT),\n ),\n ])\n\n if (result === timeout) {\n throw new Error(\n `Document '${document._id}' failed to validate within ${DOCUMENT_VALIDATION_TIMEOUT}ms.`,\n )\n }\n\n markers = result\n // remove deprecated `item` from the marker\n .map(({item, ...marker}) => marker)\n // filter out unwanted levels\n .filter((marker) => {\n const markerValue = levelValues[marker.level]\n const flagLevelValue =\n levelValues[level as keyof typeof levelValues] ?? levelValues.info\n return markerValue <= flagLevelValue\n })\n } catch (err) {\n const errorMessage =\n isRecord(err) && typeof err.message === 'string' ? 
err.message : 'Unknown error'\n\n const message = `Exception occurred while validating value: ${errorMessage}`\n\n markers = [\n {\n message,\n level: 'error',\n path: [],\n },\n ]\n }\n\n validatedCount++\n\n const intentUrl =\n studioHost &&\n `${studioHost}${path.resolve(\n workspace.basePath,\n `/intent/edit/id=${encodeURIComponent(document._id)};type=${encodeURIComponent(\n document._type,\n )}`,\n )}`\n\n report.stream.validation.emit({\n documentId: document._id,\n documentType: document._type,\n revision: document._rev,\n ...(intentUrl && {intentUrl}),\n markers,\n validatedCount,\n level: getLevel(markers),\n })\n }\n\n await pMap(getDocuments(), validate, {concurrency: MAX_VALIDATION_CONCURRENCY})\n\n report.stream.validation.end()\n } finally {\n await cleanupDownloadedDocuments?.()\n cleanupBrowserEnvironment()\n }\n}\n"],"names":["HEADER_SIZE","isGzip","buf","length","isDeflate","isTar","extract","stream","extractor","drained","Promise","resolve","reject","setTimeout","chunk","write","end","err","destroy","maybeExtractNdjson","buffer","Buffer","alloc","concat","fileHeader","restOfStream","zlib","createGunzip","createDeflate","entry","tar","filename","path","basename","header","name","extname","toLowerCase","startsWith","ndjsonChunk","extractDocumentsFromNdjsonOrTarball","file","lines","readline","createInterface","input","Readable","from","line","trimmed","trim","JSON","parse","close","createReporter","parentPort","Error","event","Proxy","get","target","payload","message","type","postMessage","emit","MAX_VALIDATION_CONCURRENCY","DOCUMENT_VALIDATION_TIMEOUT","REFERENCE_INTEGRITY_BATCH_SIZE","clientConfig","workDir","workspace","workspaceName","configPath","dataset","ndjsonFilePath","projectId","level","maxCustomValidationConcurrency","maxFetchConcurrency","studioHost","_workerData","isMainThread","levelValues","error","warning","info","report","getReferenceIds","value","ids","Set","traverse","node","isReference","add","_ref","item","Object","values","idRegex","isValidId","id","test","shouldIncludeDocument","document","_type","readerToGenerator","reader","done","read","main","then","process","exit","loadWorkspace","workspaces","getStudioWorkspaces","basePath","_workspace","find","w","client","createClient","requestTagPrefix","config","apiVersion","loadedWorkspace","downloadFromExport","exportUrl","URL","getUrl","documentCount","fetch","loadedDocumentCount","token","headers","Headers","Authorization","body","getReader","downloadedCount","referencedIds","documentIds","slugDate","Date","toISOString","replace","tempOutputFile","join","os","tmpdir","outputStream","fs","createWriteStream","_id","referenceId","exportProgress","exportFinished","totalDocumentsToValidate","size","getDocuments","createReadStream","cleanup","promises","rm","downloadFromFile","filePath","undefined","checkReferenceExistence","_referencedIds","existingIds","idsToCheck","Array","filter","has","sort","batches","reduce","acc","next","index","batchIndex","Math","floor","batch","push","ceil","map","omitted","request","uri","getDataUrl","json","query","excludeContent","tag","omittedIds","reason","loadedReferenceIntegrity","default","pMap","cleanupBrowserEnvironment","mockBrowserEnvironment","cleanupDownloadedDocuments","getClient","options","withConfig","getDocumentExists","getLevel","markers","foundWarning","marker","validatedCount","validate","timeout","Symbol","result","race","validateDocument","environment","markerValue","flagLevelValue","isRecord","intentUrl","encodeURIComponent","validation","documentId","docum
entType","revision","_rev","concurrency"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQA,MAAMA,cAAc,KAGdC,SAAUC,CAAAA,QACdA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,MAAQA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,GAGhEE,YAAaF,CAAAA,QACjBA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,QAASA,IAAI,CAAC,MAAM,KAAKA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,MAGjFG,QAASH,CAAAA,QACbA,IAAIC,UAAU,OACdD,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,MACbA,IAAI,GAAG,MAAM;AAEf,gBAAgBI,QACdC,QACAC,WACA;AAIA,QAAMC,UAAU,IAAIC,QAAc,CAACC,SAASC,WAAW;AAErDC,eAAW,YAAY;AACrB,UAAI;AACF,yBAAiBC,SAASP,OAAQC,WAAUO,MAAMD,KAAK;AACvDN,kBAAUQ,IAAAA,GACVL,QAAAA;AAAAA,MACF,SAASM,KAAK;AACZL,eAAOK,GAAG;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,SAAOT,WACP,MAAMC,SACND,UAAUU,QAAAA;AACZ;AASA,gBAAgBC,mBAAmBZ,QAAsD;AACvF,MAAIa,SAASC,OAAOC,MAAM,CAAC;AAE3B,mBAAiBR,SAASP,QAAQ;AAEhC,QADAa,SAASC,OAAOE,OAAO,CAACH,QAAQN,KAAK,CAAC,GAClCM,OAAOjB,SAASH,YAAa;AAEjC,UAAMwB,aAAaJ,QACbK,eAAe,mBAA+B;AAClD,YAAMD,YACN,OAAOjB;AAAAA,IACT;AAEA,QAAIN,OAAOuB,UAAU,GAAG;AACtB,aAAOL,mBAAmBb,QAAQmB,aAAAA,GAAgBC,cAAAA,QAAKC,aAAAA,CAAc,CAAC;AACtE;AAAA,IACF;AAEA,QAAIvB,UAAUoB,UAAU,GAAG;AACzB,aAAOL,mBAAmBb,QAAQmB,aAAAA,GAAgBC,cAAAA,QAAKE,cAAAA,CAAe,CAAC;AACvE;AAAA,IACF;AAEA,QAAIvB,MAAMmB,UAAU;AAClB,uBAAiBK,SAASvB,QAAQmB,aAAAA,GAAgBK,aAAAA,QAAIxB,QAAAA,CAAS,GAAG;AAChE,cAAMyB,WAAWC,cAAAA,QAAKC,SAASJ,MAAMK,OAAOC,IAAI;AAGhD,YAAIC,EAFYJ,cAAAA,QAAKI,QAAQL,QAAQ,EAAEM,kBAEvB,aAAaN,SAASO,WAAW,GAAG,IAEpD;AAAA,2BAAiBC,eAAeV,MAAO,OAAMU;AAC7C;AAAA,QAAA;AAAA,MACF;AAGF,WAAOd,aAAAA;AAAAA,EACT;AACF;AAMA,gBAAuBe,oCACrBC,MAC+B;AAC/B,QAAMC,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IACrCC,OAAOC,YAAAA,SAASC,KAAK5B,mBAAmBsB,IAAI,CAAC;AAAA,EAAA,CAC9C;AAED,mBAAiBO,QAAQN,OAAO;AAC9B,UAAMO,UAAUD,KAAKE,KAAAA;AACjBD,gBAAS,MAAME,KAAKC,MAAMH,OAAO;AAAA,EACvC;AACAP,QAAMW,MAAAA;AACR;AC0EO,SAASC,eACdC,YACuC;AACvC,MAAI,CAACA;AACH,UAAM,IAAIC,MAAM,sBAAsB;AAGxC,SAAO;AAAA,IACLC,OAAO,IAAIC,MAAM,IAAsD;AAAA,MACrEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAEV0B,CAAAA,YAAY;AAChD,cAAMC,UAAwB;AAAA,UAACC,MAAM;AAAA,UAAS5B;AAAAA,UAAM0B;AAAAA,QAAAA;AACpDN,mBAAWS,YAAYF,OAAO;AAAA,MAChC;AAAA,IAAA,CAIH;AAAA,IACDvD,QAAQ,IAAImD,MAAM,IAAuD;AAAA,MACvEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAET;AAAA,QACrC8B,MAAOJ,CAAAA,YAAY;AACjB,gBAAMC,UAAiC;AAAA,YAACC,MAAM;AAAA,YAAY5B;AAAAA,YAAM0B;AAAAA,UAAAA;AAChEN,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,QACA9C,KAAKA,MAAM;AACT,gBAAM8C,UAA4B;AAAA,YAACC,MAAM;AAAA,YAAO5B;AAAAA,UAAAA;AAChDoB,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,MAAA;AAAA,IACF,CAIH;AAAA,EAAA;AAEL;AC1MA,MAAMI,6BAA6B,KAC7BC,8BAA8B,KAC9BC,iCAAiC,KA4CjC;AAAA,EACJC;AAAAA,EACAC;AAAAA,EACAC,WAAWC;AAAAA,EACXC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AACF,IAAIC,oBAAAA;AAEJ,IAAIC,oBAAAA,gBAAgB,CAAC3B,oBAAAA;AACnB,QAAM,IAAIC,MAAM,4CAA4C;AAG9D,MAAM2B,cAAc;AAAA,EAACC,OAAO;AAAA,EAAGC,SAAS;AAAA,EAAGC,MAAM;AAAC,GAE5CC,SAASjC,eAAwCC,oBAAAA,UAAU,GAE3DiC,kBAAmBC,CAAAA,UAAmB;AAC1C,QAAMC,0BAAUC,IAAAA;AAEhB,WAASC,SAASC,MAAe;AAC/B,QAAIC,MAAAA,YAAYD,IAAI,GAAG;AACrBH,UAAIK,IAAIF,KAAKG,IAAI;AACjB;AAAA,IACF;AAEA,QAAI,OAAOH,QAAS,YAAYA;AAE9B,iBAAWI,QAAQC,OAAOC,OAAON,IAAI,YAAYI,IAAI;AAAA,EAEzD;AAEAL,SAAAA,SAASH,KAAK,GAEPC;AACT,GAEMU,UAAU,uBAGVC,YAAaC,CAAAA,OAAgB,OAAOA,MAAO,YAAYF,QAAQG,KAAKD,EAAE,GACtEE,wBAAyBC,CAAAA,aAEtB,CAACA,SAASC,MAAMpE,WAAW,SAAS,KAAK,CAACmE,SAASC,MAAMpE,WAAW,SAAS;AAGtF,gBAAgBqE,kBAAkBC,QAAiD;AACjF,aAAa;AACX,UAAM;AAAA,MAACnB;AAAAA,MAAOoB;AAAAA,IAAAA,IAAQ,MAAMD,OAAOE,KAAAA;AAEnC,QADIrB,UAAO,MAAMA,QACboB,KAAM;AAAA,EACZ;AACF;AAEKE,KAAAA,EAAOC,KAAK,MAAMC,QAAQC,MAAM;AAErC,eAAeC,gBAAgB;
AAC7B,QAAMC,aAAa,MAAMC,wCAAoB;AAAA,IAACC,UAAUhD;AAAAA,IAASG;AAAAA,EAAAA,CAAW;AAE5E,MAAI,CAAC2C,WAAWjH;AACd,UAAM,IAAIqD,MAAM,8CAA8C;AAGhE,MAAI+D;AACJ,MAAI/C;AAEF,QADA+C,aAAaH,WAAWI,KAAMC,CAAAA,MAAMA,EAAEtF,SAASqC,aAAa,GACxD,CAAC+C;AACH,YAAM,IAAI/D,MAAM,6CAA6CgB,aAAa,IAAI;AAAA,SAE3E;AACL,QAAI4C,WAAWjH,WAAW;AACxB,YAAM,IAAIqD,MACR,sFACF;AAEF+D,iBAAaH,WAAW,CAAC;AAAA,EAC3B;AACA,QAAM7C,YAAYgD,YAEZG,WAASC,oBAAa;AAAA,IAC1B,GAAGtD;AAAAA,IACHK,SAASA,WAAWH,UAAUG;AAAAA,IAC9BE,WAAWA,aAAaL,UAAUK;AAAAA,IAClCgD,kBAAkB;AAAA,EAAA,CACnB,EAAEC,OAAO;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc;AAErCvC,SAAAA,OAAO9B,MAAMsE,gBAAgB;AAAA,IAC3BnD,WAAWL,UAAUK;AAAAA,IACrBF,SAASH,UAAUG;AAAAA,IACnBvC,MAAMoC,UAAUpC;AAAAA,IAChBmF,UAAU/C,UAAU+C;AAAAA,EAAAA,CACrB,GAEM;AAAA,IAAC/C;AAAAA,IAAAA,QAAWmD;AAAAA,EAAAA;AACrB;AAEA,eAAeM,mBAAmBN,SAAsB;AACtD,QAAMO,YAAY,IAAIC,IAAIR,QAAOS,OAAO,gBAAgBT,QAAOG,OAAAA,EAASnD,OAAO,IAAI,EAAK,CAAC,GAEnF0D,gBAAgB,MAAMV,QAAOW,MAAM,WAAW;AACpD9C,SAAO9B,MAAM6E,oBAAoB;AAAA,IAACF;AAAAA,EAAAA,CAAc;AAEhD,QAAM;AAAA,IAACG;AAAAA,EAAAA,IAASb,QAAOG,OAAAA,GAKjBjB,UAJW,MAAMyB,MAAMJ,WAAW;AAAA,IACtCO,SAAS,IAAIC,QAAQ;AAAA,MAAC,GAAIF,SAAS;AAAA,QAACG,eAAe,UAAUH,KAAK;AAAA,MAAA;AAAA,IAAE,CAAG;AAAA,EAAA,CACxE,GAEuBI,MAAMC,UAAAA;AAC9B,MAAI,CAAChC,OAAQ,OAAM,IAAIpD,MAAM,0CAA0C;AAEvE,MAAIqF,kBAAkB;AACtB,QAAMC,gBAAgB,oBAAInD,IAAAA,GACpBoD,kCAAkBpD,OAClBjD,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IAACC,OAAOC,YAAAA,SAASC,KAAK4D,kBAAkBC,MAAM,CAAC;AAAA,EAAA,CAAE,GAKlFoC,YAAW,oBAAIC,KAAAA,GAClBC,YAAAA,EACAC,QAAQ,eAAe,GAAG,EAC1B9G,YAAAA,GACG+G,iBAAiBpH,sBAAKqH,KAAKC,YAAAA,QAAGC,OAAAA,GAAU,mBAAmBP,QAAQ,SAAS,GAC5EQ,eAAeC,oBAAGC,kBAAkBN,cAAc;AAExD,mBAAiBpG,QAAQN,OAAO;AAC9B,UAAM+D,WAAWtD,KAAKC,MAAMJ,IAAI;AAEhC,QAAIwD,sBAAsBC,QAAQ,GAAG;AACnCsC,kBAAYhD,IAAIU,SAASkD,GAAG;AAC5B,iBAAWC,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAG/BJ,mBAAazI,MAAM,GAAGiC,IAAI;AAAA,CAAI;AAAA,IAChC;AAEA6F,uBACAtD,OAAOhF,OAAOsJ,eAAe5F,KAAK;AAAA,MAAC4E;AAAAA,MAAiBT;AAAAA,IAAAA,CAAc;AAAA,EACpE;AAEA,SAAA,MAAM,IAAI1H,QAAc,CAACC,SAASC,WAChC4I,aAAanG,MAAOpC,CAAAA,QAASA,MAAML,OAAOK,GAAG,IAAIN,QAAAA,CAAU,CAC7D,GAEA4E,OAAOhF,OAAOsJ,eAAe7I,OAC7BuE,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAKjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB,cAHfA,MACnBzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBd,cAAc,CAAC;AAAA,IAEvBe,SAASA,MAAMV,YAAAA,QAAGW,SAASC,GAAGjB,cAAc;AAAA,EAAA;AAChG;AAEA,eAAekB,iBAAiBC,UAAkB;AAChD,QAAMzB,gBAAgB,oBAAInD,OACpBoD,cAAc,oBAAIpD,IAAAA,GAClBsE,eAAeA,MAAMzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBK,QAAQ,CAAC;AAE5F,mBAAiB9D,YAAYwD,aAAAA;AAC3B,QAAIzD,sBAAsBC,QAAQ,GAAG;AACnCsC,kBAAYhD,IAAIU,SAASkD,GAAG;AAC5B,iBAAWC,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAAA,IAEjC;AAGFrE,SAAAA,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAEjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB;AAAAA,IAAcE,SAASK;AAAAA,EAAAA;AAC7D;AAQA,eAAeC,wBAAwB;AAAA,EACrC/C,QAAAA;AAAAA,EACAqB;AAAAA,EACAD,eAAe4B;AACe,GAAG;AACjC,QAAMC,cAAc,IAAIhF,IAAIoD,WAAW,GACjC6B,aAAaC,MAAM9H,KAAK2H,cAAc,EACzCI,OAAQxE,CAAAA,OAAO,CAACqE,YAAYI,IAAIzE,EAAE,KAAKD,UAAUC,EAAE,CAAC,EACpD0E,KAAAA,GAEGC,UAAUL,WAAWM,OACzB,CAACC,KAAKC,MAAMC,UAAU;AACpB,UAAMC,aAAaC,KAAKC,MAAMH,QAAQjH,8BAA8B;AAEpEqH,WADcN,IAAIG,UAAU,EACtBI,KAAKN,IAAI,GACRD;AAAAA,EACT,GACAN,MAAM9H,KAAe;AAAA,IACnB5C,QAAQoL,KAAKI,KAAKf,WAAWzK,SAASiE,8BAA8B;AAAA,EAAA,CACrE,EAAEwH,IAAI,MAAM,CAAA,CAAE,CACjB;AAEA,aAAWH,SAASR,SAAS;AAC3B,UAAM;AAAA,MAACY;AAAAA,IAAAA,IAAW,MAAMnE,QAAOoE,QAA8B;AAAA,MAC3DC,KAAKrE,QAAOsE,WAAW,OAAOP,MAAMpC,KAAK,GAAG,CAAC;AAAA,MAC7C4C,MAAM;AAAA,MACNC,OAAO;AAAA,QAACC,gBAAgB;AAAA,MAAA;AAAA,MACxBC,KAAK;AAAA,IAAA,CACN,GAEKC,aAAaR,QAAQX,OAAmD,CAACC,KAAKC,UA
ClFD,IAAIC,KAAK9E,EAAE,IAAI8E,KAAKkB,QACbnB,MACN,CAAA,CAAE;AAEL,eAAW7E,MAAMmF;AAGXY,iBAAW/F,EAAE,MAAM,eACrBqE,YAAY5E,IAAIO,EAAE;AAAA,EAGxB;AACAf,SAAAA,OAAO9B,MAAM8I,4BAEN;AAAA,IAAC5B;AAAAA,EAAAA;AACV;AAEA,eAAe5D,OAAO;AAGpB,QAAM;AAAA,IAACyF,SAASC;AAAAA,EAAAA,IAAQ,MAAM,OAAO,OAAO,GAEtCC,4BAA4BC,uBAAAA,uBAAuBrI,OAAO;AAEhE,MAAIsI;AAEJ,MAAI;AACF,UAAM;AAAA,MAAClF,QAAAA;AAAAA,MAAQnD;AAAAA,IAAAA,IAAa,MAAM4C,cAAAA,GAC5B;AAAA,MAAC4B;AAAAA,MAAaD;AAAAA,MAAemB;AAAAA,MAAcE;AAAAA,IAAAA,IAAWxF,iBACxD,MAAM2F,iBAAiB3F,cAAc,IACrC,MAAMqD,mBAAmBN,OAAM;AACnCkF,iCAA6BzC;AAC7B,UAAM;AAAA,MAACQ;AAAAA,IAAAA,IAAe,MAAMF,wBAAwB;AAAA,MAAC/C,QAAAA;AAAAA,MAAQoB;AAAAA,MAAeC;AAAAA,IAAAA,CAAY,GAElF8D,YAAqDC,CAAAA,YACzDpF,QAAOqF,WAAWD,OAAO,GAErBE,oBAA4DA,CAAC;AAAA,MAAC1G;AAAAA,IAAAA,MAClE5F,QAAQC,QAAQgK,YAAYI,IAAIzE,EAAE,CAAC,GAE/B2G,WAAYC,CAAAA,YAAgC;AAChD,UAAIC,eAAe;AACnB,iBAAWC,UAAUF,SAAS;AAC5B,YAAIE,OAAOvI,UAAU,QAAS,QAAO;AACjCuI,eAAOvI,UAAU,cAAWsI,eAAe;AAAA,MACjD;AAEA,aAAIA,eAAqB,YAClB;AAAA,IACT;AAEA,QAAIE,iBAAiB;AAErB,UAAMC,WAAW,OAAO7G,aAA6B;AACnD,UAAIyG;AAEJ,UAAI;AACF,cAAMK,UAAUC,OAAO,SAAS,GAE1BC,SAAS,MAAM/M,QAAQgN,KAAK,CAChCC,wBAAiB;AAAA,UACflH;AAAAA,UACAlC;AAAAA,UACAsI;AAAAA,UACAG;AAAAA,UACAY,aAAa;AAAA,UACb9I;AAAAA,UACAC;AAAAA,QAAAA,CACD,GACD,IAAIrE,QAAyBC,CAAAA,YAC3BE,WAAW,MAAMF,QAAQ4M,OAAO,GAAGpJ,2BAA2B,CAChE,CAAC,CACF;AAED,YAAIsJ,WAAWF;AACb,gBAAM,IAAI/J,MACR,aAAaiD,SAASkD,GAAG,+BAA+BxF,2BAA2B,KACrF;AAGF+I,kBAAUO,OAEP7B,IAAI,CAAC;AAAA,UAAC3F;AAAAA,UAAM,GAAGmH;AAAAA,QAAAA,MAAYA,MAAM,EAEjCtC,OAAQsC,CAAAA,WAAW;AAClB,gBAAMS,cAAc1I,YAAYiI,OAAOvI,KAAK,GACtCiJ,iBACJ3I,YAAYN,KAAK,KAAiCM,YAAYG;AAChE,iBAAOuI,eAAeC;AAAAA,QACxB,CAAC;AAAA,MACL,SAAS7M,KAAK;AAMZiM,kBAAU,CACR;AAAA,UACEpJ,SAJY,8CAFdiK,OAAAA,SAAS9M,GAAG,KAAK,OAAOA,IAAI6C,WAAY,WAAW7C,IAAI6C,UAAU,eAEO;AAAA,UAKtEe,OAAO;AAAA,UACP7C,MAAM,CAAA;AAAA,QAAA,CACP;AAAA,MAEL;AAEAqL;AAEA,YAAMW,YACJhJ,cACA,GAAGA,UAAU,GAAGhD,cAAAA,QAAKrB,QACnB4D,UAAU+C,UACV,mBAAmB2G,mBAAmBxH,SAASkD,GAAG,CAAC,SAASsE,mBAC1DxH,SAASC,KACX,CAAC,EACH,CAAC;AAEHnB,aAAOhF,OAAO2N,WAAWjK,KAAK;AAAA,QAC5BkK,YAAY1H,SAASkD;AAAAA,QACrByE,cAAc3H,SAASC;AAAAA,QACvB2H,UAAU5H,SAAS6H;AAAAA,QACnB,GAAIN,aAAa;AAAA,UAACA;AAAAA,QAAAA;AAAAA,QAClBd;AAAAA,QACAG;AAAAA,QACAxI,OAAOoI,SAASC,OAAO;AAAA,MAAA,CACxB;AAAA,IACH;AAEA,UAAMT,KAAKxC,aAAAA,GAAgBqD,UAAU;AAAA,MAACiB,aAAarK;AAAAA,IAAAA,CAA2B,GAE9EqB,OAAOhF,OAAO2N,WAAWlN,IAAAA;AAAAA,EAC3B,UAAA;AACE,UAAM4L,6BAAAA,GACNF,0BAAAA;AAAAA,EACF;AACF;"}
+
{"version":3,"file":"validateDocuments.cjs","sources":["../../../../src/_internal/cli/util/extractDocumentsFromNdjsonOrTarball.ts","../../../../src/_internal/cli/util/workerChannels.ts","../../../../src/_internal/cli/threads/validateDocuments.ts"],"sourcesContent":["import path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable, type Writable} from 'node:stream'\nimport zlib from 'node:zlib'\n\nimport {type SanityDocument} from '@sanity/types'\nimport tar from 'tar-stream'\n\nconst HEADER_SIZE = 300\n\n// https://github.com/kevva/is-gzip/blob/13dab7c877787bd5cff9de5482b1736f00df99c6/index.js\nconst isGzip = (buf: Buffer) =>\n buf.length >= 3 && buf[0] === 0x1f && buf[1] === 0x8b && buf[2] === 0x08\n\n// https://github.com/watson/is-deflate/blob/f9e8f0c7814eed715e13e29e97c69acee319686a/index.js\nconst isDeflate = (buf: Buffer) =>\n buf.length >= 2 && buf[0] === 0x78 && (buf[1] === 1 || buf[1] === 0x9c || buf[1] === 0xda)\n\n// https://github.com/kevva/is-tar/blob/d295ffa2002a5d415946fc3d49f024ace8c28bd3/index.js\nconst isTar = (buf: Buffer) =>\n buf.length >= 262 &&\n buf[257] === 0x75 &&\n buf[258] === 0x73 &&\n buf[259] === 0x74 &&\n buf[260] === 0x61 &&\n buf[261] === 0x72\n\nasync function* extract<TReturn>(\n stream: AsyncIterable<Buffer>,\n extractor: Writable & AsyncIterable<TReturn>,\n) {\n // set up a task to drain the input iterable into the extractor asynchronously\n // before this function delegates to the extractor's iterable (containing the\n // result of the extraction)\n const drained = new Promise<void>((resolve, reject) => {\n // setTimeout is used here to ensure draining occurs after delegation\n setTimeout(async () => {\n try {\n for await (const chunk of stream) extractor.write(chunk)\n extractor.end()\n resolve()\n } catch (err) {\n reject(err)\n }\n })\n })\n\n // have this function delegate the results of the extractor\n yield* extractor\n await drained\n extractor.destroy()\n}\n\n/**\n * Given a async iterable of buffers, looks at the header of the file in the\n * first few bytes to see the file type then extracts the contents tries again.\n * If the given iterable of buffers is a tarball then it looks for an ndjson\n * files and returns another iterable of buffers with the contents of the\n * ndjson file\n */\nasync function* maybeExtractNdjson(stream: AsyncIterable<Buffer>): AsyncIterable<Buffer> {\n let buffer = Buffer.alloc(0)\n\n for await (const chunk of stream) {\n buffer = Buffer.concat([buffer, chunk])\n if (buffer.length < HEADER_SIZE) continue\n\n const fileHeader = buffer\n const restOfStream = async function* restOfStream() {\n yield fileHeader\n yield* stream\n }\n\n if (isGzip(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createGunzip()))\n return\n }\n\n if (isDeflate(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createDeflate()))\n return\n }\n\n if (isTar(fileHeader)) {\n for await (const entry of extract(restOfStream(), tar.extract())) {\n const filename = path.basename(entry.header.name)\n const extname = path.extname(filename).toLowerCase()\n // ignore hidden and non-ndjson files\n if (extname !== '.ndjson' || filename.startsWith('.')) continue\n\n for await (const ndjsonChunk of entry) yield ndjsonChunk\n return\n }\n }\n\n yield* restOfStream()\n }\n}\n\n/**\n * Takes in an async iterable of buffers from an ndjson file or tarball and\n * returns an async iterable of sanity documents.\n */\nexport async function* extractDocumentsFromNdjsonOrTarball(\n file: 
AsyncIterable<Buffer>,\n): AsyncIterable<SanityDocument> {\n const lines = readline.createInterface({\n input: Readable.from(maybeExtractNdjson(file)),\n })\n\n for await (const line of lines) {\n const trimmed = line.trim()\n if (trimmed) yield JSON.parse(trimmed) as SanityDocument\n }\n lines.close()\n}\n","import {type MessagePort, type Worker} from 'node:worker_threads'\n\ntype StreamReporter<TPayload = unknown> = {emit: (payload: TPayload) => void; end: () => void}\ntype EventReporter<TPayload = unknown> = (payload: TPayload) => void\ntype EventReceiver<TPayload = unknown> = () => Promise<TPayload>\ntype StreamReceiver<TPayload = unknown> = () => AsyncIterable<TPayload>\n\ntype EventKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelEvent<any> ? K : never\n}[keyof TWorkerChannel]\ntype StreamKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelStream<any> ? K : never\n}[keyof TWorkerChannel]\n\ntype EventMessage<TPayload = unknown> = {type: 'event'; name: string; payload: TPayload}\ntype StreamEmissionMessage<TPayload = unknown> = {type: 'emission'; name: string; payload: TPayload}\ntype StreamEndMessage = {type: 'end'; name: string}\ntype WorkerChannelMessage = EventMessage | StreamEmissionMessage | StreamEndMessage\n\n/**\n * Represents the definition of a \"worker channel\" to report progress from the\n * worker to the parent. Worker channels can define named events or streams and\n * the worker will report events and streams while the parent will await them.\n * This allows the control flow of the parent to follow the control flow of the\n * worker 1-to-1.\n */\nexport type WorkerChannel<\n TWorkerChannel extends Record<\n string,\n WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>\n > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,\n> = TWorkerChannel\n\nexport type WorkerChannelEvent<TPayload = void> = {type: 'event'; payload: TPayload}\nexport type WorkerChannelStream<TPayload = void> = {type: 'stream'; payload: TPayload}\n\nexport interface WorkerChannelReporter<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReporter<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReporter<TPayload>\n : void\n }\n}\n\nexport interface WorkerChannelReceiver<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReceiver<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReceiver<TPayload>\n : void\n }\n // TODO: good candidate for [Symbol.asyncDispose] when our tooling better supports it\n dispose: () => Promise<number>\n}\n\n/**\n * A simple queue that has two primary methods: `push(message)` and\n * `await next()`. 
This message queue is used by the \"receiver\" of the worker\n * channel and this class handles buffering incoming messages if the worker is\n * producing faster than the parent as well as returning a promise if there is\n * no message yet in the queue when the parent awaits `next()`.\n */\nclass MessageQueue<T> {\n resolver: ((result: IteratorResult<T>) => void) | null = null\n queue: T[] = []\n\n push(message: T) {\n if (this.resolver) {\n this.resolver({value: message, done: false})\n this.resolver = null\n } else {\n this.queue.push(message)\n }\n }\n\n next(): Promise<IteratorResult<T>> {\n if (this.queue.length) {\n return Promise.resolve({value: this.queue.shift()!, done: false})\n }\n\n return new Promise((resolve) => (this.resolver = resolve))\n }\n\n end() {\n if (this.resolver) {\n this.resolver({value: undefined, done: true})\n }\n }\n}\n\nfunction isWorkerChannelMessage(message: unknown): message is WorkerChannelMessage {\n if (typeof message !== 'object') return false\n if (!message) return false\n if (!('type' in message)) return false\n if (typeof message.type !== 'string') return false\n const types: string[] = ['event', 'emission', 'end'] satisfies WorkerChannelMessage['type'][]\n return types.includes(message.type)\n}\n\n/**\n * Creates a \"worker channel receiver\" that subscribes to incoming messages\n * from the given worker and returns promises for worker channel events and\n * async iterators for worker channel streams.\n */\nexport function createReceiver<TWorkerChannel extends WorkerChannel>(\n worker: Worker,\n): WorkerChannelReceiver<TWorkerChannel> {\n const _events = new Map<string, MessageQueue<EventMessage>>()\n const _streams = new Map<string, MessageQueue<StreamEmissionMessage>>()\n const errors = new MessageQueue<{type: 'error'; error: unknown}>()\n\n const eventQueue = (name: string) => {\n const queue = _events.get(name) ?? new MessageQueue()\n if (!_events.has(name)) _events.set(name, queue)\n return queue\n }\n\n const streamQueue = (name: string) => {\n const queue = _streams.get(name) ?? 
new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n }\n}\n","import fs from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable} from 'node:stream'\nimport {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n type ClientConfig,\n createClient,\n type SanityClient,\n type SanityDocument,\n} from '@sanity/client'\nimport {isReference, type ValidationContext, type ValidationMarker} from '@sanity/types'\nimport {isRecord, validateDocument} from 'sanity'\n\nimport {extractDocumentsFromNdjsonOrTarball} from 
'../util/extractDocumentsFromNdjsonOrTarball'\nimport {getStudioWorkspaces} from '../util/getStudioWorkspaces'\nimport {mockBrowserEnvironment} from '../util/mockBrowserEnvironment'\nimport {\n createReporter,\n type WorkerChannel,\n type WorkerChannelEvent,\n type WorkerChannelStream,\n} from '../util/workerChannels'\n\nconst MAX_VALIDATION_CONCURRENCY = 100\nconst DOCUMENT_VALIDATION_TIMEOUT = 30000\nconst REFERENCE_INTEGRITY_BATCH_SIZE = 100\n\ninterface AvailabilityResponse {\n omitted: {id: string; reason: 'existence' | 'permission'}[]\n}\n\n/** @internal */\nexport interface ValidateDocumentsWorkerData {\n workDir: string\n configPath?: string\n workspace?: string\n clientConfig?: Partial<ClientConfig>\n projectId?: string\n dataset?: string\n ndjsonFilePath?: string\n level?: ValidationMarker['level']\n maxCustomValidationConcurrency?: number\n maxFetchConcurrency?: number\n studioHost?: string\n}\n\n/** @internal */\nexport type ValidationWorkerChannel = WorkerChannel<{\n loadedWorkspace: WorkerChannelEvent<{\n name: string\n projectId: string\n dataset: string\n basePath: string\n }>\n loadedDocumentCount: WorkerChannelEvent<{documentCount: number}>\n exportProgress: WorkerChannelStream<{downloadedCount: number; documentCount: number}>\n exportFinished: WorkerChannelEvent<{totalDocumentsToValidate: number}>\n loadedReferenceIntegrity: WorkerChannelEvent\n validation: WorkerChannelStream<{\n validatedCount: number\n documentId: string\n documentType: string\n intentUrl?: string\n revision: string\n level: ValidationMarker['level']\n markers: ValidationMarker[]\n }>\n}>\n\nconst {\n clientConfig,\n workDir,\n workspace: workspaceName,\n configPath,\n dataset,\n ndjsonFilePath,\n projectId,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n studioHost,\n} = _workerData as ValidateDocumentsWorkerData\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst levelValues = {error: 0, warning: 1, info: 2} as const\n\nconst report = createReporter<ValidationWorkerChannel>(parentPort)\n\nconst getReferenceIds = (value: unknown) => {\n const ids = new Set<string>()\n\n function traverse(node: unknown) {\n if (isReference(node)) {\n ids.add(node._ref)\n return\n }\n\n if (typeof node === 'object' && node) {\n // Note: this works for arrays too\n for (const item of Object.values(node)) traverse(item)\n }\n }\n\n traverse(value)\n\n return ids\n}\n\nconst idRegex = /^[^-][A-Z0-9._-]*$/i\n\n// during testing, the `doc` endpoint 502'ed if given an invalid ID\nconst isValidId = (id: unknown) => typeof id === 'string' && idRegex.test(id)\nconst shouldIncludeDocument = (document: SanityDocument) => {\n // Filter out system documents and sanity documents\n return !document._type.startsWith('system.') && !document._type.startsWith('sanity.')\n}\n\nasync function* readerToGenerator(reader: ReadableStreamDefaultReader<Uint8Array>) {\n while (true) {\n const {value, done} = await reader.read()\n if (value) yield value\n if (done) return\n }\n}\n\nvoid main().then(() => process.exit())\n\nasync function loadWorkspace() {\n const workspaces = await getStudioWorkspaces({basePath: workDir, configPath})\n\n if (!workspaces.length) {\n throw new Error(`Configuration did not return any workspaces.`)\n }\n\n let _workspace\n if (workspaceName) {\n _workspace = workspaces.find((w) => w.name === workspaceName)\n if (!_workspace) {\n throw new Error(`Could not find any workspaces with name \\`${workspaceName}\\``)\n }\n } else {\n if 
(workspaces.length !== 1) {\n throw new Error(\n \"Multiple workspaces found. Please specify which workspace to use with '--workspace'.\",\n )\n }\n _workspace = workspaces[0]\n }\n const workspace = _workspace\n\n const client = createClient({\n ...clientConfig,\n dataset: dataset || workspace.dataset,\n projectId: projectId || workspace.projectId,\n requestTagPrefix: 'sanity.cli.validate',\n }).config({apiVersion: 'v2021-03-25'})\n\n report.event.loadedWorkspace({\n projectId: workspace.projectId,\n dataset: workspace.dataset,\n name: workspace.name,\n basePath: workspace.basePath,\n })\n\n return {workspace, client}\n}\n\nasync function downloadFromExport(client: SanityClient) {\n const exportUrl = new URL(client.getUrl(`/data/export/${client.config().dataset}`, false))\n\n const documentCount = await client.fetch('length(*)')\n report.event.loadedDocumentCount({documentCount})\n\n const {token} = client.config()\n const response = await fetch(exportUrl, {\n headers: new Headers({...(token && {Authorization: `Bearer ${token}`})}),\n })\n\n const reader = response.body?.getReader()\n if (!reader) throw new Error('Could not get reader from response body.')\n\n let downloadedCount = 0\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const lines = readline.createInterface({input: Readable.from(readerToGenerator(reader))})\n\n // Note: we stream the export to a file and then re-read from that file to\n // make this less memory intensive.\n // this is a similar pattern to the import/export CLI commands\n const slugDate = new Date()\n .toISOString()\n .replace(/[^a-z0-9]/gi, '-')\n .toLowerCase()\n const tempOutputFile = path.join(os.tmpdir(), `sanity-validate-${slugDate}.ndjson`)\n const outputStream = fs.createWriteStream(tempOutputFile)\n\n for await (const line of lines) {\n const document = JSON.parse(line) as SanityDocument\n\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n\n outputStream.write(`${line}\\n`)\n }\n\n downloadedCount++\n report.stream.exportProgress.emit({downloadedCount, documentCount})\n }\n\n await new Promise<void>((resolve, reject) =>\n outputStream.close((err) => (err ? 
reject(err) : resolve())),\n )\n\n report.stream.exportProgress.end()\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n const getDocuments = () =>\n extractDocumentsFromNdjsonOrTarball(fs.createReadStream(tempOutputFile))\n\n return {documentIds, referencedIds, getDocuments, cleanup: () => fs.promises.rm(tempOutputFile)}\n}\n\nasync function downloadFromFile(filePath: string) {\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const getDocuments = () => extractDocumentsFromNdjsonOrTarball(fs.createReadStream(filePath))\n\n for await (const document of getDocuments()) {\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n }\n }\n\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n return {documentIds, referencedIds, getDocuments, cleanup: undefined}\n}\n\ninterface CheckReferenceExistenceOptions {\n client: SanityClient\n referencedIds: Set<string>\n documentIds: Set<string>\n}\n\nasync function checkReferenceExistence({\n client,\n documentIds,\n referencedIds: _referencedIds,\n}: CheckReferenceExistenceOptions) {\n const existingIds = new Set(documentIds)\n const idsToCheck = Array.from(_referencedIds)\n .filter((id) => !existingIds.has(id) && isValidId(id))\n .sort()\n\n const batches = idsToCheck.reduce<string[][]>(\n (acc, next, index) => {\n const batchIndex = Math.floor(index / REFERENCE_INTEGRITY_BATCH_SIZE)\n const batch = acc[batchIndex]\n batch.push(next)\n return acc\n },\n Array.from<string[]>({\n length: Math.ceil(idsToCheck.length / REFERENCE_INTEGRITY_BATCH_SIZE),\n }).map(() => []),\n )\n\n for (const batch of batches) {\n const {omitted} = await client.request<AvailabilityResponse>({\n uri: client.getDataUrl('doc', batch.join(',')),\n json: true,\n query: {excludeContent: 'true'},\n tag: 'documents-availability',\n })\n\n const omittedIds = omitted.reduce<Record<string, 'existence' | 'permission'>>((acc, next) => {\n acc[next.id] = next.reason\n return acc\n }, {})\n\n for (const id of batch) {\n // unless the document ID is in the `omitted` object explictly due to\n // the reason `'existence'`, then it should exist\n if (omittedIds[id] !== 'existence') {\n existingIds.add(id)\n }\n }\n }\n report.event.loadedReferenceIntegrity()\n\n return {existingIds}\n}\n\nasync function main() {\n // note: this is dynamically imported because this module is ESM only and this\n // file gets compiled to CJS at this time\n const {default: pMap} = await import('p-map')\n\n const cleanupBrowserEnvironment = mockBrowserEnvironment(workDir)\n\n let cleanupDownloadedDocuments: (() => Promise<void>) | undefined\n\n try {\n const {client, workspace} = await loadWorkspace()\n const {documentIds, referencedIds, getDocuments, cleanup} = ndjsonFilePath\n ? 
await downloadFromFile(ndjsonFilePath)\n : await downloadFromExport(client)\n cleanupDownloadedDocuments = cleanup\n const {existingIds} = await checkReferenceExistence({client, referencedIds, documentIds})\n\n const getClient = <TOptions extends Partial<ClientConfig>>(options: TOptions) =>\n client.withConfig(options)\n\n const getDocumentExists: ValidationContext['getDocumentExists'] = ({id}) =>\n Promise.resolve(existingIds.has(id))\n\n const getLevel = (markers: ValidationMarker[]) => {\n let foundWarning = false\n for (const marker of markers) {\n if (marker.level === 'error') return 'error'\n if (marker.level === 'warning') foundWarning = true\n }\n\n if (foundWarning) return 'warning'\n return 'info'\n }\n\n let validatedCount = 0\n\n const validate = async (document: SanityDocument) => {\n let markers: ValidationMarker[]\n\n try {\n const timeout = Symbol('timeout')\n\n const result = await Promise.race([\n validateDocument({\n document,\n workspace,\n getClient,\n getDocumentExists,\n environment: 'cli',\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n }),\n new Promise<typeof timeout>((resolve) =>\n setTimeout(() => resolve(timeout), DOCUMENT_VALIDATION_TIMEOUT),\n ),\n ])\n\n if (result === timeout) {\n throw new Error(\n `Document '${document._id}' failed to validate within ${DOCUMENT_VALIDATION_TIMEOUT}ms.`,\n )\n }\n\n markers = result\n // remove deprecated `item` from the marker\n .map(({item, ...marker}) => marker)\n // filter out unwanted levels\n .filter((marker) => {\n const markerValue = levelValues[marker.level]\n const flagLevelValue =\n levelValues[level as keyof typeof levelValues] ?? levelValues.info\n return markerValue <= flagLevelValue\n })\n } catch (err) {\n const errorMessage =\n isRecord(err) && typeof err.message === 'string' ? 
err.message : 'Unknown error'\n\n const message = `Exception occurred while validating value: ${errorMessage}`\n\n markers = [\n {\n message,\n level: 'error',\n path: [],\n },\n ]\n }\n\n validatedCount++\n\n const intentUrl =\n studioHost &&\n `${studioHost}${path.resolve(\n workspace.basePath,\n `/intent/edit/id=${encodeURIComponent(document._id)};type=${encodeURIComponent(\n document._type,\n )}`,\n )}`\n\n report.stream.validation.emit({\n documentId: document._id,\n documentType: document._type,\n revision: document._rev,\n ...(intentUrl && {intentUrl}),\n markers,\n validatedCount,\n level: getLevel(markers),\n })\n }\n\n await pMap(getDocuments(), validate, {concurrency: MAX_VALIDATION_CONCURRENCY})\n\n report.stream.validation.end()\n } finally {\n await cleanupDownloadedDocuments?.()\n cleanupBrowserEnvironment()\n }\n}\n"],"names":["HEADER_SIZE","isGzip","buf","length","isDeflate","isTar","extract","stream","extractor","drained","Promise","resolve","reject","setTimeout","chunk","write","end","err","destroy","maybeExtractNdjson","buffer","Buffer","alloc","concat","fileHeader","restOfStream","zlib","createGunzip","createDeflate","entry","tar","filename","path","basename","header","name","extname","toLowerCase","startsWith","ndjsonChunk","extractDocumentsFromNdjsonOrTarball","file","lines","readline","createInterface","input","Readable","from","line","trimmed","trim","JSON","parse","close","createReporter","parentPort","Error","event","Proxy","get","target","payload","message","type","postMessage","emit","MAX_VALIDATION_CONCURRENCY","DOCUMENT_VALIDATION_TIMEOUT","REFERENCE_INTEGRITY_BATCH_SIZE","clientConfig","workDir","workspace","workspaceName","configPath","dataset","ndjsonFilePath","projectId","level","maxCustomValidationConcurrency","maxFetchConcurrency","studioHost","_workerData","isMainThread","levelValues","error","warning","info","report","getReferenceIds","value","ids","Set","traverse","node","isReference","add","_ref","item","Object","values","idRegex","isValidId","id","test","shouldIncludeDocument","document","_type","readerToGenerator","reader","done","read","main","then","process","exit","loadWorkspace","workspaces","getStudioWorkspaces","basePath","_workspace","find","w","client","createClient","requestTagPrefix","config","apiVersion","loadedWorkspace","downloadFromExport","exportUrl","URL","getUrl","documentCount","fetch","loadedDocumentCount","token","headers","Headers","Authorization","body","getReader","downloadedCount","referencedIds","documentIds","slugDate","Date","toISOString","replace","tempOutputFile","join","os","tmpdir","outputStream","fs","createWriteStream","_id","referenceId","exportProgress","exportFinished","totalDocumentsToValidate","size","getDocuments","createReadStream","cleanup","promises","rm","downloadFromFile","filePath","undefined","checkReferenceExistence","_referencedIds","existingIds","idsToCheck","Array","filter","has","sort","batches","reduce","acc","next","index","batchIndex","Math","floor","batch","push","ceil","map","omitted","request","uri","getDataUrl","json","query","excludeContent","tag","omittedIds","reason","loadedReferenceIntegrity","default","pMap","cleanupBrowserEnvironment","mockBrowserEnvironment","cleanupDownloadedDocuments","getClient","options","withConfig","getDocumentExists","getLevel","markers","foundWarning","marker","validatedCount","validate","timeout","result","race","validateDocument","environment","markerValue","flagLevelValue","isRecord","intentUrl","encodeURIComponent","validation","documentId","documentType",
"revision","_rev","concurrency"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQA,MAAMA,cAAc,KAGdC,SAAUC,CAAAA,QACdA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,MAAQA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,GAGhEE,YAAaF,CAAAA,QACjBA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,QAASA,IAAI,CAAC,MAAM,KAAKA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,MAGjFG,QAASH,CAAAA,QACbA,IAAIC,UAAU,OACdD,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,MACbA,IAAI,GAAG,MAAM;AAEf,gBAAgBI,QACdC,QACAC,WACA;AAIA,QAAMC,UAAU,IAAIC,QAAc,CAACC,SAASC,WAAW;AAErDC,eAAW,YAAY;AACrB,UAAI;AACF,yBAAiBC,SAASP,OAAQC,WAAUO,MAAMD,KAAK;AACvDN,kBAAUQ,IAAAA,GACVL,QAAAA;AAAAA,MACF,SAASM,KAAK;AACZL,eAAOK,GAAG;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,SAAOT,WACP,MAAMC,SACND,UAAUU,QAAAA;AACZ;AASA,gBAAgBC,mBAAmBZ,QAAsD;AACvF,MAAIa,SAASC,OAAOC,MAAM,CAAC;AAE3B,mBAAiBR,SAASP,QAAQ;AAEhC,QADAa,SAASC,OAAOE,OAAO,CAACH,QAAQN,KAAK,CAAC,GAClCM,OAAOjB,SAASH,YAAa;AAEjC,UAAMwB,aAAaJ,QACbK,eAAe,mBAA+B;AAClD,YAAMD,YACN,OAAOjB;AAAAA,IACT;AAEA,QAAIN,OAAOuB,UAAU,GAAG;AACtB,aAAOL,mBAAmBb,QAAQmB,aAAAA,GAAgBC,cAAAA,QAAKC,aAAAA,CAAc,CAAC;AACtE;AAAA,IACF;AAEA,QAAIvB,UAAUoB,UAAU,GAAG;AACzB,aAAOL,mBAAmBb,QAAQmB,aAAAA,GAAgBC,cAAAA,QAAKE,cAAAA,CAAe,CAAC;AACvE;AAAA,IACF;AAEA,QAAIvB,MAAMmB,UAAU;AAClB,uBAAiBK,SAASvB,QAAQmB,aAAAA,GAAgBK,aAAAA,QAAIxB,QAAAA,CAAS,GAAG;AAChE,cAAMyB,WAAWC,cAAAA,QAAKC,SAASJ,MAAMK,OAAOC,IAAI;AAGhD,YAAIC,EAFYJ,cAAAA,QAAKI,QAAQL,QAAQ,EAAEM,kBAEvB,aAAaN,SAASO,WAAW,GAAG,IAEpD;AAAA,2BAAiBC,eAAeV,MAAO,OAAMU;AAC7C;AAAA,QAAA;AAAA,MACF;AAGF,WAAOd,aAAAA;AAAAA,EACT;AACF;AAMA,gBAAuBe,oCACrBC,MAC+B;AAC/B,QAAMC,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IACrCC,OAAOC,YAAAA,SAASC,KAAK5B,mBAAmBsB,IAAI,CAAC;AAAA,EAAA,CAC9C;AAED,mBAAiBO,QAAQN,OAAO;AAC9B,UAAMO,UAAUD,KAAKE,KAAAA;AACjBD,gBAAS,MAAME,KAAKC,MAAMH,OAAO;AAAA,EACvC;AACAP,QAAMW,MAAAA;AACR;AC0EO,SAASC,eACdC,YACuC;AACvC,MAAI,CAACA;AACH,UAAM,IAAIC,MAAM,sBAAsB;AAGxC,SAAO;AAAA,IACLC,OAAO,IAAIC,MAAM,IAAsD;AAAA,MACrEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAEV0B,CAAAA,YAAY;AAChD,cAAMC,UAAwB;AAAA,UAACC,MAAM;AAAA,UAAS5B;AAAAA,UAAM0B;AAAAA,QAAAA;AACpDN,mBAAWS,YAAYF,OAAO;AAAA,MAChC;AAAA,IAAA,CAIH;AAAA,IACDvD,QAAQ,IAAImD,MAAM,IAAuD;AAAA,MACvEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAET;AAAA,QACrC8B,MAAOJ,CAAAA,YAAY;AACjB,gBAAMC,UAAiC;AAAA,YAACC,MAAM;AAAA,YAAY5B;AAAAA,YAAM0B;AAAAA,UAAAA;AAChEN,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,QACA9C,KAAKA,MAAM;AACT,gBAAM8C,UAA4B;AAAA,YAACC,MAAM;AAAA,YAAO5B;AAAAA,UAAAA;AAChDoB,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,MAAA;AAAA,IACF,CAIH;AAAA,EAAA;AAEL;AC1MA,MAAMI,6BAA6B,KAC7BC,8BAA8B,KAC9BC,iCAAiC,KA4CjC;AAAA,EACJC;AAAAA,EACAC;AAAAA,EACAC,WAAWC;AAAAA,EACXC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AACF,IAAIC,oBAAAA;AAEJ,IAAIC,oBAAAA,gBAAgB,CAAC3B,oBAAAA;AACnB,QAAM,IAAIC,MAAM,4CAA4C;AAG9D,MAAM2B,cAAc;AAAA,EAACC,OAAO;AAAA,EAAGC,SAAS;AAAA,EAAGC,MAAM;AAAC,GAE5CC,SAASjC,eAAwCC,oBAAAA,UAAU,GAE3DiC,kBAAmBC,CAAAA,UAAmB;AAC1C,QAAMC,0BAAUC,IAAAA;AAEhB,WAASC,SAASC,MAAe;AAC/B,QAAIC,MAAAA,YAAYD,IAAI,GAAG;AACrBH,UAAIK,IAAIF,KAAKG,IAAI;AACjB;AAAA,IACF;AAEA,QAAI,OAAOH,QAAS,YAAYA;AAE9B,iBAAWI,QAAQC,OAAOC,OAAON,IAAI,YAAYI,IAAI;AAAA,EAEzD;AAEAL,SAAAA,SAASH,KAAK,GAEPC;AACT,GAEMU,UAAU,uBAGVC,YAAaC,CAAAA,OAAgB,OAAOA,MAAO,YAAYF,QAAQG,KAAKD,EAAE,GACtEE,wBAAyBC,CAAAA,aAEtB,CAACA,SAASC,MAAMpE,WAAW,SAAS,KAAK,CAACmE,SAASC,MAAMpE,WAAW,SAAS;AAGtF,gBAAgBqE,kBAAkBC,QAAiD;AACjF,aAAa;AACX,UAAM;AAAA,MAACnB;AAAAA,MAAOoB;AAAAA,IAAAA,IAAQ,MAAMD,OAAOE,KAAAA;AAEnC,QADIrB,UAAO,MAAMA,QACboB,KAAM;AAAA,EACZ;AACF;AAEKE,KAAAA,EAAOC,KAAK,MAAMC,QAAQC,MAAM;AAErC,eAAeC,gBAAgB;AAC7B,QAA
MC,aAAa,MAAMC,wCAAoB;AAAA,IAACC,UAAUhD;AAAAA,IAASG;AAAAA,EAAAA,CAAW;AAE5E,MAAI,CAAC2C,WAAWjH;AACd,UAAM,IAAIqD,MAAM,8CAA8C;AAGhE,MAAI+D;AACJ,MAAI/C;AAEF,QADA+C,aAAaH,WAAWI,KAAMC,CAAAA,MAAMA,EAAEtF,SAASqC,aAAa,GACxD,CAAC+C;AACH,YAAM,IAAI/D,MAAM,6CAA6CgB,aAAa,IAAI;AAAA,SAE3E;AACL,QAAI4C,WAAWjH,WAAW;AACxB,YAAM,IAAIqD,MACR,sFACF;AAEF+D,iBAAaH,WAAW,CAAC;AAAA,EAC3B;AACA,QAAM7C,YAAYgD,YAEZG,WAASC,oBAAa;AAAA,IAC1B,GAAGtD;AAAAA,IACHK,SAASA,WAAWH,UAAUG;AAAAA,IAC9BE,WAAWA,aAAaL,UAAUK;AAAAA,IAClCgD,kBAAkB;AAAA,EAAA,CACnB,EAAEC,OAAO;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc;AAErCvC,SAAAA,OAAO9B,MAAMsE,gBAAgB;AAAA,IAC3BnD,WAAWL,UAAUK;AAAAA,IACrBF,SAASH,UAAUG;AAAAA,IACnBvC,MAAMoC,UAAUpC;AAAAA,IAChBmF,UAAU/C,UAAU+C;AAAAA,EAAAA,CACrB,GAEM;AAAA,IAAC/C;AAAAA,IAAAA,QAAWmD;AAAAA,EAAAA;AACrB;AAEA,eAAeM,mBAAmBN,SAAsB;AACtD,QAAMO,YAAY,IAAIC,IAAIR,QAAOS,OAAO,gBAAgBT,QAAOG,OAAAA,EAASnD,OAAO,IAAI,EAAK,CAAC,GAEnF0D,gBAAgB,MAAMV,QAAOW,MAAM,WAAW;AACpD9C,SAAO9B,MAAM6E,oBAAoB;AAAA,IAACF;AAAAA,EAAAA,CAAc;AAEhD,QAAM;AAAA,IAACG;AAAAA,EAAAA,IAASb,QAAOG,OAAAA,GAKjBjB,UAJW,MAAMyB,MAAMJ,WAAW;AAAA,IACtCO,SAAS,IAAIC,QAAQ;AAAA,MAAC,GAAIF,SAAS;AAAA,QAACG,eAAe,UAAUH,KAAK;AAAA,MAAA;AAAA,IAAE,CAAG;AAAA,EAAA,CACxE,GAEuBI,MAAMC,UAAAA;AAC9B,MAAI,CAAChC,OAAQ,OAAM,IAAIpD,MAAM,0CAA0C;AAEvE,MAAIqF,kBAAkB;AACtB,QAAMC,gBAAgB,oBAAInD,IAAAA,GACpBoD,kCAAkBpD,OAClBjD,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IAACC,OAAOC,YAAAA,SAASC,KAAK4D,kBAAkBC,MAAM,CAAC;AAAA,EAAA,CAAE,GAKlFoC,YAAW,oBAAIC,KAAAA,GAClBC,YAAAA,EACAC,QAAQ,eAAe,GAAG,EAC1B9G,YAAAA,GACG+G,iBAAiBpH,sBAAKqH,KAAKC,YAAAA,QAAGC,OAAAA,GAAU,mBAAmBP,QAAQ,SAAS,GAC5EQ,eAAeC,oBAAGC,kBAAkBN,cAAc;AAExD,mBAAiBpG,QAAQN,OAAO;AAC9B,UAAM+D,WAAWtD,KAAKC,MAAMJ,IAAI;AAEhC,QAAIwD,sBAAsBC,QAAQ,GAAG;AACnCsC,kBAAYhD,IAAIU,SAASkD,GAAG;AAC5B,iBAAWC,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAG/BJ,mBAAazI,MAAM,GAAGiC,IAAI;AAAA,CAAI;AAAA,IAChC;AAEA6F,uBACAtD,OAAOhF,OAAOsJ,eAAe5F,KAAK;AAAA,MAAC4E;AAAAA,MAAiBT;AAAAA,IAAAA,CAAc;AAAA,EACpE;AAEA,SAAA,MAAM,IAAI1H,QAAc,CAACC,SAASC,WAChC4I,aAAanG,MAAOpC,CAAAA,QAASA,MAAML,OAAOK,GAAG,IAAIN,QAAAA,CAAU,CAC7D,GAEA4E,OAAOhF,OAAOsJ,eAAe7I,OAC7BuE,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAKjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB,cAHfA,MACnBzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBd,cAAc,CAAC;AAAA,IAEvBe,SAASA,MAAMV,YAAAA,QAAGW,SAASC,GAAGjB,cAAc;AAAA,EAAA;AAChG;AAEA,eAAekB,iBAAiBC,UAAkB;AAChD,QAAMzB,gBAAgB,oBAAInD,OACpBoD,cAAc,oBAAIpD,IAAAA,GAClBsE,eAAeA,MAAMzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBK,QAAQ,CAAC;AAE5F,mBAAiB9D,YAAYwD,aAAAA;AAC3B,QAAIzD,sBAAsBC,QAAQ,GAAG;AACnCsC,kBAAYhD,IAAIU,SAASkD,GAAG;AAC5B,iBAAWC,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAAA,IAEjC;AAGFrE,SAAAA,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAEjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB;AAAAA,IAAcE,SAASK;AAAAA,EAAAA;AAC7D;AAQA,eAAeC,wBAAwB;AAAA,EACrC/C,QAAAA;AAAAA,EACAqB;AAAAA,EACAD,eAAe4B;AACe,GAAG;AACjC,QAAMC,cAAc,IAAIhF,IAAIoD,WAAW,GACjC6B,aAAaC,MAAM9H,KAAK2H,cAAc,EACzCI,OAAQxE,CAAAA,OAAO,CAACqE,YAAYI,IAAIzE,EAAE,KAAKD,UAAUC,EAAE,CAAC,EACpD0E,KAAAA,GAEGC,UAAUL,WAAWM,OACzB,CAACC,KAAKC,MAAMC,UAAU;AACpB,UAAMC,aAAaC,KAAKC,MAAMH,QAAQjH,8BAA8B;AAEpEqH,WADcN,IAAIG,UAAU,EACtBI,KAAKN,IAAI,GACRD;AAAAA,EACT,GACAN,MAAM9H,KAAe;AAAA,IACnB5C,QAAQoL,KAAKI,KAAKf,WAAWzK,SAASiE,8BAA8B;AAAA,EAAA,CACrE,EAAEwH,IAAI,MAAM,CAAA,CAAE,CACjB;AAEA,aAAWH,SAASR,SAAS;AAC3B,UAAM;AAAA,MAACY;AAAAA,IAAAA,IAAW,MAAMnE,QAAOoE,QAA8B;AAAA,MAC3DC,KAAKrE,QAAOsE,WAAW,OAAOP,MAAMpC,KAAK,GAAG,CAAC;AAAA,MAC7C4C,MAAM;AAAA,MACNC,OAAO;AAAA,QAACC,gBAAgB;AAAA,MAAA;AAAA,MACxBC,KAAK;AAAA,IAAA,CACN,GAEKC,aAAaR,QAAQX,OAAmD,CAACC,KAAKC,UAClFD,IAAI
C,KAAK9E,EAAE,IAAI8E,KAAKkB,QACbnB,MACN,CAAA,CAAE;AAEL,eAAW7E,MAAMmF;AAGXY,iBAAW/F,EAAE,MAAM,eACrBqE,YAAY5E,IAAIO,EAAE;AAAA,EAGxB;AACAf,SAAAA,OAAO9B,MAAM8I,4BAEN;AAAA,IAAC5B;AAAAA,EAAAA;AACV;AAEA,eAAe5D,OAAO;AAGpB,QAAM;AAAA,IAACyF,SAASC;AAAAA,EAAAA,IAAQ,MAAM,OAAO,OAAO,GAEtCC,4BAA4BC,uBAAAA,uBAAuBrI,OAAO;AAEhE,MAAIsI;AAEJ,MAAI;AACF,UAAM;AAAA,MAAClF,QAAAA;AAAAA,MAAQnD;AAAAA,IAAAA,IAAa,MAAM4C,cAAAA,GAC5B;AAAA,MAAC4B;AAAAA,MAAaD;AAAAA,MAAemB;AAAAA,MAAcE;AAAAA,IAAAA,IAAWxF,iBACxD,MAAM2F,iBAAiB3F,cAAc,IACrC,MAAMqD,mBAAmBN,OAAM;AACnCkF,iCAA6BzC;AAC7B,UAAM;AAAA,MAACQ;AAAAA,IAAAA,IAAe,MAAMF,wBAAwB;AAAA,MAAC/C,QAAAA;AAAAA,MAAQoB;AAAAA,MAAeC;AAAAA,IAAAA,CAAY,GAElF8D,YAAqDC,CAAAA,YACzDpF,QAAOqF,WAAWD,OAAO,GAErBE,oBAA4DA,CAAC;AAAA,MAAC1G;AAAAA,IAAAA,MAClE5F,QAAQC,QAAQgK,YAAYI,IAAIzE,EAAE,CAAC,GAE/B2G,WAAYC,CAAAA,YAAgC;AAChD,UAAIC,eAAe;AACnB,iBAAWC,UAAUF,SAAS;AAC5B,YAAIE,OAAOvI,UAAU,QAAS,QAAO;AACjCuI,eAAOvI,UAAU,cAAWsI,eAAe;AAAA,MACjD;AAEA,aAAIA,eAAqB,YAClB;AAAA,IACT;AAEA,QAAIE,iBAAiB;AAErB,UAAMC,WAAW,OAAO7G,aAA6B;AACnD,UAAIyG;AAEJ,UAAI;AACF,cAAMK,iCAAiB,SAAS,GAE1BC,SAAS,MAAM9M,QAAQ+M,KAAK,CAChCC,wBAAiB;AAAA,UACfjH;AAAAA,UACAlC;AAAAA,UACAsI;AAAAA,UACAG;AAAAA,UACAW,aAAa;AAAA,UACb7I;AAAAA,UACAC;AAAAA,QAAAA,CACD,GACD,IAAIrE,QAAyBC,CAAAA,YAC3BE,WAAW,MAAMF,QAAQ4M,OAAO,GAAGpJ,2BAA2B,CAChE,CAAC,CACF;AAED,YAAIqJ,WAAWD;AACb,gBAAM,IAAI/J,MACR,aAAaiD,SAASkD,GAAG,+BAA+BxF,2BAA2B,KACrF;AAGF+I,kBAAUM,OAEP5B,IAAI,CAAC;AAAA,UAAC3F;AAAAA,UAAM,GAAGmH;AAAAA,QAAAA,MAAYA,MAAM,EAEjCtC,OAAQsC,CAAAA,WAAW;AAClB,gBAAMQ,cAAczI,YAAYiI,OAAOvI,KAAK,GACtCgJ,iBACJ1I,YAAYN,KAAK,KAAiCM,YAAYG;AAChE,iBAAOsI,eAAeC;AAAAA,QACxB,CAAC;AAAA,MACL,SAAS5M,KAAK;AAMZiM,kBAAU,CACR;AAAA,UACEpJ,SAJY,8CAFdgK,OAAAA,SAAS7M,GAAG,KAAK,OAAOA,IAAI6C,WAAY,WAAW7C,IAAI6C,UAAU,eAEO;AAAA,UAKtEe,OAAO;AAAA,UACP7C,MAAM,CAAA;AAAA,QAAA,CACP;AAAA,MAEL;AAEAqL;AAEA,YAAMU,YACJ/I,cACA,GAAGA,UAAU,GAAGhD,cAAAA,QAAKrB,QACnB4D,UAAU+C,UACV,mBAAmB0G,mBAAmBvH,SAASkD,GAAG,CAAC,SAASqE,mBAC1DvH,SAASC,KACX,CAAC,EACH,CAAC;AAEHnB,aAAOhF,OAAO0N,WAAWhK,KAAK;AAAA,QAC5BiK,YAAYzH,SAASkD;AAAAA,QACrBwE,cAAc1H,SAASC;AAAAA,QACvB0H,UAAU3H,SAAS4H;AAAAA,QACnB,GAAIN,aAAa;AAAA,UAACA;AAAAA,QAAAA;AAAAA,QAClBb;AAAAA,QACAG;AAAAA,QACAxI,OAAOoI,SAASC,OAAO;AAAA,MAAA,CACxB;AAAA,IACH;AAEA,UAAMT,KAAKxC,aAAAA,GAAgBqD,UAAU;AAAA,MAACgB,aAAapK;AAAAA,IAAAA,CAA2B,GAE9EqB,OAAOhF,OAAO0N,WAAWjN,IAAAA;AAAAA,EAC3B,UAAA;AACE,UAAM4L,6BAAAA,GACNF,0BAAAA;AAAAA,EACF;AACF;"}
package/lib/_singletons.d.ts
CHANGED
@@ -535,7 +535,11 @@ declare interface ArrayOfObjectsInputProps<
   /**
    * @hidden
    * @beta */
-  onUpload
+  onUpload?: (event: UploadEvent) => void
+  /**
+   * @hidden
+   * @beta */
+  onSelectFile?: (props: InputOnSelectFileFunctionProps) => void
   /**
    * @hidden
    * @beta */
@@ -647,7 +651,11 @@ declare interface ArrayOfObjectsInputProps_2<
   /**
    * @hidden
    * @beta */
-  onUpload
+  onUpload?: (event: UploadEvent_2) => void
+  /**
+   * @hidden
+   * @beta */
+  onSelectFile?: (props: InputOnSelectFileFunctionProps_2) => void
   /**
    * @hidden
    * @beta */
@@ -7623,6 +7631,24 @@ declare class InitialValueTemplateItemBuilder implements Serializable_2<InitialV
 
 declare type Input = Omit<SetPreviewSearchParamEvent, 'type'>
 
+/**
+ * @hidden
+ * @beta */
+declare interface InputOnSelectFileFunctionProps {
+  assetSource: AssetSource
+  schemaType: SchemaType
+  file: File
+}
+
+/**
+ * @hidden
+ * @beta */
+declare interface InputOnSelectFileFunctionProps_2 {
+  assetSource: AssetSource
+  schemaType: SchemaType
+  file: File
+}
+
 /**
  * @hidden
  * @public */
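The two hunks above add an optional `onSelectFile` callback to the array-input props, receiving the selected `File` together with its asset source and schema type. A minimal sketch of invoking it from a file picker, assuming structural types that mirror the diff — `SelectFileProps` and `handleNativeFilePick` are hypothetical names, since `InputOnSelectFileFunctionProps` itself is declared internal and not exported:

```ts
import type {AssetSource, SchemaType} from 'sanity'

// Structural stand-in for the internal InputOnSelectFileFunctionProps.
type SelectFileProps = {
  assetSource: AssetSource
  schemaType: SchemaType
  file: File
}

// Hypothetical wrapper: forwards the first picked file, plus its
// schema/asset-source context, to the new onSelectFile callback.
function handleNativeFilePick(
  files: FileList | null,
  assetSource: AssetSource,
  schemaType: SchemaType,
  onSelectFile?: (props: SelectFileProps) => void,
): void {
  const file = files?.[0]
  if (!file || !onSelectFile) return
  onSelectFile({assetSource, schemaType, file})
}
```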
@@ -8073,6 +8099,21 @@ declare interface ListPaneNode extends BaseResolvedPaneNode<'list'> {
   source?: string
 }
 
+/**
+ *
+ * @hidden
+ * @internal
+ */
+export declare const LiveUserApplicationContext: Context<LiveUserApplicationContextValue>
+
+/**
+ * @hidden
+ * @internal
+ */
+export declare type LiveUserApplicationContextValue = {
+  userApplication: UserApplication | undefined
+}
+
 declare type Loadable<T> = {
   data: T | null
   error: Error | null
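`LiveUserApplicationContext` is created with a default value (see the `_singletons.js` hunk further down), so reading it outside a provider yields `{userApplication: undefined}`. A minimal consumption sketch, assuming the context stays exported from `sanity/_singletons`; it is marked `@internal`, so this is illustration only, and `useLiveUserApplicationTitle` is a hypothetical hook name:

```ts
import {useContext} from 'react'
import {LiveUserApplicationContext} from 'sanity/_singletons'

// Hypothetical hook: reads the live user application, if a provider set one.
// Without a provider, the singleton's default {userApplication: undefined}
// is returned and this yields undefined.
function useLiveUserApplicationTitle(): string | undefined {
  const {userApplication} = useContext(LiveUserApplicationContext)
  return userApplication?.title
}
```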
@@ -18338,6 +18379,39 @@ declare interface UpsellDialogViewedInfo extends UpsellDialogActionsInfo {
   source: 'field_action' | 'document_toolbar' | 'document_action' | 'navbar' | 'link' | 'pte'
 }
 
+/**
+ * User application from the API
+ * @internal
+ */
+declare interface UserApplication {
+  id: string
+  type: string
+  projectId?: string
+  organizationId?: string
+  title?: string
+  urlType: 'internal' | 'external'
+  appHost: string
+  apiHost: string
+}
+
+/**
+ * Cache for user applications fetched from the API.
+ * Caches by projectId to avoid duplicate fetches.
+ * @internal
+ */
+declare interface UserApplicationCache {
+  /**
+   * Get user applications for a project.
+   * Returns cached results if available, otherwise fetches from API.
+   */
+  get: (projectId: string, apiHost?: string) => Promise<UserApplication[]>
+}
+
+/**
+ * @internal
+ */
+export declare const UserApplicationCacheContext: Context<UserApplicationCache | null>
+
 /** @internal */
 declare interface UserColor {
   name: ColorHueKey
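The `UserApplicationCache` doc comment ("Caches by projectId to avoid duplicate fetches") implies promise memoization keyed by project. A sketch of one implementation satisfying that shape, assuming a hypothetical `fetchUserApplications` fetcher — the actual data source is not part of this diff:

```ts
interface UserApplication {
  id: string
  type: string
  projectId?: string
  organizationId?: string
  title?: string
  urlType: 'internal' | 'external'
  appHost: string
  apiHost: string
}

// Hypothetical fetcher standing in for the real API call.
declare function fetchUserApplications(
  projectId: string,
  apiHost?: string,
): Promise<UserApplication[]>

// Memoize the in-flight promise per projectId so concurrent callers share
// a single request, matching the interface's stated caching behavior.
function createUserApplicationCache() {
  const cache = new Map<string, Promise<UserApplication[]>>()
  return {
    get(projectId: string, apiHost?: string): Promise<UserApplication[]> {
      let entry = cache.get(projectId)
      if (!entry) {
        entry = fetchUserApplications(projectId, apiHost)
        cache.set(projectId, entry)
      }
      return entry
    },
  }
}
```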
@@ -18687,6 +18761,12 @@ declare interface WorkspaceSummary extends DefaultPluginsWorkspaceOptions {
   auth: AuthStore
   projectId: string
   dataset: string
+  /**
+   * API hostname used for requests. Used to determine if the workspace
+   * points to staging or production environment.
+   * @internal
+   */
+  apiHost?: string
   theme: StudioTheme
   schema: Schema
   i18n: LocaleSource
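Per its doc comment, the new `apiHost` field lets callers distinguish staging from production workspaces. A sketch under the assumption that staging API hosts live under `sanity.work` (production uses `api.sanity.io`); the exact hostname check is not specified by this diff:

```ts
// Assumption: staging API hosts contain "sanity.work"; a missing apiHost is
// treated as production.
function isStagingWorkspace(workspace: {apiHost?: string}): boolean {
  return workspace.apiHost?.includes('sanity.work') ?? false
}
```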
package/lib/_singletons.js
CHANGED
@@ -40,7 +40,9 @@ const ActiveWorkspaceMatcherContext = createContext("sanity/_singletons/context/
   },
   onMouseLeave: () => {
   }
-}), IsLastPaneContext = createContext("sanity/_singletons/context/is-last-pane", !1),
+}), IsLastPaneContext = createContext("sanity/_singletons/context/is-last-pane", !1), LiveUserApplicationContext = createContext("sanity/_singletons/context/live-user-application", {
+  userApplication: void 0
+}), LocaleContext = createContext("sanity/_singletons/context/locale", void 0), MediaLibraryIdsContext = createContext("sanity/_singletons/context/media-library", null), MentionUserContext = createContext("sanity/_singletons/context/mention-user", null), NavbarContext = createContext("sanity/_singletons/context/navbar", {
   onSearchFullscreenOpenChange: () => "",
   onSearchOpenChange: () => "",
   searchFullscreenOpen: !1,
@@ -143,7 +145,7 @@ const ResourceCacheContext = createContext("sanity/_singletons/context/resource-
 }), SortableItemIdContext = createContext("sanity/_singletons/context/sortable-item-id", null), SourceContext = createContext("sanity/_singletons/context/source", null), StructureToolContext = createContext("sanity/_singletons/context/structure-tool", null), StudioAnnouncementContext = createContext("sanity/_singletons/context/studioAnnouncements", void 0), TasksContext = createContext("sanity/_singletons/context/tasks", null), TasksEnabledContext = createContext("sanity/_singletons/context/tasks-enabled", {
   enabled: !1,
   mode: null
-}), TasksNavigationContext = createContext("sanity/_singletons/context/tasks-navigation", null), TasksUpsellContext = createContext("sanity/_singletons/context/tasks-upsell", null), UserColorManagerContext = createContext("sanity/_singletons/context/user-color-manager", null), ValidationContext = createContext("sanity/_singletons/context/validation", []), VirtualizerScrollInstanceContext = createContext("sanity/_singletons/context/virtualizer-scroll-instance", null), WorkspaceContext = createContext("sanity/_singletons/context/workspace", null), WorkspacesContext = createContext("sanity/_singletons/context/workspaces", null), zIndexContextDefaults = {
+}), TasksNavigationContext = createContext("sanity/_singletons/context/tasks-navigation", null), TasksUpsellContext = createContext("sanity/_singletons/context/tasks-upsell", null), UserApplicationCacheContext = createContext("sanity/_singletons/context/user-application-cache", null), UserColorManagerContext = createContext("sanity/_singletons/context/user-color-manager", null), ValidationContext = createContext("sanity/_singletons/context/validation", []), VirtualizerScrollInstanceContext = createContext("sanity/_singletons/context/virtualizer-scroll-instance", null), WorkspaceContext = createContext("sanity/_singletons/context/workspace", null), WorkspacesContext = createContext("sanity/_singletons/context/workspaces", null), zIndexContextDefaults = {
   navbar: 200,
   navbarPopover: 5e5,
   navbarDialog: 500001,
@@ -211,6 +213,7 @@ export {
   GetFormValueContext,
   HoveredFieldContext,
   IsLastPaneContext,
+  LiveUserApplicationContext,
   LocaleContext,
   MediaLibraryIdsContext,
   MentionUserContext,
@@ -262,6 +265,7 @@ export {
   TasksEnabledContext,
   TasksNavigationContext,
   TasksUpsellContext,
+  UserApplicationCacheContext,
   UserColorManagerContext,
   ValidationContext,
   VirtualizerScrollInstanceContext,