sanity 3.71.3-cli-validate.41 → 3.72.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
- const SANITY_VERSION = "3.71.3-cli-validate.41+8183e72990";
+ const SANITY_VERSION = "3.72.0";
  export {
  SANITY_VERSION
  };
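
The first hunk is the version bump itself: the exported SANITY_VERSION constant moves from the prerelease build string (the +8183e72990 suffix is semver build metadata, here a commit hash) to the stable 3.72.0. A minimal sketch of how a consumer might read it, assuming the package's public entry point re-exports the constant as the export statement above suggests:

    import {SANITY_VERSION} from 'sanity'

    // Strip any semver build metadata (`+<commit>`), which is present only
    // on prerelease builds such as 3.71.3-cli-validate.41+8183e72990.
    const [version] = SANITY_VERSION.split('+')
    console.log(`running sanity v${version}`)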
@@ -138,7 +138,7 @@ const levelValues = {
  for (const item of Object.values(node)) traverse(item);
  }
  return traverse(value), ids;
- }, idRegex = /^[^-][A-Z0-9._-]*$/i, isValidId = (id) => typeof id == "string" && idRegex.test(id), shouldIncludeDocument = (document) => !document._type.startsWith("system.") && !document._type.startsWith("sanity.");
+ }, idRegex = /^[^-][A-Z0-9._-]*$/i, isValidId = (id) => typeof id == "string" && idRegex.test(id), shouldIncludeDocument = (document) => !document._type.startsWith("system.");
  async function* readerToGenerator(reader) {
  for (; ; ) {
  const {
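
The functional change in this hunk is confined to shouldIncludeDocument: 3.72.0 stops excluding documents whose _type starts with "sanity.", so only "system." documents are filtered out before validation. A minimal before/after sketch, using hypothetical document types for illustration:

    // Hypothetical sample documents, for illustration only.
    const docs = [
      {_id: 'a', _type: 'post'},
      {_id: 'b', _type: 'system.group'},
      {_id: 'c', _type: 'sanity.imageAsset'},
    ]

    // 3.71.3-cli-validate.41: both prefixes were excluded.
    const before = (d: {_type: string}) =>
      !d._type.startsWith('system.') && !d._type.startsWith('sanity.')

    // 3.72.0: only system. documents are excluded.
    const after = (d: {_type: string}) => !d._type.startsWith('system.')

    console.log(docs.filter(before).map((d) => d._type)) // ['post']
    console.log(docs.filter(after).map((d) => d._type)) // ['post', 'sanity.imageAsset']

In practice this means documents with sanity.-prefixed types (for example, sanity.imageAsset assets) are now included in CLI validation runs. The remaining hunk updates the corresponding source map for validateDocuments.js; its embedded sourcesContent carries the same one-line change to shouldIncludeDocument in src/_internal/cli/threads/validateDocuments.ts.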
@@ -1 +1 @@
- {"version":3,"file":"validateDocuments.js","sources":["../../../../src/_internal/cli/util/extractDocumentsFromNdjsonOrTarball.ts","../../../../src/_internal/cli/util/workerChannels.ts","../../../../src/_internal/cli/threads/validateDocuments.ts"],"sourcesContent":["import path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable, type Writable} from 'node:stream'\nimport zlib from 'node:zlib'\n\nimport {type SanityDocument} from '@sanity/types'\nimport tar from 'tar-stream'\n\nconst HEADER_SIZE = 300\n\n// https://github.com/kevva/is-gzip/blob/13dab7c877787bd5cff9de5482b1736f00df99c6/index.js\nconst isGzip = (buf: Buffer) =>\n buf.length >= 3 && buf[0] === 0x1f && buf[1] === 0x8b && buf[2] === 0x08\n\n// https://github.com/watson/is-deflate/blob/f9e8f0c7814eed715e13e29e97c69acee319686a/index.js\nconst isDeflate = (buf: Buffer) =>\n buf.length >= 2 && buf[0] === 0x78 && (buf[1] === 1 || buf[1] === 0x9c || buf[1] === 0xda)\n\n// https://github.com/kevva/is-tar/blob/d295ffa2002a5d415946fc3d49f024ace8c28bd3/index.js\nconst isTar = (buf: Buffer) =>\n buf.length >= 262 &&\n buf[257] === 0x75 &&\n buf[258] === 0x73 &&\n buf[259] === 0x74 &&\n buf[260] === 0x61 &&\n buf[261] === 0x72\n\nasync function* extract<TReturn>(\n stream: AsyncIterable<Buffer>,\n extractor: Writable & AsyncIterable<TReturn>,\n) {\n // set up a task to drain the input iterable into the extractor asynchronously\n // before this function delegates to the extractor's iterable (containing the\n // result of the extraction)\n const drained = new Promise<void>((resolve, reject) => {\n // setTimeout is used here to ensure draining occurs after delegation\n setTimeout(async () => {\n try {\n for await (const chunk of stream) extractor.write(chunk)\n extractor.end()\n resolve()\n } catch (err) {\n reject(err)\n }\n })\n })\n\n // have this function delegate the results of the extractor\n yield* extractor\n await drained\n extractor.destroy()\n}\n\n/**\n * Given a async iterable of buffers, looks at the header of the file in the\n * first few bytes to see the file type then extracts the contents tries again.\n * If the given iterable of buffers is a tarball then it looks for an ndjson\n * files and returns another iterable of buffers with the contents of the\n * ndjson file\n */\nasync function* maybeExtractNdjson(stream: AsyncIterable<Buffer>): AsyncIterable<Buffer> {\n let buffer = Buffer.alloc(0)\n\n for await (const chunk of stream) {\n buffer = Buffer.concat([buffer, chunk])\n if (buffer.length < HEADER_SIZE) continue\n\n const fileHeader = buffer\n const restOfStream = async function* restOfStream() {\n yield fileHeader\n yield* stream\n }\n\n if (isGzip(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createGunzip()))\n return\n }\n\n if (isDeflate(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createDeflate()))\n return\n }\n\n if (isTar(fileHeader)) {\n for await (const entry of extract(restOfStream(), tar.extract())) {\n const filename = path.basename(entry.header.name)\n const extname = path.extname(filename).toLowerCase()\n // ignore hidden and non-ndjson files\n if (extname !== '.ndjson' || filename.startsWith('.')) continue\n\n for await (const ndjsonChunk of entry) yield ndjsonChunk\n return\n }\n }\n\n yield* restOfStream()\n }\n}\n\n/**\n * Takes in an async iterable of buffers from an ndjson file or tarball and\n * returns an async iterable of sanity documents.\n */\nexport async function* extractDocumentsFromNdjsonOrTarball(\n file: 
AsyncIterable<Buffer>,\n): AsyncIterable<SanityDocument> {\n const lines = readline.createInterface({\n input: Readable.from(maybeExtractNdjson(file)),\n })\n\n for await (const line of lines) {\n const trimmed = line.trim()\n if (trimmed) yield JSON.parse(trimmed) as SanityDocument\n }\n lines.close()\n}\n","import {type MessagePort, type Worker} from 'node:worker_threads'\n\ntype StreamReporter<TPayload = unknown> = {emit: (payload: TPayload) => void; end: () => void}\ntype EventReporter<TPayload = unknown> = (payload: TPayload) => void\ntype EventReceiver<TPayload = unknown> = () => Promise<TPayload>\ntype StreamReceiver<TPayload = unknown> = () => AsyncIterable<TPayload>\n\ntype EventKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelEvent<any> ? K : never\n}[keyof TWorkerChannel]\ntype StreamKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelStream<any> ? K : never\n}[keyof TWorkerChannel]\n\ntype EventMessage<TPayload = unknown> = {type: 'event'; name: string; payload: TPayload}\ntype StreamEmissionMessage<TPayload = unknown> = {type: 'emission'; name: string; payload: TPayload}\ntype StreamEndMessage = {type: 'end'; name: string}\ntype WorkerChannelMessage = EventMessage | StreamEmissionMessage | StreamEndMessage\n\n/**\n * Represents the definition of a \"worker channel\" to report progress from the\n * worker to the parent. Worker channels can define named events or streams and\n * the worker will report events and streams while the parent will await them.\n * This allows the control flow of the parent to follow the control flow of the\n * worker 1-to-1.\n */\nexport type WorkerChannel<\n TWorkerChannel extends Record<\n string,\n WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>\n > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,\n> = TWorkerChannel\n\nexport type WorkerChannelEvent<TPayload = void> = {type: 'event'; payload: TPayload}\nexport type WorkerChannelStream<TPayload = void> = {type: 'stream'; payload: TPayload}\n\nexport interface WorkerChannelReporter<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReporter<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReporter<TPayload>\n : void\n }\n}\n\nexport interface WorkerChannelReceiver<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReceiver<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReceiver<TPayload>\n : void\n }\n // TODO: good candidate for [Symbol.asyncDispose] when our tooling better supports it\n dispose: () => Promise<number>\n}\n\n/**\n * A simple queue that has two primary methods: `push(message)` and\n * `await next()`. 
This message queue is used by the \"receiver\" of the worker\n * channel and this class handles buffering incoming messages if the worker is\n * producing faster than the parent as well as returning a promise if there is\n * no message yet in the queue when the parent awaits `next()`.\n */\nclass MessageQueue<T> {\n resolver: ((result: IteratorResult<T>) => void) | null = null\n queue: T[] = []\n\n push(message: T) {\n if (this.resolver) {\n this.resolver({value: message, done: false})\n this.resolver = null\n } else {\n this.queue.push(message)\n }\n }\n\n next(): Promise<IteratorResult<T>> {\n if (this.queue.length) {\n return Promise.resolve({value: this.queue.shift()!, done: false})\n }\n\n return new Promise((resolve) => (this.resolver = resolve))\n }\n\n end() {\n if (this.resolver) {\n this.resolver({value: undefined, done: true})\n }\n }\n}\n\nfunction isWorkerChannelMessage(message: unknown): message is WorkerChannelMessage {\n if (typeof message !== 'object') return false\n if (!message) return false\n if (!('type' in message)) return false\n if (typeof message.type !== 'string') return false\n const types: string[] = ['event', 'emission', 'end'] satisfies WorkerChannelMessage['type'][]\n return types.includes(message.type)\n}\n\n/**\n * Creates a \"worker channel receiver\" that subscribes to incoming messages\n * from the given worker and returns promises for worker channel events and\n * async iterators for worker channel streams.\n */\nexport function createReceiver<TWorkerChannel extends WorkerChannel>(\n worker: Worker,\n): WorkerChannelReceiver<TWorkerChannel> {\n const _events = new Map<string, MessageQueue<EventMessage>>()\n const _streams = new Map<string, MessageQueue<StreamEmissionMessage>>()\n const errors = new MessageQueue<{type: 'error'; error: unknown}>()\n\n const eventQueue = (name: string) => {\n const queue = _events.get(name) ?? new MessageQueue()\n if (!_events.has(name)) _events.set(name, queue)\n return queue\n }\n\n const streamQueue = (name: string) => {\n const queue = _streams.get(name) ?? 
new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n }\n}\n","import fs from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable} from 'node:stream'\nimport {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n type ClientConfig,\n createClient,\n type SanityClient,\n type SanityDocument,\n} from '@sanity/client'\nimport {isReference, type ValidationContext, type ValidationMarker} from '@sanity/types'\nimport {isRecord, validateDocument} from 'sanity'\n\nimport {extractDocumentsFromNdjsonOrTarball} from 
'../util/extractDocumentsFromNdjsonOrTarball'\nimport {getStudioWorkspaces} from '../util/getStudioWorkspaces'\nimport {mockBrowserEnvironment} from '../util/mockBrowserEnvironment'\nimport {\n createReporter,\n type WorkerChannel,\n type WorkerChannelEvent,\n type WorkerChannelStream,\n} from '../util/workerChannels'\n\nconst MAX_VALIDATION_CONCURRENCY = 100\nconst DOCUMENT_VALIDATION_TIMEOUT = 30000\nconst REFERENCE_INTEGRITY_BATCH_SIZE = 100\n\ninterface AvailabilityResponse {\n omitted: {id: string; reason: 'existence' | 'permission'}[]\n}\n\n/** @internal */\nexport interface ValidateDocumentsWorkerData {\n workDir: string\n configPath?: string\n workspace?: string\n clientConfig?: Partial<ClientConfig>\n projectId?: string\n dataset?: string\n ndjsonFilePath?: string\n level?: ValidationMarker['level']\n maxCustomValidationConcurrency?: number\n maxFetchConcurrency?: number\n studioHost?: string\n}\n\n/** @internal */\nexport type ValidationWorkerChannel = WorkerChannel<{\n loadedWorkspace: WorkerChannelEvent<{\n name: string\n projectId: string\n dataset: string\n basePath: string\n }>\n loadedDocumentCount: WorkerChannelEvent<{documentCount: number}>\n exportProgress: WorkerChannelStream<{downloadedCount: number; documentCount: number}>\n exportFinished: WorkerChannelEvent<{totalDocumentsToValidate: number}>\n loadedReferenceIntegrity: WorkerChannelEvent\n validation: WorkerChannelStream<{\n validatedCount: number\n documentId: string\n documentType: string\n intentUrl?: string\n revision: string\n level: ValidationMarker['level']\n markers: ValidationMarker[]\n }>\n}>\n\nconst {\n clientConfig,\n workDir,\n workspace: workspaceName,\n configPath,\n dataset,\n ndjsonFilePath,\n projectId,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n studioHost,\n} = _workerData as ValidateDocumentsWorkerData\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst levelValues = {error: 0, warning: 1, info: 2} as const\n\nconst report = createReporter<ValidationWorkerChannel>(parentPort)\n\nconst getReferenceIds = (value: unknown) => {\n const ids = new Set<string>()\n\n function traverse(node: unknown) {\n if (isReference(node)) {\n ids.add(node._ref)\n return\n }\n\n if (typeof node === 'object' && node) {\n // Note: this works for arrays too\n for (const item of Object.values(node)) traverse(item)\n }\n }\n\n traverse(value)\n\n return ids\n}\n\nconst idRegex = /^[^-][A-Z0-9._-]*$/i\n\n// during testing, the `doc` endpoint 502'ed if given an invalid ID\nconst isValidId = (id: unknown) => typeof id === 'string' && idRegex.test(id)\nconst shouldIncludeDocument = (document: SanityDocument) => {\n // Filter out system documents and sanity documents\n return !document._type.startsWith('system.') && !document._type.startsWith('sanity.')\n}\n\nasync function* readerToGenerator(reader: ReadableStreamDefaultReader<Uint8Array>) {\n while (true) {\n const {value, done} = await reader.read()\n if (value) yield value\n if (done) return\n }\n}\n\nmain().then(() => process.exit())\n\nasync function loadWorkspace() {\n const workspaces = await getStudioWorkspaces({basePath: workDir, configPath})\n\n if (!workspaces.length) {\n throw new Error(`Configuration did not return any workspaces.`)\n }\n\n let _workspace\n if (workspaceName) {\n _workspace = workspaces.find((w) => w.name === workspaceName)\n if (!_workspace) {\n throw new Error(`Could not find any workspaces with name \\`${workspaceName}\\``)\n }\n } else {\n if 
(workspaces.length !== 1) {\n throw new Error(\n \"Multiple workspaces found. Please specify which workspace to use with '--workspace'.\",\n )\n }\n _workspace = workspaces[0]\n }\n const workspace = _workspace\n\n const client = createClient({\n ...clientConfig,\n dataset: dataset || workspace.dataset,\n projectId: projectId || workspace.projectId,\n requestTagPrefix: 'sanity.cli.validate',\n }).config({apiVersion: 'v2021-03-25'})\n\n report.event.loadedWorkspace({\n projectId: workspace.projectId,\n dataset: workspace.dataset,\n name: workspace.name,\n basePath: workspace.basePath,\n })\n\n return {workspace, client}\n}\n\nasync function downloadFromExport(client: SanityClient) {\n const exportUrl = new URL(client.getUrl(`/data/export/${client.config().dataset}`, false))\n\n const documentCount = await client.fetch('length(*)')\n report.event.loadedDocumentCount({documentCount})\n\n const {token} = client.config()\n const response = await fetch(exportUrl, {\n headers: new Headers({...(token && {Authorization: `Bearer ${token}`})}),\n })\n\n const reader = response.body?.getReader()\n if (!reader) throw new Error('Could not get reader from response body.')\n\n let downloadedCount = 0\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const lines = readline.createInterface({input: Readable.from(readerToGenerator(reader))})\n\n // Note: we stream the export to a file and then re-read from that file to\n // make this less memory intensive.\n // this is a similar pattern to the import/export CLI commands\n const slugDate = new Date()\n .toISOString()\n .replace(/[^a-z0-9]/gi, '-')\n .toLowerCase()\n const tempOutputFile = path.join(os.tmpdir(), `sanity-validate-${slugDate}.ndjson`)\n const outputStream = fs.createWriteStream(tempOutputFile)\n\n for await (const line of lines) {\n const document = JSON.parse(line) as SanityDocument\n\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n\n outputStream.write(`${line}\\n`)\n }\n\n downloadedCount++\n report.stream.exportProgress.emit({downloadedCount, documentCount})\n }\n\n await new Promise<void>((resolve, reject) =>\n outputStream.close((err) => (err ? 
reject(err) : resolve())),\n )\n\n report.stream.exportProgress.end()\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n const getDocuments = () =>\n extractDocumentsFromNdjsonOrTarball(fs.createReadStream(tempOutputFile))\n\n return {documentIds, referencedIds, getDocuments, cleanup: () => fs.promises.rm(tempOutputFile)}\n}\n\nasync function downloadFromFile(filePath: string) {\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const getDocuments = () => extractDocumentsFromNdjsonOrTarball(fs.createReadStream(filePath))\n\n for await (const document of getDocuments()) {\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n }\n }\n\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n return {documentIds, referencedIds, getDocuments, cleanup: undefined}\n}\n\ninterface CheckReferenceExistenceOptions {\n client: SanityClient\n referencedIds: Set<string>\n documentIds: Set<string>\n}\n\nasync function checkReferenceExistence({\n client,\n documentIds,\n referencedIds: _referencedIds,\n}: CheckReferenceExistenceOptions) {\n const existingIds = new Set(documentIds)\n const idsToCheck = Array.from(_referencedIds)\n .filter((id) => !existingIds.has(id) && isValidId(id))\n .sort()\n\n const batches = idsToCheck.reduce<string[][]>(\n (acc, next, index) => {\n const batchIndex = Math.floor(index / REFERENCE_INTEGRITY_BATCH_SIZE)\n const batch = acc[batchIndex]\n batch.push(next)\n return acc\n },\n Array.from<string[]>({\n length: Math.ceil(idsToCheck.length / REFERENCE_INTEGRITY_BATCH_SIZE),\n }).map(() => []),\n )\n\n for (const batch of batches) {\n const {omitted} = await client.request<AvailabilityResponse>({\n uri: client.getDataUrl('doc', batch.join(',')),\n json: true,\n query: {excludeContent: 'true'},\n tag: 'documents-availability',\n })\n\n const omittedIds = omitted.reduce<Record<string, 'existence' | 'permission'>>((acc, next) => {\n acc[next.id] = next.reason\n return acc\n }, {})\n\n for (const id of batch) {\n // unless the document ID is in the `omitted` object explictly due to\n // the reason `'existence'`, then it should exist\n if (omittedIds[id] !== 'existence') {\n existingIds.add(id)\n }\n }\n }\n report.event.loadedReferenceIntegrity()\n\n return {existingIds}\n}\n\nasync function main() {\n // note: this is dynamically imported because this module is ESM only and this\n // file gets compiled to CJS at this time\n const {default: pMap} = await import('p-map')\n\n const cleanupBrowserEnvironment = mockBrowserEnvironment(workDir)\n\n let cleanupDownloadedDocuments: (() => Promise<void>) | undefined\n\n try {\n const {client, workspace} = await loadWorkspace()\n const {documentIds, referencedIds, getDocuments, cleanup} = ndjsonFilePath\n ? 
await downloadFromFile(ndjsonFilePath)\n : await downloadFromExport(client)\n cleanupDownloadedDocuments = cleanup\n const {existingIds} = await checkReferenceExistence({client, referencedIds, documentIds})\n\n const getClient = <TOptions extends Partial<ClientConfig>>(options: TOptions) =>\n client.withConfig(options)\n\n const getDocumentExists: ValidationContext['getDocumentExists'] = ({id}) =>\n Promise.resolve(existingIds.has(id))\n\n const getLevel = (markers: ValidationMarker[]) => {\n let foundWarning = false\n for (const marker of markers) {\n if (marker.level === 'error') return 'error'\n if (marker.level === 'warning') foundWarning = true\n }\n\n if (foundWarning) return 'warning'\n return 'info'\n }\n\n let validatedCount = 0\n\n const validate = async (document: SanityDocument) => {\n let markers: ValidationMarker[]\n\n try {\n const timeout = Symbol('timeout')\n\n const result = await Promise.race([\n validateDocument({\n document,\n workspace,\n getClient,\n getDocumentExists,\n environment: 'cli',\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n }),\n new Promise<typeof timeout>((resolve) =>\n setTimeout(() => resolve(timeout), DOCUMENT_VALIDATION_TIMEOUT),\n ),\n ])\n\n if (result === timeout) {\n throw new Error(\n `Document '${document._id}' failed to validate within ${DOCUMENT_VALIDATION_TIMEOUT}ms.`,\n )\n }\n\n markers = result\n // remove deprecated `item` from the marker\n .map(({item, ...marker}) => marker)\n // filter out unwanted levels\n .filter((marker) => {\n const markerValue = levelValues[marker.level]\n const flagLevelValue =\n levelValues[level as keyof typeof levelValues] ?? levelValues.info\n return markerValue <= flagLevelValue\n })\n } catch (err) {\n const errorMessage =\n isRecord(err) && typeof err.message === 'string' ? 
err.message : 'Unknown error'\n\n const message = `Exception occurred while validating value: ${errorMessage}`\n\n markers = [\n {\n message,\n level: 'error',\n path: [],\n },\n ]\n }\n\n validatedCount++\n\n const intentUrl =\n studioHost &&\n `${studioHost}${path.resolve(\n workspace.basePath,\n `/intent/edit/id=${encodeURIComponent(document._id)};type=${encodeURIComponent(\n document._type,\n )}`,\n )}`\n\n report.stream.validation.emit({\n documentId: document._id,\n documentType: document._type,\n revision: document._rev,\n ...(intentUrl && {intentUrl}),\n markers,\n validatedCount,\n level: getLevel(markers),\n })\n }\n\n await pMap(getDocuments(), validate, {concurrency: MAX_VALIDATION_CONCURRENCY})\n\n report.stream.validation.end()\n } finally {\n await cleanupDownloadedDocuments?.()\n cleanupBrowserEnvironment()\n }\n}\n"],"names":["HEADER_SIZE","isGzip","buf","length","isDeflate","isTar","extract","stream","extractor","drained","Promise","resolve","reject","setTimeout","chunk","write","end","err","destroy","maybeExtractNdjson","buffer","Buffer","alloc","concat","fileHeader","restOfStream","zlib","createGunzip","createDeflate","entry","tar","filename","path","basename","header","name","extname","toLowerCase","startsWith","ndjsonChunk","extractDocumentsFromNdjsonOrTarball","file","lines","readline","createInterface","input","Readable","from","line","trimmed","trim","JSON","parse","close","createReporter","parentPort","Error","event","Proxy","get","target","payload","message","type","postMessage","emit","MAX_VALIDATION_CONCURRENCY","DOCUMENT_VALIDATION_TIMEOUT","REFERENCE_INTEGRITY_BATCH_SIZE","clientConfig","workDir","workspace","workspaceName","configPath","dataset","ndjsonFilePath","projectId","level","maxCustomValidationConcurrency","maxFetchConcurrency","studioHost","_workerData","isMainThread","levelValues","error","warning","info","report","getReferenceIds","value","ids","Set","traverse","node","isReference","add","_ref","item","Object","values","idRegex","isValidId","id","test","shouldIncludeDocument","document","_type","readerToGenerator","reader","done","read","main","then","process","exit","loadWorkspace","workspaces","getStudioWorkspaces","basePath","_workspace","find","w","client","createClient","requestTagPrefix","config","apiVersion","loadedWorkspace","downloadFromExport","exportUrl","URL","getUrl","documentCount","fetch","loadedDocumentCount","token","headers","Headers","Authorization","body","getReader","downloadedCount","referencedIds","documentIds","slugDate","Date","toISOString","replace","tempOutputFile","join","os","tmpdir","outputStream","fs","createWriteStream","_id","referenceId","exportProgress","exportFinished","totalDocumentsToValidate","size","getDocuments","createReadStream","cleanup","promises","rm","downloadFromFile","filePath","undefined","checkReferenceExistence","_referencedIds","existingIds","idsToCheck","Array","filter","has","sort","batches","reduce","acc","next","index","batchIndex","Math","floor","batch","push","ceil","map","omitted","request","uri","getDataUrl","json","query","excludeContent","tag","omittedIds","reason","loadedReferenceIntegrity","default","pMap","cleanupBrowserEnvironment","mockBrowserEnvironment","cleanupDownloadedDocuments","getClient","options","withConfig","getDocumentExists","getLevel","markers","foundWarning","marker","validatedCount","validate","timeout","Symbol","result","race","validateDocument","environment","markerValue","flagLevelValue","isRecord","intentUrl","encodeURIComponent","validation","documentId","docum
entType","revision","_rev","concurrency"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQA,MAAMA,cAAc,KAGdC,SAAUC,CAAAA,QACdA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,MAAQA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,GAGhEE,YAAaF,CACjBA,QAAAA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,QAASA,IAAI,CAAC,MAAM,KAAKA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,MAGjFG,QAASH,CACbA,QAAAA,IAAIC,UAAU,OACdD,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,MACbA,IAAI,GAAG,MAAM;AAEf,gBAAgBI,QACdC,QACAC,WACA;AAIA,QAAMC,UAAU,IAAIC,QAAc,CAACC,SAASC,WAAW;AAErDC,eAAW,YAAY;AACjB,UAAA;AACF,yBAAiBC,SAASP,OAAkBQ,WAAAA,MAAMD,KAAK;AAC7CE,kBAAAA,OACVL,QAAQ;AAAA,eACDM,KAAK;AACZL,eAAOK,GAAG;AAAA,MAAA;AAAA,IACZ,CACD;AAAA,EAAA,CACF;AAGD,SAAOT,WACP,MAAMC,SACND,UAAUU,QAAQ;AACpB;AASA,gBAAgBC,mBAAmBZ,QAAsD;AACnFa,MAAAA,SAASC,OAAOC,MAAM,CAAC;AAE3B,mBAAiBR,SAASP,QAAQ;AAChCa,QAAAA,SAASC,OAAOE,OAAO,CAACH,QAAQN,KAAK,CAAC,GAClCM,OAAOjB,SAASH,YAAa;AAE3BwB,UAAAA,aAAaJ,QACbK,eAAe,mBAA+B;AAClD,YAAMD,YACN,OAAOjB;AAAAA,IACT;AAEIN,QAAAA,OAAOuB,UAAU,GAAG;AACtB,aAAOL,mBAAmBb,QAAQmB,gBAAgBC,cAAKC,QAAAA,aAAAA,CAAc,CAAC;AACtE;AAAA,IAAA;AAGEvB,QAAAA,UAAUoB,UAAU,GAAG;AACzB,aAAOL,mBAAmBb,QAAQmB,gBAAgBC,cAAKE,QAAAA,cAAAA,CAAe,CAAC;AACvE;AAAA,IAAA;AAGF,QAAIvB,MAAMmB,UAAU;AAClB,uBAAiBK,SAASvB,QAAQmB,gBAAgBK,aAAIxB,QAAAA,QAAAA,CAAS,GAAG;AAChE,cAAMyB,WAAWC,cAAAA,QAAKC,SAASJ,MAAMK,OAAOC,IAAI;AAG5CC,YAAAA,EAFYJ,cAAKI,QAAAA,QAAQL,QAAQ,EAAEM,kBAEvB,aAAaN,SAASO,WAAW,GAAG,IAEpD;AAAiBC,2BAAAA,eAAeV,MAAaU,OAAAA;AAC7C;AAAA,QAAA;AAAA,MAAA;AAIJ,WAAOd,aAAa;AAAA,EAAA;AAExB;AAMA,gBAAuBe,oCACrBC,MAC+B;AACzBC,QAAAA,QAAQC,0BAASC,gBAAgB;AAAA,IACrCC,OAAOC,YAAAA,SAASC,KAAK5B,mBAAmBsB,IAAI,CAAC;AAAA,EAAA,CAC9C;AAED,mBAAiBO,QAAQN,OAAO;AACxBO,UAAAA,UAAUD,KAAKE,KAAK;AACtBD,gBAAS,MAAME,KAAKC,MAAMH,OAAO;AAAA,EAAA;AAEvCP,QAAMW,MAAM;AACd;AC0EO,SAASC,eACdC,YACuC;AACvC,MAAI,CAACA;AACG,UAAA,IAAIC,MAAM,sBAAsB;AAGjC,SAAA;AAAA,IACLC,OAAO,IAAIC,MAAM,IAAsD;AAAA,MACrEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAEV0B,CAAY,YAAA;AAChD,cAAMC,UAAwB;AAAA,UAACC,MAAM;AAAA,UAAS5B;AAAAA,UAAM0B;AAAAA,QAAO;AAC3DN,mBAAWS,YAAYF,OAAO;AAAA,MAAA;AAAA,IAChC,CAIH;AAAA,IACDvD,QAAQ,IAAImD,MAAM,IAAuD;AAAA,MACvEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAET;AAAA,QACrC8B,MAAOJ,CAAY,YAAA;AACjB,gBAAMC,UAAiC;AAAA,YAACC,MAAM;AAAA,YAAY5B;AAAAA,YAAM0B;AAAAA,UAAO;AACvEN,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,QACA9C,KAAKA,MAAM;AACT,gBAAM8C,UAA4B;AAAA,YAACC,MAAM;AAAA,YAAO5B;AAAAA,UAAI;AACpDoB,qBAAWS,YAAYF,OAAO;AAAA,QAAA;AAAA,MAChC;AAAA,IAKL,CAAA;AAAA,EACH;AACF;AC1MA,MAAMI,6BAA6B,KAC7BC,8BAA8B,KAC9BC,iCAAiC,KA4CjC;AAAA,EACJC;AAAAA,EACAC;AAAAA,EACAC,WAAWC;AAAAA,EACXC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AACF,IAAIC,oBAAAA;AAEJ,IAAIC,oBAAAA,gBAAgB,CAAC3B,oBAAAA;AACb,QAAA,IAAIC,MAAM,4CAA4C;AAG9D,MAAM2B,cAAc;AAAA,EAACC,OAAO;AAAA,EAAGC,SAAS;AAAA,EAAGC,MAAM;AAAC,GAE5CC,SAASjC,eAAwCC,oBAAAA,UAAU,GAE3DiC,kBAAmBC,CAAmB,UAAA;AACpCC,QAAAA,0BAAUC,IAAY;AAE5B,WAASC,SAASC,MAAe;AAC3BC,QAAAA,MAAAA,YAAYD,IAAI,GAAG;AACjBE,UAAAA,IAAIF,KAAKG,IAAI;AACjB;AAAA,IAAA;AAGE,QAAA,OAAOH,QAAS,YAAYA;AAE9B,iBAAWI,QAAQC,OAAOC,OAAON,IAAI,YAAYI,IAAI;AAAA,EAAA;AAIzDL,SAAAA,SAASH,KAAK,GAEPC;AACT,GAEMU,UAAU,uBAGVC,YAAaC,CAAAA,OAAgB,OAAOA,MAAO,YAAYF,QAAQG,KAAKD,EAAE,GACtEE,wBAAyBC,CAEtB,aAAA,CAACA,SAASC,MAAMpE,WAAW,SAAS,KAAK,CAACmE,SAASC,MAAMpE,WAAW,SAAS;AAGtF,gBAAgBqE,kBAAkBC,QAAiD;AACpE,aAAA;AACL,UAAA;AAAA,MAACnB;AAAAA,MAAOoB;AAAAA,IAAAA,IAAQ,MAAMD,OAAOE,KAAK;AACpCrB,QAAAA,UAAO,MAAMA,QACboB,KAAM;AAAA,EAAA;AAEd;AAEAE,KAAAA,EAAOC,KAAK,MAAMC,QAAQC,MAAM;AAEhC,eAAeC,gBAAgB;AACvBC,QAAAA,aAAa,MAAMC,wCAAoB;AA
AA,IAACC,UAAUhD;AAAAA,IAASG;AAAAA,EAAAA,CAAW;AAE5E,MAAI,CAAC2C,WAAWjH;AACR,UAAA,IAAIqD,MAAM,8CAA8C;AAG5D+D,MAAAA;AACA/C,MAAAA;AACF+C,QAAAA,aAAaH,WAAWI,KAAMC,CAAAA,MAAMA,EAAEtF,SAASqC,aAAa,GACxD,CAAC+C;AACH,YAAM,IAAI/D,MAAM,6CAA6CgB,aAAa,IAAI;AAAA,SAE3E;AACL,QAAI4C,WAAWjH,WAAW;AAClB,YAAA,IAAIqD,MACR,sFACF;AAEF+D,iBAAaH,WAAW,CAAC;AAAA,EAAA;AAErB7C,QAAAA,YAAYgD,YAEZG,WAASC,oBAAa;AAAA,IAC1B,GAAGtD;AAAAA,IACHK,SAASA,WAAWH,UAAUG;AAAAA,IAC9BE,WAAWA,aAAaL,UAAUK;AAAAA,IAClCgD,kBAAkB;AAAA,EACnB,CAAA,EAAEC,OAAO;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc;AAErCvC,SAAAA,OAAO9B,MAAMsE,gBAAgB;AAAA,IAC3BnD,WAAWL,UAAUK;AAAAA,IACrBF,SAASH,UAAUG;AAAAA,IACnBvC,MAAMoC,UAAUpC;AAAAA,IAChBmF,UAAU/C,UAAU+C;AAAAA,EAAAA,CACrB,GAEM;AAAA,IAAC/C;AAAAA,IAAWmD,QAAAA;AAAAA,EAAM;AAC3B;AAEA,eAAeM,mBAAmBN,SAAsB;AACtD,QAAMO,YAAY,IAAIC,IAAIR,QAAOS,OAAO,gBAAgBT,QAAOG,OAAO,EAAEnD,OAAO,IAAI,EAAK,CAAC,GAEnF0D,gBAAgB,MAAMV,QAAOW,MAAM,WAAW;AACpD9C,SAAO9B,MAAM6E,oBAAoB;AAAA,IAACF;AAAAA,EAAAA,CAAc;AAE1C,QAAA;AAAA,IAACG;AAAAA,EAAAA,IAASb,QAAOG,UAKjBjB,UAJW,MAAMyB,MAAMJ,WAAW;AAAA,IACtCO,SAAS,IAAIC,QAAQ;AAAA,MAAC,GAAIF,SAAS;AAAA,QAACG,eAAe,UAAUH,KAAK;AAAA,MAAA;AAAA,IAAK,CAAA;AAAA,EAAA,CACxE,GAEuBI,MAAMC,UAAU;AACxC,MAAI,CAAChC,OAAc,OAAA,IAAIpD,MAAM,0CAA0C;AAEvE,MAAIqF,kBAAkB;AAChBC,QAAAA,gBAAoBnD,oBAAAA,IAAAA,GACpBoD,kCAAkBpD,OAClBjD,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IAACC,OAAOC,YAAAA,SAASC,KAAK4D,kBAAkBC,MAAM,CAAC;AAAA,EAAE,CAAA,GAKlFoC,YAAeC,oBAAAA,KAClBC,GAAAA,YAAAA,EACAC,QAAQ,eAAe,GAAG,EAC1B9G,YACG+G,GAAAA,iBAAiBpH,sBAAKqH,KAAKC,YAAGC,QAAAA,OAAU,GAAA,mBAAmBP,QAAQ,SAAS,GAC5EQ,eAAeC,oBAAGC,kBAAkBN,cAAc;AAExD,mBAAiBpG,QAAQN,OAAO;AACxB+D,UAAAA,WAAWtD,KAAKC,MAAMJ,IAAI;AAE5BwD,QAAAA,sBAAsBC,QAAQ,GAAG;AACvBV,kBAAAA,IAAIU,SAASkD,GAAG;AACjBC,iBAAAA,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAGlB7I,mBAAAA,MAAM,GAAGiC,IAAI;AAAA,CAAI;AAAA,IAAA;AAIhCuC,uBAAAA,OAAOhF,OAAOsJ,eAAe5F,KAAK;AAAA,MAAC4E;AAAAA,MAAiBT;AAAAA,IAAAA,CAAc;AAAA,EAAA;AAG9D,SAAA,MAAA,IAAI1H,QAAc,CAACC,SAASC,WAChC4I,aAAanG,MAAOpC,CAASA,QAAAA,MAAML,OAAOK,GAAG,IAAIN,QAAU,CAAA,CAC7D,GAEA4E,OAAOhF,OAAOsJ,eAAe7I,OAC7BuE,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAKjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB,cAHfA,MACnBzH,oCAAoCiH,YAAGS,QAAAA,iBAAiBd,cAAc,CAAC;AAAA,IAEvBe,SAASA,MAAMV,YAAAA,QAAGW,SAASC,GAAGjB,cAAc;AAAA,EAAC;AACjG;AAEA,eAAekB,iBAAiBC,UAAkB;AAChD,QAAMzB,gBAAgB,oBAAInD,OACpBoD,cAAkBpD,oBAAAA,IAAAA,GAClBsE,eAAeA,MAAMzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBK,QAAQ,CAAC;AAE5F,mBAAiB9D,YAAYwD,aAAa;AACpCzD,QAAAA,sBAAsBC,QAAQ,GAAG;AACvBV,kBAAAA,IAAIU,SAASkD,GAAG;AACjBC,iBAAAA,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAAA,IAAA;AAKnCrE,SAAAA,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAEjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB;AAAAA,IAAcE,SAASK;AAAAA,EAAS;AACtE;AAQA,eAAeC,wBAAwB;AAAA,EACrC/C,QAAAA;AAAAA,EACAqB;AAAAA,EACAD,eAAe4B;AACe,GAAG;AACjC,QAAMC,cAAc,IAAIhF,IAAIoD,WAAW,GACjC6B,aAAaC,MAAM9H,KAAK2H,cAAc,EACzCI,OAAQxE,CAAAA,OAAO,CAACqE,YAAYI,IAAIzE,EAAE,KAAKD,UAAUC,EAAE,CAAC,EACpD0E,KAEGC,GAAAA,UAAUL,WAAWM,OACzB,CAACC,KAAKC,MAAMC,UAAU;AACpB,UAAMC,aAAaC,KAAKC,MAAMH,QAAQjH,8BAA8B;AAEpEqH,WADcN,IAAIG,UAAU,EACtBI,KAAKN,IAAI,GACRD;AAAAA,EAAAA,GAETN,MAAM9H,KAAe;AAAA,IACnB5C,QAAQoL,KAAKI,KAAKf,WAAWzK,SAASiE,8BAA8B;AAAA,EAAA,CACrE,EAAEwH,IAAI,MAAM,CAAA,CAAE,CACjB;AAEA,aAAWH,SAASR,SAAS;AACrB,UAAA;AAAA,MAACY;AAAAA,IAAAA,IAAW,MAAMnE,QAAOoE,QAA8B;AAAA,MAC3DC,KAAKrE,QAAOsE,WAAW,OAAOP,MAAMpC,KAAK,GAAG,CAAC;AAAA,MAC7C4C,MAAM;AAAA,MACNC,OAAO;AAAA,QAACC,gBAAgB;AAAA,MAAM;AAAA,MAC9BC,KAAK;AAAA,IAAA,CACN,GAEKC,aAAaR,QAAQX,OAAmD,CAACC,KAAKC,UAClFD,IAAIC,KAAK9E,EAAE,IAAI8E,KAAKk
B,QACbnB,MACN,CAAA,CAAE;AAEL,eAAW7E,MAAMmF;AAGXY,iBAAW/F,EAAE,MAAM,eACrBqE,YAAY5E,IAAIO,EAAE;AAAA,EAAA;AAIjB7C,SAAAA,OAAAA,MAAM8I,4BAEN;AAAA,IAAC5B;AAAAA,EAAW;AACrB;AAEA,eAAe5D,OAAO;AAGd,QAAA;AAAA,IAACyF,SAASC;AAAAA,MAAQ,MAAM,OAAO,OAAO,GAEtCC,4BAA4BC,uBAAAA,uBAAuBrI,OAAO;AAE5DsI,MAAAA;AAEA,MAAA;AACI,UAAA;AAAA,MAAClF,QAAAA;AAAAA,MAAQnD;AAAAA,IAAAA,IAAa,MAAM4C,cAAAA,GAC5B;AAAA,MAAC4B;AAAAA,MAAaD;AAAAA,MAAemB;AAAAA,MAAcE;AAAAA,IAAAA,IAAWxF,iBACxD,MAAM2F,iBAAiB3F,cAAc,IACrC,MAAMqD,mBAAmBN,OAAM;AACNyC,iCAAAA;AACvB,UAAA;AAAA,MAACQ;AAAAA,IAAW,IAAI,MAAMF,wBAAwB;AAAA,MAAC/C,QAAAA;AAAAA,MAAQoB;AAAAA,MAAeC;AAAAA,IAAAA,CAAY,GAElF8D,YAAqDC,CAAAA,YACzDpF,QAAOqF,WAAWD,OAAO,GAErBE,oBAA4DA,CAAC;AAAA,MAAC1G;AAAAA,IAAAA,MAClE5F,QAAQC,QAAQgK,YAAYI,IAAIzE,EAAE,CAAC,GAE/B2G,WAAYC,CAAgC,YAAA;AAChD,UAAIC,eAAe;AACnB,iBAAWC,UAAUF,SAAS;AACxBE,YAAAA,OAAOvI,UAAU,QAAgB,QAAA;AACjCuI,eAAOvI,UAAU,cAAWsI,eAAe;AAAA,MAAA;AAGjD,aAAIA,eAAqB,YAClB;AAAA,IACT;AAEA,QAAIE,iBAAiB;AAEfC,UAAAA,WAAW,OAAO7G,aAA6B;AAC/CyG,UAAAA;AAEA,UAAA;AACIK,cAAAA,UAAUC,OAAO,SAAS,GAE1BC,SAAS,MAAM/M,QAAQgN,KAAK,CAChCC,wBAAiB;AAAA,UACflH;AAAAA,UACAlC;AAAAA,UACAsI;AAAAA,UACAG;AAAAA,UACAY,aAAa;AAAA,UACb9I;AAAAA,UACAC;AAAAA,QACD,CAAA,GACD,IAAIrE,QAAyBC,CAC3BE,YAAAA,WAAW,MAAMF,QAAQ4M,OAAO,GAAGpJ,2BAA2B,CAChE,CAAC,CACF;AAED,YAAIsJ,WAAWF;AACb,gBAAM,IAAI/J,MACR,aAAaiD,SAASkD,GAAG,+BAA+BxF,2BAA2B,KACrF;AAGQsJ,kBAAAA,OAEP7B,IAAI,CAAC;AAAA,UAAC3F;AAAAA,UAAM,GAAGmH;AAAAA,QAAYA,MAAAA,MAAM,EAEjCtC,OAAQsC,CAAW,WAAA;AACZS,gBAAAA,cAAc1I,YAAYiI,OAAOvI,KAAK,GACtCiJ,iBACJ3I,YAAYN,KAAK,KAAiCM,YAAYG;AAChE,iBAAOuI,eAAeC;AAAAA,QAAAA,CACvB;AAAA,eACI7M,KAAK;AAMZiM,kBAAU,CACR;AAAA,UACEpJ,SAJY,8CAFdiK,OAAAA,SAAS9M,GAAG,KAAK,OAAOA,IAAI6C,WAAY,WAAW7C,IAAI6C,UAAU,eAEO;AAAA,UAKtEe,OAAO;AAAA,UACP7C,MAAM,CAAA;AAAA,QAAA,CACP;AAAA,MAAA;AAILqL;AAEMW,YAAAA,YACJhJ,cACA,GAAGA,UAAU,GAAGhD,cAAAA,QAAKrB,QACnB4D,UAAU+C,UACV,mBAAmB2G,mBAAmBxH,SAASkD,GAAG,CAAC,SAASsE,mBAC1DxH,SAASC,KACX,CAAC,EACH,CAAC;AAEInG,aAAAA,OAAO2N,WAAWjK,KAAK;AAAA,QAC5BkK,YAAY1H,SAASkD;AAAAA,QACrByE,cAAc3H,SAASC;AAAAA,QACvB2H,UAAU5H,SAAS6H;AAAAA,QACnB,GAAIN,aAAa;AAAA,UAACA;AAAAA,QAAS;AAAA,QAC3Bd;AAAAA,QACAG;AAAAA,QACAxI,OAAOoI,SAASC,OAAO;AAAA,MAAA,CACxB;AAAA,IACH;AAEMT,UAAAA,KAAKxC,aAAa,GAAGqD,UAAU;AAAA,MAACiB,aAAarK;AAAAA,IAA2B,CAAA,GAE9EqB,OAAOhF,OAAO2N,WAAWlN,IAAI;AAAA,EAAA,UACrB;AACF4L,UAAAA,gCACNF,0BAA0B;AAAA,EAAA;AAE9B;"}
+ {"version":3,"file":"validateDocuments.js","sources":["../../../../src/_internal/cli/util/extractDocumentsFromNdjsonOrTarball.ts","../../../../src/_internal/cli/util/workerChannels.ts","../../../../src/_internal/cli/threads/validateDocuments.ts"],"sourcesContent":["import path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable, type Writable} from 'node:stream'\nimport zlib from 'node:zlib'\n\nimport {type SanityDocument} from '@sanity/types'\nimport tar from 'tar-stream'\n\nconst HEADER_SIZE = 300\n\n// https://github.com/kevva/is-gzip/blob/13dab7c877787bd5cff9de5482b1736f00df99c6/index.js\nconst isGzip = (buf: Buffer) =>\n buf.length >= 3 && buf[0] === 0x1f && buf[1] === 0x8b && buf[2] === 0x08\n\n// https://github.com/watson/is-deflate/blob/f9e8f0c7814eed715e13e29e97c69acee319686a/index.js\nconst isDeflate = (buf: Buffer) =>\n buf.length >= 2 && buf[0] === 0x78 && (buf[1] === 1 || buf[1] === 0x9c || buf[1] === 0xda)\n\n// https://github.com/kevva/is-tar/blob/d295ffa2002a5d415946fc3d49f024ace8c28bd3/index.js\nconst isTar = (buf: Buffer) =>\n buf.length >= 262 &&\n buf[257] === 0x75 &&\n buf[258] === 0x73 &&\n buf[259] === 0x74 &&\n buf[260] === 0x61 &&\n buf[261] === 0x72\n\nasync function* extract<TReturn>(\n stream: AsyncIterable<Buffer>,\n extractor: Writable & AsyncIterable<TReturn>,\n) {\n // set up a task to drain the input iterable into the extractor asynchronously\n // before this function delegates to the extractor's iterable (containing the\n // result of the extraction)\n const drained = new Promise<void>((resolve, reject) => {\n // setTimeout is used here to ensure draining occurs after delegation\n setTimeout(async () => {\n try {\n for await (const chunk of stream) extractor.write(chunk)\n extractor.end()\n resolve()\n } catch (err) {\n reject(err)\n }\n })\n })\n\n // have this function delegate the results of the extractor\n yield* extractor\n await drained\n extractor.destroy()\n}\n\n/**\n * Given a async iterable of buffers, looks at the header of the file in the\n * first few bytes to see the file type then extracts the contents tries again.\n * If the given iterable of buffers is a tarball then it looks for an ndjson\n * files and returns another iterable of buffers with the contents of the\n * ndjson file\n */\nasync function* maybeExtractNdjson(stream: AsyncIterable<Buffer>): AsyncIterable<Buffer> {\n let buffer = Buffer.alloc(0)\n\n for await (const chunk of stream) {\n buffer = Buffer.concat([buffer, chunk])\n if (buffer.length < HEADER_SIZE) continue\n\n const fileHeader = buffer\n const restOfStream = async function* restOfStream() {\n yield fileHeader\n yield* stream\n }\n\n if (isGzip(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createGunzip()))\n return\n }\n\n if (isDeflate(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createDeflate()))\n return\n }\n\n if (isTar(fileHeader)) {\n for await (const entry of extract(restOfStream(), tar.extract())) {\n const filename = path.basename(entry.header.name)\n const extname = path.extname(filename).toLowerCase()\n // ignore hidden and non-ndjson files\n if (extname !== '.ndjson' || filename.startsWith('.')) continue\n\n for await (const ndjsonChunk of entry) yield ndjsonChunk\n return\n }\n }\n\n yield* restOfStream()\n }\n}\n\n/**\n * Takes in an async iterable of buffers from an ndjson file or tarball and\n * returns an async iterable of sanity documents.\n */\nexport async function* extractDocumentsFromNdjsonOrTarball(\n file: 
AsyncIterable<Buffer>,\n): AsyncIterable<SanityDocument> {\n const lines = readline.createInterface({\n input: Readable.from(maybeExtractNdjson(file)),\n })\n\n for await (const line of lines) {\n const trimmed = line.trim()\n if (trimmed) yield JSON.parse(trimmed) as SanityDocument\n }\n lines.close()\n}\n","import {type MessagePort, type Worker} from 'node:worker_threads'\n\ntype StreamReporter<TPayload = unknown> = {emit: (payload: TPayload) => void; end: () => void}\ntype EventReporter<TPayload = unknown> = (payload: TPayload) => void\ntype EventReceiver<TPayload = unknown> = () => Promise<TPayload>\ntype StreamReceiver<TPayload = unknown> = () => AsyncIterable<TPayload>\n\ntype EventKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelEvent<any> ? K : never\n}[keyof TWorkerChannel]\ntype StreamKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelStream<any> ? K : never\n}[keyof TWorkerChannel]\n\ntype EventMessage<TPayload = unknown> = {type: 'event'; name: string; payload: TPayload}\ntype StreamEmissionMessage<TPayload = unknown> = {type: 'emission'; name: string; payload: TPayload}\ntype StreamEndMessage = {type: 'end'; name: string}\ntype WorkerChannelMessage = EventMessage | StreamEmissionMessage | StreamEndMessage\n\n/**\n * Represents the definition of a \"worker channel\" to report progress from the\n * worker to the parent. Worker channels can define named events or streams and\n * the worker will report events and streams while the parent will await them.\n * This allows the control flow of the parent to follow the control flow of the\n * worker 1-to-1.\n */\nexport type WorkerChannel<\n TWorkerChannel extends Record<\n string,\n WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>\n > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,\n> = TWorkerChannel\n\nexport type WorkerChannelEvent<TPayload = void> = {type: 'event'; payload: TPayload}\nexport type WorkerChannelStream<TPayload = void> = {type: 'stream'; payload: TPayload}\n\nexport interface WorkerChannelReporter<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReporter<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReporter<TPayload>\n : void\n }\n}\n\nexport interface WorkerChannelReceiver<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReceiver<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReceiver<TPayload>\n : void\n }\n // TODO: good candidate for [Symbol.asyncDispose] when our tooling better supports it\n dispose: () => Promise<number>\n}\n\n/**\n * A simple queue that has two primary methods: `push(message)` and\n * `await next()`. 
This message queue is used by the \"receiver\" of the worker\n * channel and this class handles buffering incoming messages if the worker is\n * producing faster than the parent as well as returning a promise if there is\n * no message yet in the queue when the parent awaits `next()`.\n */\nclass MessageQueue<T> {\n resolver: ((result: IteratorResult<T>) => void) | null = null\n queue: T[] = []\n\n push(message: T) {\n if (this.resolver) {\n this.resolver({value: message, done: false})\n this.resolver = null\n } else {\n this.queue.push(message)\n }\n }\n\n next(): Promise<IteratorResult<T>> {\n if (this.queue.length) {\n return Promise.resolve({value: this.queue.shift()!, done: false})\n }\n\n return new Promise((resolve) => (this.resolver = resolve))\n }\n\n end() {\n if (this.resolver) {\n this.resolver({value: undefined, done: true})\n }\n }\n}\n\nfunction isWorkerChannelMessage(message: unknown): message is WorkerChannelMessage {\n if (typeof message !== 'object') return false\n if (!message) return false\n if (!('type' in message)) return false\n if (typeof message.type !== 'string') return false\n const types: string[] = ['event', 'emission', 'end'] satisfies WorkerChannelMessage['type'][]\n return types.includes(message.type)\n}\n\n/**\n * Creates a \"worker channel receiver\" that subscribes to incoming messages\n * from the given worker and returns promises for worker channel events and\n * async iterators for worker channel streams.\n */\nexport function createReceiver<TWorkerChannel extends WorkerChannel>(\n worker: Worker,\n): WorkerChannelReceiver<TWorkerChannel> {\n const _events = new Map<string, MessageQueue<EventMessage>>()\n const _streams = new Map<string, MessageQueue<StreamEmissionMessage>>()\n const errors = new MessageQueue<{type: 'error'; error: unknown}>()\n\n const eventQueue = (name: string) => {\n const queue = _events.get(name) ?? new MessageQueue()\n if (!_events.has(name)) _events.set(name, queue)\n return queue\n }\n\n const streamQueue = (name: string) => {\n const queue = _streams.get(name) ?? 
new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n }\n}\n","import fs from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable} from 'node:stream'\nimport {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n type ClientConfig,\n createClient,\n type SanityClient,\n type SanityDocument,\n} from '@sanity/client'\nimport {isReference, type ValidationContext, type ValidationMarker} from '@sanity/types'\nimport {isRecord, validateDocument} from 'sanity'\n\nimport {extractDocumentsFromNdjsonOrTarball} from 
'../util/extractDocumentsFromNdjsonOrTarball'\nimport {getStudioWorkspaces} from '../util/getStudioWorkspaces'\nimport {mockBrowserEnvironment} from '../util/mockBrowserEnvironment'\nimport {\n createReporter,\n type WorkerChannel,\n type WorkerChannelEvent,\n type WorkerChannelStream,\n} from '../util/workerChannels'\n\nconst MAX_VALIDATION_CONCURRENCY = 100\nconst DOCUMENT_VALIDATION_TIMEOUT = 30000\nconst REFERENCE_INTEGRITY_BATCH_SIZE = 100\n\ninterface AvailabilityResponse {\n omitted: {id: string; reason: 'existence' | 'permission'}[]\n}\n\n/** @internal */\nexport interface ValidateDocumentsWorkerData {\n workDir: string\n configPath?: string\n workspace?: string\n clientConfig?: Partial<ClientConfig>\n projectId?: string\n dataset?: string\n ndjsonFilePath?: string\n level?: ValidationMarker['level']\n maxCustomValidationConcurrency?: number\n maxFetchConcurrency?: number\n studioHost?: string\n}\n\n/** @internal */\nexport type ValidationWorkerChannel = WorkerChannel<{\n loadedWorkspace: WorkerChannelEvent<{\n name: string\n projectId: string\n dataset: string\n basePath: string\n }>\n loadedDocumentCount: WorkerChannelEvent<{documentCount: number}>\n exportProgress: WorkerChannelStream<{downloadedCount: number; documentCount: number}>\n exportFinished: WorkerChannelEvent<{totalDocumentsToValidate: number}>\n loadedReferenceIntegrity: WorkerChannelEvent\n validation: WorkerChannelStream<{\n validatedCount: number\n documentId: string\n documentType: string\n intentUrl?: string\n revision: string\n level: ValidationMarker['level']\n markers: ValidationMarker[]\n }>\n}>\n\nconst {\n clientConfig,\n workDir,\n workspace: workspaceName,\n configPath,\n dataset,\n ndjsonFilePath,\n projectId,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n studioHost,\n} = _workerData as ValidateDocumentsWorkerData\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst levelValues = {error: 0, warning: 1, info: 2} as const\n\nconst report = createReporter<ValidationWorkerChannel>(parentPort)\n\nconst getReferenceIds = (value: unknown) => {\n const ids = new Set<string>()\n\n function traverse(node: unknown) {\n if (isReference(node)) {\n ids.add(node._ref)\n return\n }\n\n if (typeof node === 'object' && node) {\n // Note: this works for arrays too\n for (const item of Object.values(node)) traverse(item)\n }\n }\n\n traverse(value)\n\n return ids\n}\n\nconst idRegex = /^[^-][A-Z0-9._-]*$/i\n\n// during testing, the `doc` endpoint 502'ed if given an invalid ID\nconst isValidId = (id: unknown) => typeof id === 'string' && idRegex.test(id)\nconst shouldIncludeDocument = (document: SanityDocument) => {\n // Filter out system documents\n return !document._type.startsWith('system.')\n}\n\nasync function* readerToGenerator(reader: ReadableStreamDefaultReader<Uint8Array>) {\n while (true) {\n const {value, done} = await reader.read()\n if (value) yield value\n if (done) return\n }\n}\n\nmain().then(() => process.exit())\n\nasync function loadWorkspace() {\n const workspaces = await getStudioWorkspaces({basePath: workDir, configPath})\n\n if (!workspaces.length) {\n throw new Error(`Configuration did not return any workspaces.`)\n }\n\n let _workspace\n if (workspaceName) {\n _workspace = workspaces.find((w) => w.name === workspaceName)\n if (!_workspace) {\n throw new Error(`Could not find any workspaces with name \\`${workspaceName}\\``)\n }\n } else {\n if (workspaces.length !== 1) {\n throw new Error(\n \"Multiple workspaces 
found. Please specify which workspace to use with '--workspace'.\",\n )\n }\n _workspace = workspaces[0]\n }\n const workspace = _workspace\n\n const client = createClient({\n ...clientConfig,\n dataset: dataset || workspace.dataset,\n projectId: projectId || workspace.projectId,\n requestTagPrefix: 'sanity.cli.validate',\n }).config({apiVersion: 'v2021-03-25'})\n\n report.event.loadedWorkspace({\n projectId: workspace.projectId,\n dataset: workspace.dataset,\n name: workspace.name,\n basePath: workspace.basePath,\n })\n\n return {workspace, client}\n}\n\nasync function downloadFromExport(client: SanityClient) {\n const exportUrl = new URL(client.getUrl(`/data/export/${client.config().dataset}`, false))\n\n const documentCount = await client.fetch('length(*)')\n report.event.loadedDocumentCount({documentCount})\n\n const {token} = client.config()\n const response = await fetch(exportUrl, {\n headers: new Headers({...(token && {Authorization: `Bearer ${token}`})}),\n })\n\n const reader = response.body?.getReader()\n if (!reader) throw new Error('Could not get reader from response body.')\n\n let downloadedCount = 0\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const lines = readline.createInterface({input: Readable.from(readerToGenerator(reader))})\n\n // Note: we stream the export to a file and then re-read from that file to\n // make this less memory intensive.\n // this is a similar pattern to the import/export CLI commands\n const slugDate = new Date()\n .toISOString()\n .replace(/[^a-z0-9]/gi, '-')\n .toLowerCase()\n const tempOutputFile = path.join(os.tmpdir(), `sanity-validate-${slugDate}.ndjson`)\n const outputStream = fs.createWriteStream(tempOutputFile)\n\n for await (const line of lines) {\n const document = JSON.parse(line) as SanityDocument\n\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n\n outputStream.write(`${line}\\n`)\n }\n\n downloadedCount++\n report.stream.exportProgress.emit({downloadedCount, documentCount})\n }\n\n await new Promise<void>((resolve, reject) =>\n outputStream.close((err) => (err ? 
reject(err) : resolve())),\n )\n\n report.stream.exportProgress.end()\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n const getDocuments = () =>\n extractDocumentsFromNdjsonOrTarball(fs.createReadStream(tempOutputFile))\n\n return {documentIds, referencedIds, getDocuments, cleanup: () => fs.promises.rm(tempOutputFile)}\n}\n\nasync function downloadFromFile(filePath: string) {\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const getDocuments = () => extractDocumentsFromNdjsonOrTarball(fs.createReadStream(filePath))\n\n for await (const document of getDocuments()) {\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n }\n }\n\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n return {documentIds, referencedIds, getDocuments, cleanup: undefined}\n}\n\ninterface CheckReferenceExistenceOptions {\n client: SanityClient\n referencedIds: Set<string>\n documentIds: Set<string>\n}\n\nasync function checkReferenceExistence({\n client,\n documentIds,\n referencedIds: _referencedIds,\n}: CheckReferenceExistenceOptions) {\n const existingIds = new Set(documentIds)\n const idsToCheck = Array.from(_referencedIds)\n .filter((id) => !existingIds.has(id) && isValidId(id))\n .sort()\n\n const batches = idsToCheck.reduce<string[][]>(\n (acc, next, index) => {\n const batchIndex = Math.floor(index / REFERENCE_INTEGRITY_BATCH_SIZE)\n const batch = acc[batchIndex]\n batch.push(next)\n return acc\n },\n Array.from<string[]>({\n length: Math.ceil(idsToCheck.length / REFERENCE_INTEGRITY_BATCH_SIZE),\n }).map(() => []),\n )\n\n for (const batch of batches) {\n const {omitted} = await client.request<AvailabilityResponse>({\n uri: client.getDataUrl('doc', batch.join(',')),\n json: true,\n query: {excludeContent: 'true'},\n tag: 'documents-availability',\n })\n\n const omittedIds = omitted.reduce<Record<string, 'existence' | 'permission'>>((acc, next) => {\n acc[next.id] = next.reason\n return acc\n }, {})\n\n for (const id of batch) {\n // unless the document ID is in the `omitted` object explictly due to\n // the reason `'existence'`, then it should exist\n if (omittedIds[id] !== 'existence') {\n existingIds.add(id)\n }\n }\n }\n report.event.loadedReferenceIntegrity()\n\n return {existingIds}\n}\n\nasync function main() {\n // note: this is dynamically imported because this module is ESM only and this\n // file gets compiled to CJS at this time\n const {default: pMap} = await import('p-map')\n\n const cleanupBrowserEnvironment = mockBrowserEnvironment(workDir)\n\n let cleanupDownloadedDocuments: (() => Promise<void>) | undefined\n\n try {\n const {client, workspace} = await loadWorkspace()\n const {documentIds, referencedIds, getDocuments, cleanup} = ndjsonFilePath\n ? 
await downloadFromFile(ndjsonFilePath)\n : await downloadFromExport(client)\n cleanupDownloadedDocuments = cleanup\n const {existingIds} = await checkReferenceExistence({client, referencedIds, documentIds})\n\n const getClient = <TOptions extends Partial<ClientConfig>>(options: TOptions) =>\n client.withConfig(options)\n\n const getDocumentExists: ValidationContext['getDocumentExists'] = ({id}) =>\n Promise.resolve(existingIds.has(id))\n\n const getLevel = (markers: ValidationMarker[]) => {\n let foundWarning = false\n for (const marker of markers) {\n if (marker.level === 'error') return 'error'\n if (marker.level === 'warning') foundWarning = true\n }\n\n if (foundWarning) return 'warning'\n return 'info'\n }\n\n let validatedCount = 0\n\n const validate = async (document: SanityDocument) => {\n let markers: ValidationMarker[]\n\n try {\n const timeout = Symbol('timeout')\n\n const result = await Promise.race([\n validateDocument({\n document,\n workspace,\n getClient,\n getDocumentExists,\n environment: 'cli',\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n }),\n new Promise<typeof timeout>((resolve) =>\n setTimeout(() => resolve(timeout), DOCUMENT_VALIDATION_TIMEOUT),\n ),\n ])\n\n if (result === timeout) {\n throw new Error(\n `Document '${document._id}' failed to validate within ${DOCUMENT_VALIDATION_TIMEOUT}ms.`,\n )\n }\n\n markers = result\n // remove deprecated `item` from the marker\n .map(({item, ...marker}) => marker)\n // filter out unwanted levels\n .filter((marker) => {\n const markerValue = levelValues[marker.level]\n const flagLevelValue =\n levelValues[level as keyof typeof levelValues] ?? levelValues.info\n return markerValue <= flagLevelValue\n })\n } catch (err) {\n const errorMessage =\n isRecord(err) && typeof err.message === 'string' ? 
err.message : 'Unknown error'\n\n const message = `Exception occurred while validating value: ${errorMessage}`\n\n markers = [\n {\n message,\n level: 'error',\n path: [],\n },\n ]\n }\n\n validatedCount++\n\n const intentUrl =\n studioHost &&\n `${studioHost}${path.resolve(\n workspace.basePath,\n `/intent/edit/id=${encodeURIComponent(document._id)};type=${encodeURIComponent(\n document._type,\n )}`,\n )}`\n\n report.stream.validation.emit({\n documentId: document._id,\n documentType: document._type,\n revision: document._rev,\n ...(intentUrl && {intentUrl}),\n markers,\n validatedCount,\n level: getLevel(markers),\n })\n }\n\n await pMap(getDocuments(), validate, {concurrency: MAX_VALIDATION_CONCURRENCY})\n\n report.stream.validation.end()\n } finally {\n await cleanupDownloadedDocuments?.()\n cleanupBrowserEnvironment()\n }\n}\n"],"names":[…],"mappings":"…"}
entType","revision","_rev","concurrency"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQA,MAAMA,cAAc,KAGdC,SAAUC,CAAAA,QACdA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,MAAQA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,GAGhEE,YAAaF,CACjBA,QAAAA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,QAASA,IAAI,CAAC,MAAM,KAAKA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,MAGjFG,QAASH,CACbA,QAAAA,IAAIC,UAAU,OACdD,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,MACbA,IAAI,GAAG,MAAM;AAEf,gBAAgBI,QACdC,QACAC,WACA;AAIA,QAAMC,UAAU,IAAIC,QAAc,CAACC,SAASC,WAAW;AAErDC,eAAW,YAAY;AACjB,UAAA;AACF,yBAAiBC,SAASP,OAAkBQ,WAAAA,MAAMD,KAAK;AAC7CE,kBAAAA,OACVL,QAAQ;AAAA,eACDM,KAAK;AACZL,eAAOK,GAAG;AAAA,MAAA;AAAA,IACZ,CACD;AAAA,EAAA,CACF;AAGD,SAAOT,WACP,MAAMC,SACND,UAAUU,QAAQ;AACpB;AASA,gBAAgBC,mBAAmBZ,QAAsD;AACnFa,MAAAA,SAASC,OAAOC,MAAM,CAAC;AAE3B,mBAAiBR,SAASP,QAAQ;AAChCa,QAAAA,SAASC,OAAOE,OAAO,CAACH,QAAQN,KAAK,CAAC,GAClCM,OAAOjB,SAASH,YAAa;AAE3BwB,UAAAA,aAAaJ,QACbK,eAAe,mBAA+B;AAClD,YAAMD,YACN,OAAOjB;AAAAA,IACT;AAEIN,QAAAA,OAAOuB,UAAU,GAAG;AACtB,aAAOL,mBAAmBb,QAAQmB,gBAAgBC,cAAKC,QAAAA,aAAAA,CAAc,CAAC;AACtE;AAAA,IAAA;AAGEvB,QAAAA,UAAUoB,UAAU,GAAG;AACzB,aAAOL,mBAAmBb,QAAQmB,gBAAgBC,cAAKE,QAAAA,cAAAA,CAAe,CAAC;AACvE;AAAA,IAAA;AAGF,QAAIvB,MAAMmB,UAAU;AAClB,uBAAiBK,SAASvB,QAAQmB,gBAAgBK,aAAIxB,QAAAA,QAAAA,CAAS,GAAG;AAChE,cAAMyB,WAAWC,cAAAA,QAAKC,SAASJ,MAAMK,OAAOC,IAAI;AAG5CC,YAAAA,EAFYJ,cAAKI,QAAAA,QAAQL,QAAQ,EAAEM,kBAEvB,aAAaN,SAASO,WAAW,GAAG,IAEpD;AAAiBC,2BAAAA,eAAeV,MAAaU,OAAAA;AAC7C;AAAA,QAAA;AAAA,MAAA;AAIJ,WAAOd,aAAa;AAAA,EAAA;AAExB;AAMA,gBAAuBe,oCACrBC,MAC+B;AACzBC,QAAAA,QAAQC,0BAASC,gBAAgB;AAAA,IACrCC,OAAOC,YAAAA,SAASC,KAAK5B,mBAAmBsB,IAAI,CAAC;AAAA,EAAA,CAC9C;AAED,mBAAiBO,QAAQN,OAAO;AACxBO,UAAAA,UAAUD,KAAKE,KAAK;AACtBD,gBAAS,MAAME,KAAKC,MAAMH,OAAO;AAAA,EAAA;AAEvCP,QAAMW,MAAM;AACd;AC0EO,SAASC,eACdC,YACuC;AACvC,MAAI,CAACA;AACG,UAAA,IAAIC,MAAM,sBAAsB;AAGjC,SAAA;AAAA,IACLC,OAAO,IAAIC,MAAM,IAAsD;AAAA,MACrEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAEV0B,CAAY,YAAA;AAChD,cAAMC,UAAwB;AAAA,UAACC,MAAM;AAAA,UAAS5B;AAAAA,UAAM0B;AAAAA,QAAO;AAC3DN,mBAAWS,YAAYF,OAAO;AAAA,MAAA;AAAA,IAChC,CAIH;AAAA,IACDvD,QAAQ,IAAImD,MAAM,IAAuD;AAAA,MACvEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAET;AAAA,QACrC8B,MAAOJ,CAAY,YAAA;AACjB,gBAAMC,UAAiC;AAAA,YAACC,MAAM;AAAA,YAAY5B;AAAAA,YAAM0B;AAAAA,UAAO;AACvEN,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,QACA9C,KAAKA,MAAM;AACT,gBAAM8C,UAA4B;AAAA,YAACC,MAAM;AAAA,YAAO5B;AAAAA,UAAI;AACpDoB,qBAAWS,YAAYF,OAAO;AAAA,QAAA;AAAA,MAChC;AAAA,IAKL,CAAA;AAAA,EACH;AACF;AC1MA,MAAMI,6BAA6B,KAC7BC,8BAA8B,KAC9BC,iCAAiC,KA4CjC;AAAA,EACJC;AAAAA,EACAC;AAAAA,EACAC,WAAWC;AAAAA,EACXC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AACF,IAAIC,oBAAAA;AAEJ,IAAIC,oBAAAA,gBAAgB,CAAC3B,oBAAAA;AACb,QAAA,IAAIC,MAAM,4CAA4C;AAG9D,MAAM2B,cAAc;AAAA,EAACC,OAAO;AAAA,EAAGC,SAAS;AAAA,EAAGC,MAAM;AAAC,GAE5CC,SAASjC,eAAwCC,oBAAAA,UAAU,GAE3DiC,kBAAmBC,CAAmB,UAAA;AACpCC,QAAAA,0BAAUC,IAAY;AAE5B,WAASC,SAASC,MAAe;AAC3BC,QAAAA,MAAAA,YAAYD,IAAI,GAAG;AACjBE,UAAAA,IAAIF,KAAKG,IAAI;AACjB;AAAA,IAAA;AAGE,QAAA,OAAOH,QAAS,YAAYA;AAE9B,iBAAWI,QAAQC,OAAOC,OAAON,IAAI,YAAYI,IAAI;AAAA,EAAA;AAIzDL,SAAAA,SAASH,KAAK,GAEPC;AACT,GAEMU,UAAU,uBAGVC,YAAaC,CAAgB,OAAA,OAAOA,MAAO,YAAYF,QAAQG,KAAKD,EAAE,GACtEE,wBAAyBC,CAAAA,aAEtB,CAACA,SAASC,MAAMpE,WAAW,SAAS;AAG7C,gBAAgBqE,kBAAkBC,QAAiD;AACpE,aAAA;AACL,UAAA;AAAA,MAACnB;AAAAA,MAAOoB;AAAAA,IAAAA,IAAQ,MAAMD,OAAOE,KAAK;AACpCrB,QAAAA,UAAO,MAAMA,QACboB,KAAM;AAAA,EAAA;AAEd;AAEAE,KAAAA,EAAOC,KAAK,MAAMC,QAAQC,MAAM;AAEhC,eAAeC,gBAAgB;AACvBC,QAAAA,aAAa,MAAMC,wCAAoB;AAAA,IAACC,UAAUhD;AAAAA,IAASG;AAAAA,E
AAAA,CAAW;AAE5E,MAAI,CAAC2C,WAAWjH;AACR,UAAA,IAAIqD,MAAM,8CAA8C;AAG5D+D,MAAAA;AACA/C,MAAAA;AACF+C,QAAAA,aAAaH,WAAWI,KAAMC,CAAAA,MAAMA,EAAEtF,SAASqC,aAAa,GACxD,CAAC+C;AACH,YAAM,IAAI/D,MAAM,6CAA6CgB,aAAa,IAAI;AAAA,SAE3E;AACL,QAAI4C,WAAWjH,WAAW;AAClB,YAAA,IAAIqD,MACR,sFACF;AAEF+D,iBAAaH,WAAW,CAAC;AAAA,EAAA;AAErB7C,QAAAA,YAAYgD,YAEZG,WAASC,oBAAa;AAAA,IAC1B,GAAGtD;AAAAA,IACHK,SAASA,WAAWH,UAAUG;AAAAA,IAC9BE,WAAWA,aAAaL,UAAUK;AAAAA,IAClCgD,kBAAkB;AAAA,EACnB,CAAA,EAAEC,OAAO;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc;AAErCvC,SAAAA,OAAO9B,MAAMsE,gBAAgB;AAAA,IAC3BnD,WAAWL,UAAUK;AAAAA,IACrBF,SAASH,UAAUG;AAAAA,IACnBvC,MAAMoC,UAAUpC;AAAAA,IAChBmF,UAAU/C,UAAU+C;AAAAA,EAAAA,CACrB,GAEM;AAAA,IAAC/C;AAAAA,IAAWmD,QAAAA;AAAAA,EAAM;AAC3B;AAEA,eAAeM,mBAAmBN,SAAsB;AACtD,QAAMO,YAAY,IAAIC,IAAIR,QAAOS,OAAO,gBAAgBT,QAAOG,OAAO,EAAEnD,OAAO,IAAI,EAAK,CAAC,GAEnF0D,gBAAgB,MAAMV,QAAOW,MAAM,WAAW;AACpD9C,SAAO9B,MAAM6E,oBAAoB;AAAA,IAACF;AAAAA,EAAAA,CAAc;AAE1C,QAAA;AAAA,IAACG;AAAAA,EAAAA,IAASb,QAAOG,UAKjBjB,UAJW,MAAMyB,MAAMJ,WAAW;AAAA,IACtCO,SAAS,IAAIC,QAAQ;AAAA,MAAC,GAAIF,SAAS;AAAA,QAACG,eAAe,UAAUH,KAAK;AAAA,MAAA;AAAA,IAAK,CAAA;AAAA,EAAA,CACxE,GAEuBI,MAAMC,UAAU;AACxC,MAAI,CAAChC,OAAc,OAAA,IAAIpD,MAAM,0CAA0C;AAEvE,MAAIqF,kBAAkB;AAChBC,QAAAA,gBAAoBnD,oBAAAA,IAAAA,GACpBoD,kCAAkBpD,OAClBjD,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IAACC,OAAOC,YAAAA,SAASC,KAAK4D,kBAAkBC,MAAM,CAAC;AAAA,EAAE,CAAA,GAKlFoC,YAAeC,oBAAAA,KAClBC,GAAAA,YAAAA,EACAC,QAAQ,eAAe,GAAG,EAC1B9G,YACG+G,GAAAA,iBAAiBpH,sBAAKqH,KAAKC,YAAGC,QAAAA,OAAU,GAAA,mBAAmBP,QAAQ,SAAS,GAC5EQ,eAAeC,oBAAGC,kBAAkBN,cAAc;AAExD,mBAAiBpG,QAAQN,OAAO;AACxB+D,UAAAA,WAAWtD,KAAKC,MAAMJ,IAAI;AAE5BwD,QAAAA,sBAAsBC,QAAQ,GAAG;AACvBV,kBAAAA,IAAIU,SAASkD,GAAG;AACjBC,iBAAAA,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAGlB7I,mBAAAA,MAAM,GAAGiC,IAAI;AAAA,CAAI;AAAA,IAAA;AAIhCuC,uBAAAA,OAAOhF,OAAOsJ,eAAe5F,KAAK;AAAA,MAAC4E;AAAAA,MAAiBT;AAAAA,IAAAA,CAAc;AAAA,EAAA;AAG9D,SAAA,MAAA,IAAI1H,QAAc,CAACC,SAASC,WAChC4I,aAAanG,MAAOpC,CAASA,QAAAA,MAAML,OAAOK,GAAG,IAAIN,QAAU,CAAA,CAC7D,GAEA4E,OAAOhF,OAAOsJ,eAAe7I,OAC7BuE,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAKjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB,cAHfA,MACnBzH,oCAAoCiH,YAAGS,QAAAA,iBAAiBd,cAAc,CAAC;AAAA,IAEvBe,SAASA,MAAMV,YAAAA,QAAGW,SAASC,GAAGjB,cAAc;AAAA,EAAC;AACjG;AAEA,eAAekB,iBAAiBC,UAAkB;AAChD,QAAMzB,gBAAgB,oBAAInD,OACpBoD,cAAkBpD,oBAAAA,IAAAA,GAClBsE,eAAeA,MAAMzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBK,QAAQ,CAAC;AAE5F,mBAAiB9D,YAAYwD,aAAa;AACpCzD,QAAAA,sBAAsBC,QAAQ,GAAG;AACvBV,kBAAAA,IAAIU,SAASkD,GAAG;AACjBC,iBAAAA,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAAA,IAAA;AAKnCrE,SAAAA,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAEjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB;AAAAA,IAAcE,SAASK;AAAAA,EAAS;AACtE;AAQA,eAAeC,wBAAwB;AAAA,EACrC/C,QAAAA;AAAAA,EACAqB;AAAAA,EACAD,eAAe4B;AACe,GAAG;AACjC,QAAMC,cAAc,IAAIhF,IAAIoD,WAAW,GACjC6B,aAAaC,MAAM9H,KAAK2H,cAAc,EACzCI,OAAQxE,CAAAA,OAAO,CAACqE,YAAYI,IAAIzE,EAAE,KAAKD,UAAUC,EAAE,CAAC,EACpD0E,KAEGC,GAAAA,UAAUL,WAAWM,OACzB,CAACC,KAAKC,MAAMC,UAAU;AACpB,UAAMC,aAAaC,KAAKC,MAAMH,QAAQjH,8BAA8B;AAEpEqH,WADcN,IAAIG,UAAU,EACtBI,KAAKN,IAAI,GACRD;AAAAA,EAAAA,GAETN,MAAM9H,KAAe;AAAA,IACnB5C,QAAQoL,KAAKI,KAAKf,WAAWzK,SAASiE,8BAA8B;AAAA,EAAA,CACrE,EAAEwH,IAAI,MAAM,CAAA,CAAE,CACjB;AAEA,aAAWH,SAASR,SAAS;AACrB,UAAA;AAAA,MAACY;AAAAA,IAAAA,IAAW,MAAMnE,QAAOoE,QAA8B;AAAA,MAC3DC,KAAKrE,QAAOsE,WAAW,OAAOP,MAAMpC,KAAK,GAAG,CAAC;AAAA,MAC7C4C,MAAM;AAAA,MACNC,OAAO;AAAA,QAACC,gBAAgB;AAAA,MAAM;AAAA,MAC9BC,KAAK;AAAA,IAAA,CACN,GAEKC,aAAaR,QAAQX,OAAmD,CAACC,KAAKC,UAClFD,IAAIC,KAAK9E,EAAE,IAAI8E,KAAKkB,QACbnB,MACN,CAAA,CAAE;AAEL,eAAW7E
,MAAMmF;AAGXY,iBAAW/F,EAAE,MAAM,eACrBqE,YAAY5E,IAAIO,EAAE;AAAA,EAAA;AAIjB7C,SAAAA,OAAAA,MAAM8I,4BAEN;AAAA,IAAC5B;AAAAA,EAAW;AACrB;AAEA,eAAe5D,OAAO;AAGd,QAAA;AAAA,IAACyF,SAASC;AAAAA,MAAQ,MAAM,OAAO,OAAO,GAEtCC,4BAA4BC,uBAAAA,uBAAuBrI,OAAO;AAE5DsI,MAAAA;AAEA,MAAA;AACI,UAAA;AAAA,MAAClF,QAAAA;AAAAA,MAAQnD;AAAAA,IAAAA,IAAa,MAAM4C,cAAAA,GAC5B;AAAA,MAAC4B;AAAAA,MAAaD;AAAAA,MAAemB;AAAAA,MAAcE;AAAAA,IAAAA,IAAWxF,iBACxD,MAAM2F,iBAAiB3F,cAAc,IACrC,MAAMqD,mBAAmBN,OAAM;AACNyC,iCAAAA;AACvB,UAAA;AAAA,MAACQ;AAAAA,IAAW,IAAI,MAAMF,wBAAwB;AAAA,MAAC/C,QAAAA;AAAAA,MAAQoB;AAAAA,MAAeC;AAAAA,IAAAA,CAAY,GAElF8D,YAAqDC,CAAAA,YACzDpF,QAAOqF,WAAWD,OAAO,GAErBE,oBAA4DA,CAAC;AAAA,MAAC1G;AAAAA,IAAAA,MAClE5F,QAAQC,QAAQgK,YAAYI,IAAIzE,EAAE,CAAC,GAE/B2G,WAAYC,CAAgC,YAAA;AAChD,UAAIC,eAAe;AACnB,iBAAWC,UAAUF,SAAS;AACxBE,YAAAA,OAAOvI,UAAU,QAAgB,QAAA;AACjCuI,eAAOvI,UAAU,cAAWsI,eAAe;AAAA,MAAA;AAGjD,aAAIA,eAAqB,YAClB;AAAA,IACT;AAEA,QAAIE,iBAAiB;AAEfC,UAAAA,WAAW,OAAO7G,aAA6B;AAC/CyG,UAAAA;AAEA,UAAA;AACIK,cAAAA,UAAUC,OAAO,SAAS,GAE1BC,SAAS,MAAM/M,QAAQgN,KAAK,CAChCC,wBAAiB;AAAA,UACflH;AAAAA,UACAlC;AAAAA,UACAsI;AAAAA,UACAG;AAAAA,UACAY,aAAa;AAAA,UACb9I;AAAAA,UACAC;AAAAA,QACD,CAAA,GACD,IAAIrE,QAAyBC,CAC3BE,YAAAA,WAAW,MAAMF,QAAQ4M,OAAO,GAAGpJ,2BAA2B,CAChE,CAAC,CACF;AAED,YAAIsJ,WAAWF;AACb,gBAAM,IAAI/J,MACR,aAAaiD,SAASkD,GAAG,+BAA+BxF,2BAA2B,KACrF;AAGQsJ,kBAAAA,OAEP7B,IAAI,CAAC;AAAA,UAAC3F;AAAAA,UAAM,GAAGmH;AAAAA,QAAYA,MAAAA,MAAM,EAEjCtC,OAAQsC,CAAW,WAAA;AACZS,gBAAAA,cAAc1I,YAAYiI,OAAOvI,KAAK,GACtCiJ,iBACJ3I,YAAYN,KAAK,KAAiCM,YAAYG;AAChE,iBAAOuI,eAAeC;AAAAA,QAAAA,CACvB;AAAA,eACI7M,KAAK;AAMZiM,kBAAU,CACR;AAAA,UACEpJ,SAJY,8CAFdiK,OAAAA,SAAS9M,GAAG,KAAK,OAAOA,IAAI6C,WAAY,WAAW7C,IAAI6C,UAAU,eAEO;AAAA,UAKtEe,OAAO;AAAA,UACP7C,MAAM,CAAA;AAAA,QAAA,CACP;AAAA,MAAA;AAILqL;AAEMW,YAAAA,YACJhJ,cACA,GAAGA,UAAU,GAAGhD,cAAAA,QAAKrB,QACnB4D,UAAU+C,UACV,mBAAmB2G,mBAAmBxH,SAASkD,GAAG,CAAC,SAASsE,mBAC1DxH,SAASC,KACX,CAAC,EACH,CAAC;AAEInG,aAAAA,OAAO2N,WAAWjK,KAAK;AAAA,QAC5BkK,YAAY1H,SAASkD;AAAAA,QACrByE,cAAc3H,SAASC;AAAAA,QACvB2H,UAAU5H,SAAS6H;AAAAA,QACnB,GAAIN,aAAa;AAAA,UAACA;AAAAA,QAAS;AAAA,QAC3Bd;AAAAA,QACAG;AAAAA,QACAxI,OAAOoI,SAASC,OAAO;AAAA,MAAA,CACxB;AAAA,IACH;AAEMT,UAAAA,KAAKxC,aAAa,GAAGqD,UAAU;AAAA,MAACiB,aAAarK;AAAAA,IAA2B,CAAA,GAE9EqB,OAAOhF,OAAO2N,WAAWlN,IAAI;AAAA,EAAA,UACrB;AACF4L,UAAAA,gCACNF,0BAA0B;AAAA,EAAA;AAE9B;"}
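The `downloadFromExport` source embedded above streams the dataset export line by line with `node:readline` and spools it to a temporary NDJSON file, so only document IDs and reference IDs stay in memory while the documents themselves are re-read from disk during validation. A minimal sketch of that spool-to-disk pattern, assuming a generic NDJSON body; `spoolNdjsonToTempFile` is a hypothetical name, and the real function also filters system documents, collects reference IDs, and emits progress events:

import fs from 'node:fs'
import os from 'node:os'
import path from 'node:path'
import readline from 'node:readline'
import {Readable} from 'node:stream'

// Hypothetical sketch: stream an NDJSON body to a temp file, keeping only
// the document IDs in memory for a later existence check.
async function spoolNdjsonToTempFile(body: AsyncIterable<Buffer>) {
  const tempFile = path.join(os.tmpdir(), `sanity-validate-${Date.now()}.ndjson`)
  const out = fs.createWriteStream(tempFile)
  const documentIds = new Set<string>()

  const lines = readline.createInterface({input: Readable.from(body)})
  for await (const line of lines) {
    const document = JSON.parse(line) as {_id: string}
    documentIds.add(document._id)
    out.write(`${line}\n`)
  }

  // fs.WriteStream#close takes an optional callback, so wrap it in a promise
  await new Promise<void>((resolve, reject) =>
    out.close((err) => (err ? reject(err) : resolve())),
  )

  return {tempFile, documentIds}
}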
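`checkReferenceExistence` above verifies referenced IDs that were not part of the export by querying the `doc` data endpoint in batches with `excludeContent: 'true'`. An ID is treated as existing unless it is returned in `omitted` with the reason `'existence'`; an ID omitted for `'permission'` still counts as existing, so references the token cannot read are not flagged as broken. The batching `reduce` is equivalent to plain fixed-size chunking, sketched here with a hypothetical `chunk` helper:

// Equivalent to the reduce() in checkReferenceExistence: split the sorted
// ID list into groups of REFERENCE_INTEGRITY_BATCH_SIZE (the constant's
// value is not visible in this diff).
function chunk<T>(items: T[], size: number): T[][] {
  const batches: T[][] = []
  for (let i = 0; i < items.length; i += size) {
    batches.push(items.slice(i, i + size))
  }
  return batches
}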
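Each document's validation in `validate` is bounded by `DOCUMENT_VALIDATION_TIMEOUT` using `Promise.race` against a timer that resolves to a unique `Symbol`, which can never collide with a real result value. Generalized, the pattern looks like the hypothetical `withTimeout` below; note that, as in the source, the losing promise is not cancelled and the timer is not cleared:

// Hypothetical generalization of the Promise.race timeout in validate()
async function withTimeout<T>(work: Promise<T>, ms: number, label: string): Promise<T> {
  const timeout = Symbol('timeout')

  const result = await Promise.race([
    work,
    new Promise<typeof timeout>((resolve) => setTimeout(() => resolve(timeout), ms)),
  ])

  // the sentinel is a unique symbol, so this check cannot false-positive
  if (result === timeout) {
    throw new Error(`${label} failed to complete within ${ms}ms.`)
  }
  return result
}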
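Finally, `main` drives validation with `p-map`, which consumes the async iterable returned by `getDocuments()` while capping in-flight validations at `MAX_VALIDATION_CONCURRENCY` (the constant's value is not visible in this diff). `p-map` is ESM-only, which is why this CJS-compiled worker loads it dynamically. Usage, as in the source:

// p-map must be loaded with dynamic import() because this worker compiles to CJS
const {default: pMap} = await import('p-map')

await pMap(getDocuments(), validate, {concurrency: MAX_VALIDATION_CONCURRENCY})

Each document's markers are then folded into a single report level by `getLevel` ('error' outranks 'warning', which outranks 'info') before the result is emitted on the validation stream.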