ai 4.0.21 → 4.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/dist/index.js +5 -5
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +5 -5
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../streams/index.ts","../core/index.ts","../core/data-stream/create-data-stream.ts","../core/util/prepare-response-headers.ts","../core/data-stream/create-data-stream-response.ts","../core/util/prepare-outgoing-http-headers.ts","../core/util/write-to-server-response.ts","../core/data-stream/pipe-data-stream-to-response.ts","../errors/invalid-argument-error.ts","../util/retry-with-exponential-backoff.ts","../util/delay.ts","../util/retry-error.ts","../core/prompt/prepare-retries.ts","../core/telemetry/assemble-operation-name.ts","../core/telemetry/get-base-telemetry-attributes.ts","../core/telemetry/get-tracer.ts","../core/telemetry/noop-tracer.ts","../core/telemetry/record-span.ts","../core/telemetry/select-telemetry-attributes.ts","../core/embed/embed.ts","../core/util/split-array.ts","../core/embed/embed-many.ts","../core/generate-image/generate-image.ts","../core/generate-object/generate-object.ts","../errors/no-object-generated-error.ts","../util/download-error.ts","../util/download.ts","../core/util/detect-image-mimetype.ts","../core/prompt/data-content.ts","../core/prompt/invalid-data-content-error.ts","../core/prompt/invalid-message-role-error.ts","../core/prompt/split-data-url.ts","../core/prompt/convert-to-language-model-prompt.ts","../core/prompt/prepare-call-settings.ts","../core/prompt/standardize-prompt.ts","../core/prompt/message.ts","../core/types/provider-metadata.ts","../core/types/json-value.ts","../core/prompt/content-part.ts","../core/prompt/tool-result-content.ts","../core/prompt/detect-prompt-type.ts","../core/prompt/attachments-to-parts.ts","../core/prompt/message-conversion-error.ts","../core/prompt/convert-to-core-messages.ts","../core/types/usage.ts","../core/generate-object/inject-json-instruction.ts","../core/generate-object/output-strategy.ts","../core/util/async-iterable-stream.ts","../core/generate-object/validate-object-generation-input.ts","../core/generate-object/stream-object.ts","../util/delayed-promise.ts","../util/create-resolvable-promise.ts","../core/util/create-stitchable-stream.ts","../core/util/now.ts","../core/generate-text/generate-text.ts","../errors/index.ts","../errors/invalid-tool-arguments-error.ts","../errors/no-such-tool-error.ts","../errors/tool-call-repair-error.ts","../errors/tool-execution-error.ts","../core/prompt/prepare-tools-and-tool-choice.ts","../core/util/is-non-empty-object.ts","../core/util/split-on-last-whitespace.ts","../core/util/remove-text-after-last-whitespace.ts","../core/generate-text/parse-tool-call.ts","../core/generate-text/to-response-messages.ts","../core/generate-text/output.ts","../core/generate-text/stream-text.ts","../core/util/merge-streams.ts","../core/generate-text/run-tools-transformation.ts","../core/generate-text/smooth-stream.ts","../core/middleware/wrap-language-model.ts","../core/registry/custom-provider.ts","../core/registry/no-such-provider-error.ts","../core/registry/provider-registry.ts","../core/tool/tool.ts","../core/util/cosine-similarity.ts","../streams/assistant-response.ts","../streams/langchain-adapter.ts","../streams/stream-callbacks.ts","../streams/llamaindex-adapter.ts","../streams/stream-data.ts","../util/constants.ts"],"sourcesContent":["// forwarding exports from ui-utils:\nexport {\n formatAssistantStreamPart,\n formatDataStreamPart,\n parseAssistantStreamPart,\n parseDataStreamPart,\n processDataStream,\n processTextStream,\n} from '@ai-sdk/ui-utils';\nexport type {\n AssistantMessage,\n AssistantStatus,\n Attachment,\n ChatRequest,\n ChatRequestOptions,\n 
CreateMessage,\n DataMessage,\n DataStreamPart,\n IdGenerator,\n JSONValue,\n Message,\n RequestOptions,\n ToolInvocation,\n UseAssistantOptions,\n} from '@ai-sdk/ui-utils';\n\nexport { generateId } from '@ai-sdk/provider-utils';\n\nexport * from '../core/index';\nexport * from '../errors/index';\n\nexport * from './assistant-response';\nexport * as LangChainAdapter from './langchain-adapter';\nexport * as LlamaIndexAdapter from './llamaindex-adapter';\nexport * from './stream-data';\n","export { jsonSchema } from '@ai-sdk/ui-utils';\nexport type { DeepPartial, Schema } from '@ai-sdk/ui-utils';\nexport * from './data-stream';\nexport * from './embed';\nexport * from './generate-image';\nexport * from './generate-object';\nexport * from './generate-text';\nexport * from './middleware';\nexport * from './prompt';\nexport * from './registry';\nexport * from './tool';\nexport * from './types';\nexport { cosineSimilarity } from './util/cosine-similarity';\n","import { DataStreamString, formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { DataStreamWriter } from './data-stream-writer';\n\nexport function createDataStream({\n execute,\n onError = () => 'An error occurred.', // mask error messages for safety by default\n}: {\n execute: (dataStream: DataStreamWriter) => Promise<void> | void;\n onError?: (error: unknown) => string;\n}): ReadableStream<DataStreamString> {\n let controller!: ReadableStreamDefaultController<string>;\n\n const ongoingStreamPromises: Promise<void>[] = [];\n\n const stream = new ReadableStream({\n start(controllerArg) {\n controller = controllerArg;\n },\n });\n\n function safeEnqueue(data: DataStreamString) {\n try {\n controller.enqueue(data);\n } catch (error) {\n // suppress errors when the stream has been closed\n }\n }\n\n try {\n const result = execute({\n writeData(data) {\n safeEnqueue(formatDataStreamPart('data', [data]));\n },\n writeMessageAnnotation(annotation) {\n safeEnqueue(formatDataStreamPart('message_annotations', [annotation]));\n },\n merge(streamArg) {\n ongoingStreamPromises.push(\n (async () => {\n const reader = streamArg.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n safeEnqueue(value);\n }\n })().catch(error => {\n safeEnqueue(formatDataStreamPart('error', onError(error)));\n }),\n );\n },\n onError,\n });\n\n if (result) {\n ongoingStreamPromises.push(\n result.catch(error => {\n safeEnqueue(formatDataStreamPart('error', onError(error)));\n }),\n );\n }\n } catch (error) {\n safeEnqueue(formatDataStreamPart('error', onError(error)));\n }\n\n // Wait until all ongoing streams are done. This approach enables merging\n // streams even after execute has returned, as long as there is still an\n // open merged stream. This is important to e.g. forward new streams and\n // from callbacks.\n const waitForStreams: Promise<void> = new Promise(async resolve => {\n while (ongoingStreamPromises.length > 0) {\n await ongoingStreamPromises.shift();\n }\n resolve();\n });\n\n waitForStreams.finally(() => {\n try {\n controller.close();\n } catch (error) {\n // suppress errors when the stream has been closed\n }\n });\n\n return stream;\n}\n","export function prepareResponseHeaders(\n headers: HeadersInit | undefined,\n {\n contentType,\n dataStreamVersion,\n }: { contentType: string; dataStreamVersion?: 'v1' | undefined },\n) {\n const responseHeaders = new Headers(headers ?? 
{});\n\n if (!responseHeaders.has('Content-Type')) {\n responseHeaders.set('Content-Type', contentType);\n }\n\n if (dataStreamVersion !== undefined) {\n responseHeaders.set('X-Vercel-AI-Data-Stream', dataStreamVersion);\n }\n\n return responseHeaders;\n}\n","import { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { createDataStream } from './create-data-stream';\nimport { DataStreamWriter } from './data-stream-writer';\n\nexport function createDataStreamResponse({\n status,\n statusText,\n headers,\n execute,\n onError,\n}: ResponseInit & {\n execute: (dataStream: DataStreamWriter) => Promise<void> | void;\n onError?: (error: unknown) => string;\n}): Response {\n return new Response(\n createDataStream({ execute, onError }).pipeThrough(new TextEncoderStream()),\n {\n status,\n statusText,\n headers: prepareResponseHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n },\n );\n}\n","export function prepareOutgoingHttpHeaders(\n headers: HeadersInit | undefined,\n {\n contentType,\n dataStreamVersion,\n }: { contentType: string; dataStreamVersion?: 'v1' | undefined },\n) {\n const outgoingHeaders: Record<string, string | number | string[]> = {};\n\n if (headers != null) {\n for (const [key, value] of Object.entries(headers)) {\n outgoingHeaders[key] = value;\n }\n }\n\n if (outgoingHeaders['Content-Type'] == null) {\n outgoingHeaders['Content-Type'] = contentType;\n }\n\n if (dataStreamVersion !== undefined) {\n outgoingHeaders['X-Vercel-AI-Data-Stream'] = dataStreamVersion;\n }\n\n return outgoingHeaders;\n}\n","import { ServerResponse } from 'node:http';\n\n/**\n * Writes the content of a stream to a server response.\n */\nexport function writeToServerResponse({\n response,\n status,\n statusText,\n headers,\n stream,\n}: {\n response: ServerResponse;\n status?: number;\n statusText?: string;\n headers?: Record<string, string | number | string[]>;\n stream: ReadableStream<Uint8Array>;\n}): void {\n response.writeHead(status ?? 
200, statusText, headers);\n\n const reader = stream.getReader();\n const read = async () => {\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n response.write(value);\n }\n } catch (error) {\n throw error;\n } finally {\n response.end();\n }\n };\n\n read();\n}\n","import { ServerResponse } from 'node:http';\nimport { prepareOutgoingHttpHeaders } from '../util/prepare-outgoing-http-headers';\nimport { writeToServerResponse } from '../util/write-to-server-response';\nimport { createDataStream } from './create-data-stream';\nimport { DataStreamWriter } from './data-stream-writer';\n\nexport function pipeDataStreamToResponse(\n response: ServerResponse,\n {\n status,\n statusText,\n headers,\n execute,\n onError,\n }: ResponseInit & {\n execute: (writer: DataStreamWriter) => Promise<void> | void;\n onError?: (error: unknown) => string;\n },\n): void {\n writeToServerResponse({\n response,\n status,\n statusText,\n headers: prepareOutgoingHttpHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n stream: createDataStream({ execute, onError }).pipeThrough(\n new TextEncoderStream(),\n ),\n });\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidArgumentError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidArgumentError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly parameter: string;\n readonly value: unknown;\n\n constructor({\n parameter,\n value,\n message,\n }: {\n parameter: string;\n value: unknown;\n message: string;\n }) {\n super({\n name,\n message: `Invalid argument for parameter ${parameter}: ${message}`,\n });\n\n this.parameter = parameter;\n this.value = value;\n }\n\n static isInstance(error: unknown): error is InvalidArgumentError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { APICallError } from '@ai-sdk/provider';\nimport { getErrorMessage, isAbortError } from '@ai-sdk/provider-utils';\nimport { delay } from './delay';\nimport { RetryError } from './retry-error';\n\nexport type RetryFunction = <OUTPUT>(\n fn: () => PromiseLike<OUTPUT>,\n) => PromiseLike<OUTPUT>;\n\n/**\nThe `retryWithExponentialBackoff` strategy retries a failed API call with an exponential backoff.\nYou can configure the maximum number of retries, the initial delay, and the backoff factor.\n */\nexport const retryWithExponentialBackoff =\n ({\n maxRetries = 2,\n initialDelayInMs = 2000,\n backoffFactor = 2,\n } = {}): RetryFunction =>\n async <OUTPUT>(f: () => PromiseLike<OUTPUT>) =>\n _retryWithExponentialBackoff(f, {\n maxRetries,\n delayInMs: initialDelayInMs,\n backoffFactor,\n });\n\nasync function _retryWithExponentialBackoff<OUTPUT>(\n f: () => PromiseLike<OUTPUT>,\n {\n maxRetries,\n delayInMs,\n backoffFactor,\n }: { maxRetries: number; delayInMs: number; backoffFactor: number },\n errors: unknown[] = [],\n): Promise<OUTPUT> {\n try {\n return await f();\n } catch (error) {\n if (isAbortError(error)) {\n throw error; // don't retry when the request was aborted\n }\n\n if (maxRetries === 0) {\n throw error; // don't wrap the error when retries are disabled\n }\n\n const errorMessage = getErrorMessage(error);\n const newErrors = [...errors, error];\n const tryNumber = newErrors.length;\n\n if (tryNumber > maxRetries) {\n throw new RetryError({\n message: `Failed after ${tryNumber} attempts. 
Last error: ${errorMessage}`,\n reason: 'maxRetriesExceeded',\n errors: newErrors,\n });\n }\n\n if (\n error instanceof Error &&\n APICallError.isInstance(error) &&\n error.isRetryable === true &&\n tryNumber <= maxRetries\n ) {\n await delay(delayInMs);\n return _retryWithExponentialBackoff(\n f,\n { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },\n newErrors,\n );\n }\n\n if (tryNumber === 1) {\n throw error; // don't wrap the error when a non-retryable error occurs on the first try\n }\n\n throw new RetryError({\n message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,\n reason: 'errorNotRetryable',\n errors: newErrors,\n });\n }\n}\n","export async function delay(delayInMs?: number): Promise<void> {\n return delayInMs === undefined\n ? Promise.resolve()\n : new Promise(resolve => setTimeout(resolve, delayInMs));\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_RetryError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport type RetryErrorReason =\n | 'maxRetriesExceeded'\n | 'errorNotRetryable'\n | 'abort';\n\nexport class RetryError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n // note: property order determines debugging output\n readonly reason: RetryErrorReason;\n readonly lastError: unknown;\n readonly errors: Array<unknown>;\n\n constructor({\n message,\n reason,\n errors,\n }: {\n message: string;\n reason: RetryErrorReason;\n errors: Array<unknown>;\n }) {\n super({ name, message });\n\n this.reason = reason;\n this.errors = errors;\n\n // separate our last error to make debugging via log easier:\n this.lastError = errors[errors.length - 1];\n }\n\n static isInstance(error: unknown): error is RetryError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport {\n RetryFunction,\n retryWithExponentialBackoff,\n} from '../../util/retry-with-exponential-backoff';\n\n/**\n * Validate and prepare retries.\n */\nexport function prepareRetries({\n maxRetries,\n}: {\n maxRetries: number | undefined;\n}): {\n maxRetries: number;\n retry: RetryFunction;\n} {\n if (maxRetries != null) {\n if (!Number.isInteger(maxRetries)) {\n throw new InvalidArgumentError({\n parameter: 'maxRetries',\n value: maxRetries,\n message: 'maxRetries must be an integer',\n });\n }\n\n if (maxRetries < 0) {\n throw new InvalidArgumentError({\n parameter: 'maxRetries',\n value: maxRetries,\n message: 'maxRetries must be >= 0',\n });\n }\n }\n\n const maxRetriesResult = maxRetries ?? 2;\n\n return {\n maxRetries: maxRetriesResult,\n retry: retryWithExponentialBackoff({ maxRetries: maxRetriesResult }),\n };\n}\n","import { TelemetrySettings } from './telemetry-settings';\n\nexport function assembleOperationName({\n operationId,\n telemetry,\n}: {\n operationId: string;\n telemetry?: TelemetrySettings;\n}) {\n return {\n // standardized operation and resource name:\n 'operation.name': `${operationId}${\n telemetry?.functionId != null ? 
` ${telemetry.functionId}` : ''\n }`,\n 'resource.name': telemetry?.functionId,\n\n // detailed, AI SDK specific data:\n 'ai.operationId': operationId,\n 'ai.telemetry.functionId': telemetry?.functionId,\n };\n}\n","import { Attributes } from '@opentelemetry/api';\nimport { CallSettings } from '../prompt/call-settings';\nimport { TelemetrySettings } from './telemetry-settings';\n\nexport function getBaseTelemetryAttributes({\n model,\n settings,\n telemetry,\n headers,\n}: {\n model: { modelId: string; provider: string };\n settings: Omit<CallSettings, 'abortSignal' | 'headers'>;\n telemetry: TelemetrySettings | undefined;\n headers: Record<string, string | undefined> | undefined;\n}): Attributes {\n return {\n 'ai.model.provider': model.provider,\n 'ai.model.id': model.modelId,\n\n // settings:\n ...Object.entries(settings).reduce((attributes, [key, value]) => {\n attributes[`ai.settings.${key}`] = value;\n return attributes;\n }, {} as Attributes),\n\n // add metadata as attributes:\n ...Object.entries(telemetry?.metadata ?? {}).reduce(\n (attributes, [key, value]) => {\n attributes[`ai.telemetry.metadata.${key}`] = value;\n return attributes;\n },\n {} as Attributes,\n ),\n\n // request headers\n ...Object.entries(headers ?? {}).reduce((attributes, [key, value]) => {\n if (value !== undefined) {\n attributes[`ai.request.headers.${key}`] = value;\n }\n return attributes;\n }, {} as Attributes),\n };\n}\n","import { Tracer, trace } from '@opentelemetry/api';\nimport { noopTracer } from './noop-tracer';\n\nexport function getTracer({\n isEnabled = false,\n tracer,\n}: {\n isEnabled?: boolean;\n tracer?: Tracer;\n} = {}): Tracer {\n if (!isEnabled) {\n return noopTracer;\n }\n\n if (tracer) {\n return tracer;\n }\n\n return trace.getTracer('ai');\n}\n","import { Span, SpanContext, Tracer } from '@opentelemetry/api';\n\n/**\n * Tracer implementation that does nothing (null object).\n */\nexport const noopTracer: Tracer = {\n startSpan(): Span {\n return noopSpan;\n },\n\n startActiveSpan<F extends (span: Span) => unknown>(\n name: unknown,\n arg1: unknown,\n arg2?: unknown,\n arg3?: F,\n ): ReturnType<any> {\n if (typeof arg1 === 'function') {\n return arg1(noopSpan);\n }\n if (typeof arg2 === 'function') {\n return arg2(noopSpan);\n }\n if (typeof arg3 === 'function') {\n return arg3(noopSpan);\n }\n },\n};\n\nconst noopSpan: Span = {\n spanContext() {\n return noopSpanContext;\n },\n setAttribute() {\n return this;\n },\n setAttributes() {\n return this;\n },\n addEvent() {\n return this;\n },\n addLink() {\n return this;\n },\n addLinks() {\n return this;\n },\n setStatus() {\n return this;\n },\n updateName() {\n return this;\n },\n end() {\n return this;\n },\n isRecording() {\n return false;\n },\n recordException() {\n return this;\n },\n};\n\nconst noopSpanContext: SpanContext = {\n traceId: '',\n spanId: '',\n traceFlags: 0,\n};\n","import { Attributes, Span, Tracer, SpanStatusCode } from '@opentelemetry/api';\n\nexport function recordSpan<T>({\n name,\n tracer,\n attributes,\n fn,\n endWhenDone = true,\n}: {\n name: string;\n tracer: Tracer;\n attributes: Attributes;\n fn: (span: Span) => Promise<T>;\n endWhenDone?: boolean;\n}) {\n return tracer.startActiveSpan(name, { attributes }, async span => {\n try {\n const result = await fn(span);\n\n if (endWhenDone) {\n span.end();\n }\n\n return result;\n } catch (error) {\n try {\n if (error instanceof Error) {\n span.recordException({\n name: error.name,\n message: error.message,\n stack: error.stack,\n });\n span.setStatus({\n code: 
SpanStatusCode.ERROR,\n message: error.message,\n });\n } else {\n span.setStatus({ code: SpanStatusCode.ERROR });\n }\n } finally {\n // always stop the span when there is an error:\n span.end();\n }\n\n throw error;\n }\n });\n}\n","import type { Attributes, AttributeValue } from '@opentelemetry/api';\nimport type { TelemetrySettings } from './telemetry-settings';\n\nexport function selectTelemetryAttributes({\n telemetry,\n attributes,\n}: {\n telemetry?: TelemetrySettings;\n attributes: {\n [attributeKey: string]:\n | AttributeValue\n | { input: () => AttributeValue | undefined }\n | { output: () => AttributeValue | undefined }\n | undefined;\n };\n}): Attributes {\n // when telemetry is disabled, return an empty object to avoid serialization overhead:\n if (telemetry?.isEnabled !== true) {\n return {};\n }\n\n return Object.entries(attributes).reduce((attributes, [key, value]) => {\n if (value === undefined) {\n return attributes;\n }\n\n // input value, check if it should be recorded:\n if (\n typeof value === 'object' &&\n 'input' in value &&\n typeof value.input === 'function'\n ) {\n // default to true:\n if (telemetry?.recordInputs === false) {\n return attributes;\n }\n\n const result = value.input();\n\n return result === undefined\n ? attributes\n : { ...attributes, [key]: result };\n }\n\n // output value, check if it should be recorded:\n if (\n typeof value === 'object' &&\n 'output' in value &&\n typeof value.output === 'function'\n ) {\n // default to true:\n if (telemetry?.recordOutputs === false) {\n return attributes;\n }\n\n const result = value.output();\n\n return result === undefined\n ? attributes\n : { ...attributes, [key]: result };\n }\n\n // value is an attribute value already:\n return { ...attributes, [key]: value };\n }, {});\n}\n","import { prepareRetries } from '../prompt/prepare-retries';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { EmbeddingModel } from '../types';\nimport { EmbedResult } from './embed-result';\n\n/**\nEmbed a value using an embedding model. The type of the value is defined by the embedding model.\n\n@param model - The embedding model to use.\n@param value - The value that should be embedded.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@returns A result object that contains the embedding, the value, and additional information.\n */\nexport async function embed<VALUE>({\n model,\n value,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n}: {\n /**\nThe embedding model to use.\n */\n model: EmbeddingModel<VALUE>;\n\n /**\nThe value that should be embedded.\n */\n value: VALUE;\n\n /**\nMaximum number of retries per embedding model call. 
Set to 0 to disable retries.\n\n@default 2\n */\n maxRetries?: number;\n\n /**\nAbort signal.\n */\n abortSignal?: AbortSignal;\n\n /**\nAdditional headers to include in the request.\nOnly applicable for HTTP-based providers.\n */\n headers?: Record<string, string>;\n\n /**\n * Optional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n}): Promise<EmbedResult<VALUE>> {\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.embed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: 'ai.embed', telemetry }),\n ...baseTelemetryAttributes,\n 'ai.value': { input: () => JSON.stringify(value) },\n },\n }),\n tracer,\n fn: async span => {\n const { embedding, usage, rawResponse } = await retry(() =>\n // nested spans to align with the embedMany telemetry data:\n recordSpan({\n name: 'ai.embed.doEmbed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.embed.doEmbed',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': { input: () => [JSON.stringify(value)] },\n },\n }),\n tracer,\n fn: async doEmbedSpan => {\n const modelResponse = await model.doEmbed({\n values: [value],\n abortSignal,\n headers,\n });\n\n const embedding = modelResponse.embeddings[0];\n const usage = modelResponse.usage ?? { tokens: NaN };\n\n doEmbedSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n modelResponse.embeddings.map(embedding =>\n JSON.stringify(embedding),\n ),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return {\n embedding,\n usage,\n rawResponse: modelResponse.rawResponse,\n };\n },\n }),\n );\n\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embedding': { output: () => JSON.stringify(embedding) },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return new DefaultEmbedResult({ value, embedding, usage, rawResponse });\n },\n });\n}\n\nclass DefaultEmbedResult<VALUE> implements EmbedResult<VALUE> {\n readonly value: EmbedResult<VALUE>['value'];\n readonly embedding: EmbedResult<VALUE>['embedding'];\n readonly usage: EmbedResult<VALUE>['usage'];\n readonly rawResponse: EmbedResult<VALUE>['rawResponse'];\n\n constructor(options: {\n value: EmbedResult<VALUE>['value'];\n embedding: EmbedResult<VALUE>['embedding'];\n usage: EmbedResult<VALUE>['usage'];\n rawResponse?: EmbedResult<VALUE>['rawResponse'];\n }) {\n this.value = options.value;\n this.embedding = options.embedding;\n this.usage = options.usage;\n this.rawResponse = options.rawResponse;\n }\n}\n","/**\n * Splits an array into chunks of a specified size.\n *\n * @template T - The type of elements in the array.\n * @param {T[]} array - The array to split.\n * @param {number} chunkSize - The size of each chunk.\n * @returns {T[][]} - A new array containing the chunks.\n */\nexport function splitArray<T>(array: T[], chunkSize: number): T[][] {\n if (chunkSize <= 0) {\n throw new Error('chunkSize must be greater than 0');\n }\n\n const result = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n result.push(array.slice(i, i + chunkSize));\n }\n\n return 
result;\n}\n","import { prepareRetries } from '../prompt/prepare-retries';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { Embedding, EmbeddingModel } from '../types';\nimport { splitArray } from '../util/split-array';\nimport { EmbedManyResult } from './embed-many-result';\n\n/**\nEmbed several values using an embedding model. The type of the value is defined\nby the embedding model.\n\n`embedMany` automatically splits large requests into smaller chunks if the model\nhas a limit on how many embeddings can be generated in a single call.\n\n@param model - The embedding model to use.\n@param values - The values that should be embedded.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@returns A result object that contains the embeddings, the value, and additional information.\n */\nexport async function embedMany<VALUE>({\n model,\n values,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n}: {\n /**\nThe embedding model to use.\n */\n model: EmbeddingModel<VALUE>;\n\n /**\nThe values that should be embedded.\n */\n values: Array<VALUE>;\n\n /**\nMaximum number of retries per embedding model call. 
Set to 0 to disable retries.\n\n@default 2\n */\n maxRetries?: number;\n\n /**\nAbort signal.\n */\n abortSignal?: AbortSignal;\n\n /**\nAdditional headers to include in the request.\nOnly applicable for HTTP-based providers.\n */\n headers?: Record<string, string>;\n\n /**\n * Optional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n}): Promise<EmbedManyResult<VALUE>> {\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.embedMany',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: 'ai.embedMany', telemetry }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': {\n input: () => values.map(value => JSON.stringify(value)),\n },\n },\n }),\n tracer,\n fn: async span => {\n const maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;\n\n // the model has not specified limits on\n // how many embeddings can be generated in a single call\n if (maxEmbeddingsPerCall == null) {\n const { embeddings, usage } = await retry(() => {\n // nested spans to align with the embedMany telemetry data:\n return recordSpan({\n name: 'ai.embedMany.doEmbed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.embedMany.doEmbed',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': {\n input: () => values.map(value => JSON.stringify(value)),\n },\n },\n }),\n tracer,\n fn: async doEmbedSpan => {\n const modelResponse = await model.doEmbed({\n values,\n abortSignal,\n headers,\n });\n\n const embeddings = modelResponse.embeddings;\n const usage = modelResponse.usage ?? 
{ tokens: NaN };\n\n doEmbedSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return { embeddings, usage };\n },\n });\n });\n\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return new DefaultEmbedManyResult({ values, embeddings, usage });\n }\n\n // split the values into chunks that are small enough for the model:\n const valueChunks = splitArray(values, maxEmbeddingsPerCall);\n\n // serially embed the chunks:\n const embeddings: Array<Embedding> = [];\n let tokens = 0;\n\n for (const chunk of valueChunks) {\n const { embeddings: responseEmbeddings, usage } = await retry(() => {\n // nested spans to align with the embedMany telemetry data:\n return recordSpan({\n name: 'ai.embedMany.doEmbed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.embedMany.doEmbed',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': {\n input: () => chunk.map(value => JSON.stringify(value)),\n },\n },\n }),\n tracer,\n fn: async doEmbedSpan => {\n const modelResponse = await model.doEmbed({\n values: chunk,\n abortSignal,\n headers,\n });\n\n const embeddings = modelResponse.embeddings;\n const usage = modelResponse.usage ?? { tokens: NaN };\n\n doEmbedSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return { embeddings, usage };\n },\n });\n });\n\n embeddings.push(...responseEmbeddings);\n tokens += usage.tokens;\n }\n\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': tokens,\n },\n }),\n );\n\n return new DefaultEmbedManyResult({\n values,\n embeddings,\n usage: { tokens },\n });\n },\n });\n}\n\nclass DefaultEmbedManyResult<VALUE> implements EmbedManyResult<VALUE> {\n readonly values: EmbedManyResult<VALUE>['values'];\n readonly embeddings: EmbedManyResult<VALUE>['embeddings'];\n readonly usage: EmbedManyResult<VALUE>['usage'];\n\n constructor(options: {\n values: EmbedManyResult<VALUE>['values'];\n embeddings: EmbedManyResult<VALUE>['embeddings'];\n usage: EmbedManyResult<VALUE>['usage'];\n }) {\n this.values = options.values;\n this.embeddings = options.embeddings;\n this.usage = options.usage;\n }\n}\n","import { ImageModelV1, JSONValue } from '@ai-sdk/provider';\nimport { convertBase64ToUint8Array } from '@ai-sdk/provider-utils';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { GeneratedImage, GenerateImageResult } from './generate-image-result';\n\n/**\nGenerates images using an image model.\n\n@param model - The image model to use.\n@param prompt - The prompt that should be used to generate the image.\n@param n - Number of images to generate. Default: 1.\n@param size - Size of the images to generate. 
Must have the format `{width}x{height}`.\n@param providerOptions - Additional provider-specific options that are passed through to the provider\nas body parameters.\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@returns A result object that contains the generated images.\n */\nexport async function generateImage({\n model,\n prompt,\n n,\n size,\n providerOptions,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n}: {\n /**\nThe image model to use.\n */\n model: ImageModelV1;\n\n /**\nThe prompt that should be used to generate the image.\n */\n prompt: string;\n\n /**\nNumber of images to generate.\n */\n n?: number;\n\n /**\nSize of the images to generate. Must have the format `{width}x{height}`.\n */\n size?: `${number}x${number}`;\n\n /**\nAdditional provider-specific options that are passed through to the provider\nas body parameters.\n\nThe outer record is keyed by the provider name, and the inner\nrecord is keyed by the provider-specific metadata key.\n```ts\n{\n \"openai\": {\n \"style\": \"vivid\"\n }\n}\n```\n */\n providerOptions?: Record<string, Record<string, JSONValue>>;\n\n /**\nMaximum number of retries per embedding model call. Set to 0 to disable retries.\n\n@default 2\n */\n maxRetries?: number;\n\n /**\nAbort signal.\n */\n abortSignal?: AbortSignal;\n\n /**\nAdditional headers to include in the request.\nOnly applicable for HTTP-based providers.\n */\n headers?: Record<string, string>;\n}): Promise<GenerateImageResult> {\n const { retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const { images } = await retry(() =>\n model.doGenerate({\n prompt,\n n: n ?? 1,\n abortSignal,\n headers,\n size,\n providerOptions: providerOptions ?? 
{},\n }),\n );\n\n return new DefaultGenerateImageResult({ base64Images: images });\n}\n\nclass DefaultGenerateImageResult implements GenerateImageResult {\n readonly images: Array<GeneratedImage>;\n\n constructor(options: { base64Images: Array<string> }) {\n this.images = options.base64Images.map(base64 => ({\n base64,\n get uint8Array() {\n return convertBase64ToUint8Array(this.base64);\n },\n }));\n }\n\n get image() {\n return this.images[0];\n }\n}\n","import { JSONValue } from '@ai-sdk/provider';\nimport { createIdGenerator, safeParseJSON } from '@ai-sdk/provider-utils';\nimport { Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors/no-object-generated-error';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport {\n CallWarning,\n FinishReason,\n LanguageModel,\n LogProbs,\n ProviderMetadata,\n} from '../types';\nimport { LanguageModelRequestMetadata } from '../types/language-model-request-metadata';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { calculateLanguageModelUsage } from '../types/usage';\nimport { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { GenerateObjectResult } from './generate-object-result';\nimport { injectJsonInstruction } from './inject-json-instruction';\nimport { getOutputStrategy } from './output-strategy';\nimport { validateObjectGenerationInput } from './validate-object-generation-input';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aiobj', size: 24 });\n\n/**\nGenerate a structured, typed object for a given prompt and schema using a language model.\n\nThis function does not stream the output. 
If you want to stream the output, use `streamObject` instead.\n\n@returns\nA result object that contains the generated object, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject<OBJECT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output?: 'object' | undefined;\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe schema of the object that the model should generate.\n */\n schema: z.Schema<OBJECT, z.ZodTypeDef, any> | Schema<OBJECT>;\n\n /**\nOptional name of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<OBJECT>>;\n/**\nGenerate an array with structured, typed elements for a given prompt and element schema using a language model.\n\nThis function does not stream the output. 
If you want to stream the output, use `streamObject` instead.\n\n@return\nA result object that contains the generated object, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject<ELEMENT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'array';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe element schema of the array that the model should generate.\n */\n schema: z.Schema<ELEMENT, z.ZodTypeDef, any> | Schema<ELEMENT>;\n\n /**\nOptional name of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<Array<ELEMENT>>>;\n/**\nGenerate a value from an enum (limited list of string values) using a language model.\n\nThis function does not stream the output.\n\n@return\nA result object that contains the generated value, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject<ENUM extends string>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'enum';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe enum values that the model should use.\n */\n enum: Array<ENUM>;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<ENUM>>;\n/**\nGenerate JSON with any schema for a given prompt using a language model.\n\nThis function does not stream the output. If you want to stream the output, use `streamObject` instead.\n\n@returns\nA result object that contains the generated object, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'no-schema';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe mode to use for object generation. Must be \"json\" for no-schema output.\n */\n mode?: 'json';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<JSONValue>>;\nexport async function generateObject<SCHEMA, RESULT>({\n model,\n enum: enumValues, // rename bc enum is reserved by typescript\n schema: inputSchema,\n schemaName,\n schemaDescription,\n mode,\n output = 'object',\n system,\n prompt,\n messages,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n _internal: {\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n } = {},\n ...settings\n}: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n /**\n * The expected structure of the output.\n *\n * - 'object': Generate a single object that conforms to the schema.\n * - 'array': Generate an array of objects that conform to the schema.\n * - 'no-schema': Generate any JSON object. No schema is specified.\n *\n * Default is 'object' if not specified.\n */\n output?: 'object' | 'array' | 'enum' | 'no-schema';\n\n model: LanguageModel;\n enum?: Array<SCHEMA>;\n schema?: z.Schema<SCHEMA, z.ZodTypeDef, any> | Schema<SCHEMA>;\n schemaName?: string;\n schemaDescription?: string;\n mode?: 'auto' | 'json' | 'tool';\n experimental_telemetry?: TelemetrySettings;\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. 
May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n }): Promise<GenerateObjectResult<RESULT>> {\n validateObjectGenerationInput({\n output,\n mode,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n enumValues,\n });\n\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const outputStrategy = getOutputStrategy({\n output,\n schema: inputSchema,\n enumValues,\n });\n\n // automatically set mode to 'json' for no-schema output\n if (outputStrategy.type === 'no-schema' && mode === undefined) {\n mode = 'json';\n }\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.generateObject',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateObject',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.schema':\n outputStrategy.jsonSchema != null\n ? { input: () => JSON.stringify(outputStrategy.jsonSchema) }\n : undefined,\n 'ai.schema.name': schemaName,\n 'ai.schema.description': schemaDescription,\n 'ai.settings.output': outputStrategy.type,\n 'ai.settings.mode': mode,\n },\n }),\n tracer,\n fn: async span => {\n // use the default provider mode when the mode is set to 'auto' or unspecified\n if (mode === 'auto' || mode == null) {\n mode = model.defaultObjectGenerationMode;\n }\n\n let result: string;\n let finishReason: FinishReason;\n let usage: Parameters<typeof calculateLanguageModelUsage>[0];\n let warnings: CallWarning[] | undefined;\n let rawResponse: { headers?: Record<string, string> } | undefined;\n let response: LanguageModelResponseMetadata;\n let request: LanguageModelRequestMetadata;\n let logprobs: LogProbs | undefined;\n let resultProviderMetadata: ProviderMetadata | undefined;\n\n switch (mode) {\n case 'json': {\n const standardizedPrompt = standardizePrompt({\n prompt: {\n system:\n outputStrategy.jsonSchema == null\n ? injectJsonInstruction({ prompt: system })\n : model.supportsStructuredOutputs\n ? 
system\n : injectJsonInstruction({\n prompt: system,\n schema: outputStrategy.jsonSchema,\n }),\n prompt,\n messages,\n },\n tools: undefined,\n });\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n\n const generateResult = await retry(() =>\n recordSpan({\n name: 'ai.generateObject.doGenerate',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateObject.doGenerate',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => standardizedPrompt.type,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.settings.mode': mode,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n fn: async span => {\n const result = await model.doGenerate({\n mode: {\n type: 'object-json',\n schema: outputStrategy.jsonSchema,\n name: schemaName,\n description: schemaDescription,\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n });\n\n const responseData = {\n id: result.response?.id ?? generateId(),\n timestamp: result.response?.timestamp ?? currentDate(),\n modelId: result.response?.modelId ?? model.modelId,\n };\n\n if (result.text === undefined) {\n throw new NoObjectGeneratedError({\n message:\n 'No object generated: the model did not return a response.',\n response: responseData,\n usage: calculateLanguageModelUsage(result.usage),\n });\n }\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': result.finishReason,\n 'ai.response.object': { output: () => result.text },\n 'ai.response.id': responseData.id,\n 'ai.response.model': responseData.modelId,\n 'ai.response.timestamp':\n responseData.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': result.usage.promptTokens,\n 'ai.usage.completionTokens':\n result.usage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [result.finishReason],\n 'gen_ai.response.id': responseData.id,\n 'gen_ai.response.model': responseData.modelId,\n 'gen_ai.usage.prompt_tokens': result.usage.promptTokens,\n 'gen_ai.usage.completion_tokens':\n result.usage.completionTokens,\n },\n }),\n );\n\n return { ...result, objectText: result.text, responseData };\n },\n }),\n );\n\n result = generateResult.objectText;\n finishReason = generateResult.finishReason;\n usage = generateResult.usage;\n warnings = generateResult.warnings;\n rawResponse = generateResult.rawResponse;\n logprobs = generateResult.logprobs;\n resultProviderMetadata = generateResult.providerMetadata;\n request = generateResult.request ?? 
{};\n response = generateResult.responseData;\n\n break;\n }\n\n case 'tool': {\n const standardizedPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools: undefined,\n });\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n const inputFormat = standardizedPrompt.type;\n\n const generateResult = await retry(() =>\n recordSpan({\n name: 'ai.generateObject.doGenerate',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateObject.doGenerate',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => inputFormat,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.settings.mode': mode,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n fn: async span => {\n const result = await model.doGenerate({\n mode: {\n type: 'object-tool',\n tool: {\n type: 'function',\n name: schemaName ?? 'json',\n description:\n schemaDescription ?? 'Respond with a JSON object.',\n parameters: outputStrategy.jsonSchema!,\n },\n },\n ...prepareCallSettings(settings),\n inputFormat,\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n });\n\n const objectText = result.toolCalls?.[0]?.args;\n\n const responseData = {\n id: result.response?.id ?? generateId(),\n timestamp: result.response?.timestamp ?? currentDate(),\n modelId: result.response?.modelId ?? model.modelId,\n };\n\n if (objectText === undefined) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: the tool was not called.',\n response: responseData,\n usage: calculateLanguageModelUsage(result.usage),\n });\n }\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': result.finishReason,\n 'ai.response.object': { output: () => objectText },\n 'ai.response.id': responseData.id,\n 'ai.response.model': responseData.modelId,\n 'ai.response.timestamp':\n responseData.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': result.usage.promptTokens,\n 'ai.usage.completionTokens':\n result.usage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [result.finishReason],\n 'gen_ai.response.id': responseData.id,\n 'gen_ai.response.model': responseData.modelId,\n 'gen_ai.usage.input_tokens': result.usage.promptTokens,\n 'gen_ai.usage.output_tokens':\n result.usage.completionTokens,\n },\n }),\n );\n\n return { ...result, objectText, responseData };\n },\n }),\n );\n\n result = generateResult.objectText;\n finishReason = generateResult.finishReason;\n usage = generateResult.usage;\n warnings = generateResult.warnings;\n rawResponse = generateResult.rawResponse;\n logprobs = generateResult.logprobs;\n resultProviderMetadata = generateResult.providerMetadata;\n request = generateResult.request ?? 
{};\n response = generateResult.responseData;\n\n break;\n }\n\n case undefined: {\n throw new Error(\n 'Model does not have a default object generation mode.',\n );\n }\n\n default: {\n const _exhaustiveCheck: never = mode;\n throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);\n }\n }\n\n const parseResult = safeParseJSON({ text: result });\n\n if (!parseResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: could not parse the response.',\n cause: parseResult.error,\n text: result,\n response,\n usage: calculateLanguageModelUsage(usage),\n });\n }\n\n const validationResult = outputStrategy.validateFinalResult(\n parseResult.value,\n {\n text: result,\n response,\n usage: calculateLanguageModelUsage(usage),\n },\n );\n\n if (!validationResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: response did not match schema.',\n cause: validationResult.error,\n text: result,\n response,\n usage: calculateLanguageModelUsage(usage),\n });\n }\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': finishReason,\n 'ai.response.object': {\n output: () => JSON.stringify(validationResult.value),\n },\n\n 'ai.usage.promptTokens': usage.promptTokens,\n 'ai.usage.completionTokens': usage.completionTokens,\n },\n }),\n );\n\n return new DefaultGenerateObjectResult({\n object: validationResult.value,\n finishReason,\n usage: calculateLanguageModelUsage(usage),\n warnings,\n request,\n response: {\n ...response,\n headers: rawResponse?.headers,\n },\n logprobs,\n providerMetadata: resultProviderMetadata,\n });\n },\n });\n}\n\nclass DefaultGenerateObjectResult<T> implements GenerateObjectResult<T> {\n readonly object: GenerateObjectResult<T>['object'];\n readonly finishReason: GenerateObjectResult<T>['finishReason'];\n readonly usage: GenerateObjectResult<T>['usage'];\n readonly warnings: GenerateObjectResult<T>['warnings'];\n readonly logprobs: GenerateObjectResult<T>['logprobs'];\n readonly experimental_providerMetadata: GenerateObjectResult<T>['experimental_providerMetadata'];\n readonly response: GenerateObjectResult<T>['response'];\n readonly request: GenerateObjectResult<T>['request'];\n\n constructor(options: {\n object: GenerateObjectResult<T>['object'];\n finishReason: GenerateObjectResult<T>['finishReason'];\n usage: GenerateObjectResult<T>['usage'];\n warnings: GenerateObjectResult<T>['warnings'];\n logprobs: GenerateObjectResult<T>['logprobs'];\n providerMetadata: GenerateObjectResult<T>['experimental_providerMetadata'];\n response: GenerateObjectResult<T>['response'];\n request: GenerateObjectResult<T>['request'];\n }) {\n this.object = options.object;\n this.finishReason = options.finishReason;\n this.usage = options.usage;\n this.warnings = options.warnings;\n this.experimental_providerMetadata = options.providerMetadata;\n this.response = options.response;\n this.request = options.request;\n this.logprobs = options.logprobs;\n }\n\n toJsonResponse(init?: ResponseInit): Response {\n return new Response(JSON.stringify(this.object), {\n status: init?.status ?? 
200,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'application/json; charset=utf-8',\n }),\n });\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\nimport { LanguageModelResponseMetadata } from '../core/types/language-model-response-metadata';\nimport { LanguageModelUsage } from '../core/types/usage';\n\nconst name = 'AI_NoObjectGeneratedError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\nThrown when no object could be generated. This can have several causes:\n\n- The model failed to generate a response.\n- The model generated a response that could not be parsed.\n- The model generated a response that could not be validated against the schema.\n\nThe error contains the following properties:\n\n- `text`: The text that was generated by the model. This can be the raw text or the tool call text, depending on the model.\n */\nexport class NoObjectGeneratedError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n /**\n The text that was generated by the model. This can be the raw text or the tool call text, depending on the model.\n */\n readonly text: string | undefined;\n\n /**\n The response metadata.\n */\n readonly response: LanguageModelResponseMetadata | undefined;\n\n /**\n The usage of the model.\n */\n readonly usage: LanguageModelUsage | undefined;\n\n constructor({\n message = 'No object generated.',\n cause,\n text,\n response,\n usage,\n }: {\n message?: string;\n cause?: Error;\n text?: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n }) {\n super({ name, message, cause });\n\n this.text = text;\n this.response = response;\n this.usage = usage;\n }\n\n static isInstance(error: unknown): error is NoObjectGeneratedError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n\nexport function verifyNoObjectGeneratedError(\n error: unknown,\n expected: {\n message: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n) {\n expect(NoObjectGeneratedError.isInstance(error)).toBeTruthy();\n const noObjectGeneratedError = error as NoObjectGeneratedError;\n expect(noObjectGeneratedError.message).toStrictEqual(expected.message);\n expect(noObjectGeneratedError.response).toStrictEqual(expected.response);\n expect(noObjectGeneratedError.usage).toStrictEqual(expected.usage);\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_DownloadError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class DownloadError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly url: string;\n readonly statusCode?: number;\n readonly statusText?: string;\n\n constructor({\n url,\n statusCode,\n statusText,\n cause,\n message = cause == null\n ? 
`Failed to download ${url}: ${statusCode} ${statusText}`\n : `Failed to download ${url}: ${cause}`,\n }: {\n url: string;\n statusCode?: number;\n statusText?: string;\n message?: string;\n cause?: unknown;\n }) {\n super({ name, message, cause });\n\n this.url = url;\n this.statusCode = statusCode;\n this.statusText = statusText;\n }\n\n static isInstance(error: unknown): error is DownloadError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { DownloadError } from './download-error';\n\nexport async function download({\n url,\n fetchImplementation = fetch,\n}: {\n url: URL;\n fetchImplementation?: typeof fetch;\n}): Promise<{\n data: Uint8Array;\n mimeType: string | undefined;\n}> {\n const urlText = url.toString();\n try {\n const response = await fetchImplementation(urlText);\n\n if (!response.ok) {\n throw new DownloadError({\n url: urlText,\n statusCode: response.status,\n statusText: response.statusText,\n });\n }\n\n return {\n data: new Uint8Array(await response.arrayBuffer()),\n mimeType: response.headers.get('content-type') ?? undefined,\n };\n } catch (error) {\n if (DownloadError.isInstance(error)) {\n throw error;\n }\n\n throw new DownloadError({ url: urlText, cause: error });\n }\n}\n","const mimeTypeSignatures = [\n { mimeType: 'image/gif' as const, bytes: [0x47, 0x49, 0x46] },\n { mimeType: 'image/png' as const, bytes: [0x89, 0x50, 0x4e, 0x47] },\n { mimeType: 'image/jpeg' as const, bytes: [0xff, 0xd8] },\n { mimeType: 'image/webp' as const, bytes: [0x52, 0x49, 0x46, 0x46] },\n];\n\nexport function detectImageMimeType(\n image: Uint8Array,\n): 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp' | undefined {\n for (const { bytes, mimeType } of mimeTypeSignatures) {\n if (\n image.length >= bytes.length &&\n bytes.every((byte, index) => image[index] === byte)\n ) {\n return mimeType;\n }\n }\n\n return undefined;\n}\n","import {\n convertBase64ToUint8Array,\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\nimport { InvalidDataContentError } from './invalid-data-content-error';\nimport { z } from 'zod';\n\n/**\nData content. Can either be a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer.\n */\nexport type DataContent = string | Uint8Array | ArrayBuffer | Buffer;\n\n/**\n@internal\n */\nexport const dataContentSchema: z.ZodType<DataContent> = z.union([\n z.string(),\n z.instanceof(Uint8Array),\n z.instanceof(ArrayBuffer),\n z.custom(\n // Buffer might not be available in some environments such as CloudFlare:\n (value: unknown): value is Buffer =>\n globalThis.Buffer?.isBuffer(value) ?? false,\n { message: 'Must be a Buffer' },\n ),\n]);\n\n/**\nConverts data content to a base64-encoded string.\n\n@param content - Data content to convert.\n@returns Base64-encoded string.\n*/\nexport function convertDataContentToBase64String(content: DataContent): string {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content instanceof ArrayBuffer) {\n return convertUint8ArrayToBase64(new Uint8Array(content));\n }\n\n return convertUint8ArrayToBase64(content);\n}\n\n/**\nConverts data content to a Uint8Array.\n\n@param content - Data content to convert.\n@returns Uint8Array.\n */\nexport function convertDataContentToUint8Array(\n content: DataContent,\n): Uint8Array {\n if (content instanceof Uint8Array) {\n return content;\n }\n\n if (typeof content === 'string') {\n try {\n return convertBase64ToUint8Array(content);\n } catch (error) {\n throw new InvalidDataContentError({\n message:\n 'Invalid data content. 
Content string is not a base64-encoded media.',\n content,\n cause: error,\n });\n }\n }\n\n if (content instanceof ArrayBuffer) {\n return new Uint8Array(content);\n }\n\n throw new InvalidDataContentError({ content });\n}\n\n/**\n * Converts a Uint8Array to a string of text.\n *\n * @param uint8Array - The Uint8Array to convert.\n * @returns The converted string.\n */\nexport function convertUint8ArrayToText(uint8Array: Uint8Array): string {\n try {\n return new TextDecoder().decode(uint8Array);\n } catch (error) {\n throw new Error('Error decoding Uint8Array to text');\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidDataContentError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidDataContentError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly content: unknown;\n\n constructor({\n content,\n cause,\n message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`,\n }: {\n content: unknown;\n cause?: unknown;\n message?: string;\n }) {\n super({ name, message, cause });\n\n this.content = content;\n }\n\n static isInstance(error: unknown): error is InvalidDataContentError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidMessageRoleError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidMessageRoleError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly role: string;\n\n constructor({\n role,\n message = `Invalid message role: '${role}'. Must be one of: \"system\", \"user\", \"assistant\", \"tool\".`,\n }: {\n role: string;\n message?: string;\n }) {\n super({ name, message });\n\n this.role = role;\n }\n\n static isInstance(error: unknown): error is InvalidMessageRoleError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","export function splitDataUrl(dataUrl: string): {\n mimeType: string | undefined;\n base64Content: string | undefined;\n} {\n try {\n const [header, base64Content] = dataUrl.split(',');\n return {\n mimeType: header.split(';')[0].split(':')[1],\n base64Content,\n };\n } catch (error) {\n return {\n mimeType: undefined,\n base64Content: undefined,\n };\n }\n}\n","import {\n LanguageModelV1FilePart,\n LanguageModelV1ImagePart,\n LanguageModelV1Message,\n LanguageModelV1Prompt,\n LanguageModelV1TextPart,\n} from '@ai-sdk/provider';\nimport { download } from '../../util/download';\nimport { CoreMessage } from '../prompt/message';\nimport { detectImageMimeType } from '../util/detect-image-mimetype';\nimport { FilePart, ImagePart, TextPart } from './content-part';\nimport {\n convertDataContentToBase64String,\n convertDataContentToUint8Array,\n DataContent,\n} from './data-content';\nimport { InvalidMessageRoleError } from './invalid-message-role-error';\nimport { splitDataUrl } from './split-data-url';\nimport { StandardizedPrompt } from './standardize-prompt';\n\nexport async function convertToLanguageModelPrompt({\n prompt,\n modelSupportsImageUrls = true,\n modelSupportsUrl = () => false,\n downloadImplementation = download,\n}: {\n prompt: StandardizedPrompt;\n modelSupportsImageUrls: boolean | undefined;\n modelSupportsUrl: undefined | ((url: URL) => boolean);\n downloadImplementation?: typeof download;\n}): Promise<LanguageModelV1Prompt> {\n const downloadedAssets = await downloadAssets(\n 
prompt.messages,\n downloadImplementation,\n modelSupportsImageUrls,\n modelSupportsUrl,\n );\n\n return [\n ...(prompt.system != null\n ? [{ role: 'system' as const, content: prompt.system }]\n : []),\n ...prompt.messages.map(message =>\n convertToLanguageModelMessage(message, downloadedAssets),\n ),\n ];\n}\n\n/**\n * Convert a CoreMessage to a LanguageModelV1Message.\n *\n * @param message The CoreMessage to convert.\n * @param downloadedAssets A map of URLs to their downloaded data. Only\n * available if the model does not support URLs, null otherwise.\n */\nexport function convertToLanguageModelMessage(\n message: CoreMessage,\n downloadedAssets: Record<\n string,\n { mimeType: string | undefined; data: Uint8Array }\n >,\n): LanguageModelV1Message {\n const role = message.role;\n switch (role) {\n case 'system': {\n return {\n role: 'system',\n content: message.content,\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n case 'user': {\n if (typeof message.content === 'string') {\n return {\n role: 'user',\n content: [{ type: 'text', text: message.content }],\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n return {\n role: 'user',\n content: message.content\n .map(part => convertPartToLanguageModelPart(part, downloadedAssets))\n // remove empty text parts:\n .filter(part => part.type !== 'text' || part.text !== ''),\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n case 'assistant': {\n if (typeof message.content === 'string') {\n return {\n role: 'assistant',\n content: [{ type: 'text', text: message.content }],\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n return {\n role: 'assistant',\n content: message.content\n .filter(\n // remove empty text parts:\n part => part.type !== 'text' || part.text !== '',\n )\n .map(part => {\n const { experimental_providerMetadata, ...rest } = part;\n return {\n ...rest,\n providerMetadata: experimental_providerMetadata,\n };\n }),\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n case 'tool': {\n return {\n role: 'tool',\n content: message.content.map(part => ({\n type: 'tool-result',\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n result: part.result,\n content: part.experimental_content,\n isError: part.isError,\n providerMetadata: part.experimental_providerMetadata,\n })),\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new InvalidMessageRoleError({ role: _exhaustiveCheck });\n }\n }\n}\n\n/**\n * Downloads images and files from URLs in the messages.\n */\nasync function downloadAssets(\n messages: CoreMessage[],\n downloadImplementation: typeof download,\n modelSupportsImageUrls: boolean | undefined,\n modelSupportsUrl: (url: URL) => boolean,\n): Promise<Record<string, { mimeType: string | undefined; data: Uint8Array }>> {\n const urls = messages\n .filter(message => message.role === 'user')\n .map(message => message.content)\n .filter((content): content is Array<TextPart | ImagePart | FilePart> =>\n Array.isArray(content),\n )\n .flat()\n .filter(\n (part): part is ImagePart | FilePart =>\n part.type === 'image' || part.type === 'file',\n )\n /**\n * Filter out image parts if the model supports image URLs, before letting it\n * decide if it supports a particular URL.\n */\n .filter(\n (part): part is ImagePart | FilePart =>\n !(part.type === 'image' && modelSupportsImageUrls === true),\n )\n .map(part => (part.type === 'image' ? 
part.image : part.data))\n .map(part =>\n // support string urls:\n typeof part === 'string' &&\n (part.startsWith('http:') || part.startsWith('https:'))\n ? new URL(part)\n : part,\n )\n .filter((image): image is URL => image instanceof URL)\n /**\n * Filter out URLs that the model supports natively, so we don't download them.\n */\n .filter(url => !modelSupportsUrl(url));\n\n // download in parallel:\n const downloadedImages = await Promise.all(\n urls.map(async url => ({\n url,\n data: await downloadImplementation({ url }),\n })),\n );\n\n return Object.fromEntries(\n downloadedImages.map(({ url, data }) => [url.toString(), data]),\n );\n}\n\n/**\n * Convert part of a message to a LanguageModelV1Part.\n * @param part The part to convert.\n * @param downloadedAssets A map of URLs to their downloaded data. Only\n * available if the model does not support URLs, null otherwise.\n *\n * @returns The converted part.\n */\nfunction convertPartToLanguageModelPart(\n part: TextPart | ImagePart | FilePart,\n downloadedAssets: Record<\n string,\n { mimeType: string | undefined; data: Uint8Array }\n >,\n):\n | LanguageModelV1TextPart\n | LanguageModelV1ImagePart\n | LanguageModelV1FilePart {\n if (part.type === 'text') {\n return {\n type: 'text',\n text: part.text,\n providerMetadata: part.experimental_providerMetadata,\n };\n }\n\n let mimeType: string | undefined = part.mimeType;\n let data: DataContent | URL;\n let content: URL | ArrayBuffer | string;\n let normalizedData: Uint8Array | URL;\n\n const type = part.type;\n switch (type) {\n case 'image':\n data = part.image;\n break;\n case 'file':\n data = part.data;\n break;\n default:\n throw new Error(`Unsupported part type: ${type}`);\n }\n\n // Attempt to create a URL from the data. If it fails, we can assume the data\n // is not a URL and likely some other sort of data.\n try {\n content = typeof data === 'string' ? new URL(data) : data;\n } catch (error) {\n content = data;\n }\n\n // If we successfully created a URL, we can use that to normalize the data\n // either by passing it through or converting normalizing the base64 content\n // to a Uint8Array.\n if (content instanceof URL) {\n // If the content is a data URL, we want to convert that to a Uint8Array\n if (content.protocol === 'data:') {\n const { mimeType: dataUrlMimeType, base64Content } = splitDataUrl(\n content.toString(),\n );\n\n if (dataUrlMimeType == null || base64Content == null) {\n throw new Error(`Invalid data URL format in part ${type}`);\n }\n\n mimeType = dataUrlMimeType;\n normalizedData = convertDataContentToUint8Array(base64Content);\n } else {\n /**\n * If the content is a URL, we should first see if it was downloaded. And if not,\n * we can let the model decide if it wants to support the URL. This also allows\n * for non-HTTP URLs to be passed through (e.g. 
gs://).\n */\n const downloadedFile = downloadedAssets[content.toString()];\n if (downloadedFile) {\n normalizedData = downloadedFile.data;\n mimeType ??= downloadedFile.mimeType;\n } else {\n normalizedData = content;\n }\n }\n } else {\n // Since we know know the content is not a URL, we can attempt to normalize the data\n // assuming it is some sort of data.\n normalizedData = convertDataContentToUint8Array(content);\n }\n\n // Now that we have the normalized data either as a URL or a Uint8Array,\n // we can create the LanguageModelV1Part.\n switch (type) {\n case 'image': {\n // When possible, try to detect the mimetype automatically\n // to deal with incorrect mimetype inputs.\n // When detection fails, use provided mimetype.\n\n if (normalizedData instanceof Uint8Array) {\n mimeType = detectImageMimeType(normalizedData) ?? mimeType;\n }\n return {\n type: 'image',\n image: normalizedData,\n mimeType,\n providerMetadata: part.experimental_providerMetadata,\n };\n }\n\n case 'file': {\n // We should have a mimeType at this point, if not, throw an error.\n if (mimeType == null) {\n throw new Error(`Mime type is missing for file part`);\n }\n\n return {\n type: 'file',\n data:\n normalizedData instanceof Uint8Array\n ? convertDataContentToBase64String(normalizedData)\n : normalizedData,\n mimeType,\n providerMetadata: part.experimental_providerMetadata,\n };\n }\n }\n}\n","import { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport { CallSettings } from './call-settings';\n\n/**\n * Validates call settings and sets default values.\n */\nexport function prepareCallSettings({\n maxTokens,\n temperature,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n stopSequences,\n seed,\n}: Omit<CallSettings, 'abortSignal' | 'headers' | 'maxRetries'>): Omit<\n CallSettings,\n 'abortSignal' | 'headers' | 'maxRetries'\n> {\n if (maxTokens != null) {\n if (!Number.isInteger(maxTokens)) {\n throw new InvalidArgumentError({\n parameter: 'maxTokens',\n value: maxTokens,\n message: 'maxTokens must be an integer',\n });\n }\n\n if (maxTokens < 1) {\n throw new InvalidArgumentError({\n parameter: 'maxTokens',\n value: maxTokens,\n message: 'maxTokens must be >= 1',\n });\n }\n }\n\n if (temperature != null) {\n if (typeof temperature !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'temperature',\n value: temperature,\n message: 'temperature must be a number',\n });\n }\n }\n\n if (topP != null) {\n if (typeof topP !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'topP',\n value: topP,\n message: 'topP must be a number',\n });\n }\n }\n\n if (topK != null) {\n if (typeof topK !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'topK',\n value: topK,\n message: 'topK must be a number',\n });\n }\n }\n\n if (presencePenalty != null) {\n if (typeof presencePenalty !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'presencePenalty',\n value: presencePenalty,\n message: 'presencePenalty must be a number',\n });\n }\n }\n\n if (frequencyPenalty != null) {\n if (typeof frequencyPenalty !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'frequencyPenalty',\n value: frequencyPenalty,\n message: 'frequencyPenalty must be a number',\n });\n }\n }\n\n if (seed != null) {\n if (!Number.isInteger(seed)) {\n throw new InvalidArgumentError({\n parameter: 'seed',\n value: seed,\n message: 'seed must be an integer',\n });\n }\n }\n\n return {\n maxTokens,\n temperature: temperature ?? 
0,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n stopSequences:\n stopSequences != null && stopSequences.length > 0\n ? stopSequences\n : undefined,\n seed,\n };\n}\n","import { InvalidPromptError } from '@ai-sdk/provider';\nimport { safeValidateTypes } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { CoreMessage, coreMessageSchema } from './message';\nimport { Prompt } from './prompt';\nimport { detectPromptType } from './detect-prompt-type';\nimport { convertToCoreMessages } from './convert-to-core-messages';\nimport { UIMessage } from './ui-message';\nimport { CoreTool } from '../tool/tool';\n\nexport type StandardizedPrompt = {\n /**\n * Original prompt type. This is forwarded to the providers and can be used\n * to write send raw text to providers that support it.\n */\n type: 'prompt' | 'messages';\n\n /**\n * System message.\n */\n system?: string;\n\n /**\n * Messages.\n */\n messages: CoreMessage[];\n};\n\nexport function standardizePrompt<TOOLS extends Record<string, CoreTool>>({\n prompt,\n tools,\n}: {\n prompt: Prompt;\n tools: undefined | TOOLS;\n}): StandardizedPrompt {\n if (prompt.prompt == null && prompt.messages == null) {\n throw new InvalidPromptError({\n prompt,\n message: 'prompt or messages must be defined',\n });\n }\n\n if (prompt.prompt != null && prompt.messages != null) {\n throw new InvalidPromptError({\n prompt,\n message: 'prompt and messages cannot be defined at the same time',\n });\n }\n\n // validate that system is a string\n if (prompt.system != null && typeof prompt.system !== 'string') {\n throw new InvalidPromptError({\n prompt,\n message: 'system must be a string',\n });\n }\n\n // type: prompt\n if (prompt.prompt != null) {\n // validate that prompt is a string\n if (typeof prompt.prompt !== 'string') {\n throw new InvalidPromptError({\n prompt,\n message: 'prompt must be a string',\n });\n }\n\n return {\n type: 'prompt',\n system: prompt.system,\n messages: [\n {\n role: 'user',\n content: prompt.prompt,\n },\n ],\n };\n }\n\n // type: messages\n if (prompt.messages != null) {\n const promptType = detectPromptType(prompt.messages);\n\n if (promptType === 'other') {\n throw new InvalidPromptError({\n prompt,\n message: 'messages must be an array of CoreMessage or UIMessage',\n });\n }\n\n const messages: CoreMessage[] =\n promptType === 'ui-messages'\n ? convertToCoreMessages(prompt.messages as UIMessage[], {\n tools,\n })\n : (prompt.messages as CoreMessage[]);\n\n const validationResult = safeValidateTypes({\n value: messages,\n schema: z.array(coreMessageSchema),\n });\n\n if (!validationResult.success) {\n throw new InvalidPromptError({\n prompt,\n message: 'messages must be an array of CoreMessage or UIMessage',\n cause: validationResult.error,\n });\n }\n\n return {\n type: 'messages',\n messages,\n system: prompt.system,\n };\n }\n\n throw new Error('unreachable');\n}\n","import { z } from 'zod';\nimport { ProviderMetadata } from '../types';\nimport { providerMetadataSchema } from '../types/provider-metadata';\nimport {\n FilePart,\n filePartSchema,\n ImagePart,\n imagePartSchema,\n TextPart,\n textPartSchema,\n ToolCallPart,\n toolCallPartSchema,\n ToolResultPart,\n toolResultPartSchema,\n} from './content-part';\n\n/**\n A system message. 
It can contain system information.\n\n Note: using the \"system\" part of the prompt is strongly preferred\n to increase the resilience against prompt injection attacks,\n and because not all providers support several system messages.\n */\nexport type CoreSystemMessage = {\n role: 'system';\n content: string;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreSystemMessageSchema: z.ZodType<CoreSystemMessage> = z.object({\n role: z.literal('system'),\n content: z.string(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nA user message. It can contain text or a combination of text and images.\n */\nexport type CoreUserMessage = {\n role: 'user';\n content: UserContent;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreUserMessageSchema: z.ZodType<CoreUserMessage> = z.object({\n role: z.literal('user'),\n content: z.union([\n z.string(),\n z.array(z.union([textPartSchema, imagePartSchema, filePartSchema])),\n ]),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nContent of a user message. It can be a string or an array of text and image parts.\n */\nexport type UserContent = string | Array<TextPart | ImagePart | FilePart>;\n\n/**\nAn assistant message. It can contain text, tool calls, or a combination of text and tool calls.\n */\nexport type CoreAssistantMessage = {\n role: 'assistant';\n content: AssistantContent;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreAssistantMessageSchema: z.ZodType<CoreAssistantMessage> =\n z.object({\n role: z.literal('assistant'),\n content: z.union([\n z.string(),\n z.array(z.union([textPartSchema, toolCallPartSchema])),\n ]),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n });\n\n/**\nContent of an assistant message. It can be a string or an array of text and tool call parts.\n */\nexport type AssistantContent = string | Array<TextPart | ToolCallPart>;\n\n/**\nA tool message. It contains the result of one or more tool calls.\n */\nexport type CoreToolMessage = {\n role: 'tool';\n content: ToolContent;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreToolMessageSchema: z.ZodType<CoreToolMessage> = z.object({\n role: z.literal('tool'),\n content: z.array(toolResultPartSchema),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nContent of a tool message. 
It is an array of tool result parts.\n */\nexport type ToolContent = Array<ToolResultPart>;\n\n/**\nA message that can be used in the `messages` field of a prompt.\nIt can be a user message, an assistant message, or a tool message.\n */\nexport type CoreMessage =\n | CoreSystemMessage\n | CoreUserMessage\n | CoreAssistantMessage\n | CoreToolMessage;\n\nexport const coreMessageSchema: z.ZodType<CoreMessage> = z.union([\n coreSystemMessageSchema,\n coreUserMessageSchema,\n coreAssistantMessageSchema,\n coreToolMessageSchema,\n]);\n","import { LanguageModelV1ProviderMetadata } from '@ai-sdk/provider';\nimport { z } from 'zod';\nimport { jsonValueSchema } from './json-value';\n\n/**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\nexport type ProviderMetadata = LanguageModelV1ProviderMetadata;\n\nexport const providerMetadataSchema: z.ZodType<ProviderMetadata> = z.record(\n z.string(),\n z.record(z.string(), jsonValueSchema),\n);\n","import { JSONValue } from '@ai-sdk/provider';\nimport { z } from 'zod';\n\nexport const jsonValueSchema: z.ZodType<JSONValue> = z.lazy(() =>\n z.union([\n z.null(),\n z.string(),\n z.number(),\n z.boolean(),\n z.record(z.string(), jsonValueSchema),\n z.array(jsonValueSchema),\n ]),\n);\n","import { z } from 'zod';\nimport {\n ProviderMetadata,\n providerMetadataSchema,\n} from '../types/provider-metadata';\nimport { DataContent, dataContentSchema } from './data-content';\nimport {\n ToolResultContent,\n toolResultContentSchema,\n} from './tool-result-content';\n\n/**\nText content part of a prompt. It contains a string of text.\n */\nexport interface TextPart {\n type: 'text';\n\n /**\nThe text content.\n */\n text: string;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const textPartSchema: z.ZodType<TextPart> = z.object({\n type: z.literal('text'),\n text: z.string(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nImage content part of a prompt. It contains an image.\n */\nexport interface ImagePart {\n type: 'image';\n\n /**\nImage data. Can either be:\n\n- data: a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer\n- URL: a URL that points to the image\n */\n image: DataContent | URL;\n\n /**\nOptional mime type of the image.\n */\n mimeType?: string;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const imagePartSchema: z.ZodType<ImagePart> = z.object({\n type: z.literal('image'),\n image: z.union([dataContentSchema, z.instanceof(URL)]),\n mimeType: z.string().optional(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nFile content part of a prompt. It contains a file.\n */\nexport interface FilePart {\n type: 'file';\n\n /**\nFile data. Can either be:\n\n- data: a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer\n- URL: a URL that points to the image\n */\n data: DataContent | URL;\n\n /**\nMime type of the file.\n */\n mimeType: string;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const filePartSchema: z.ZodType<FilePart> = z.object({\n type: z.literal('file'),\n data: z.union([dataContentSchema, z.instanceof(URL)]),\n mimeType: z.string(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nTool call content part of a prompt. It contains a tool call (usually generated by the AI model).\n */\nexport interface ToolCallPart {\n type: 'tool-call';\n\n /**\nID of the tool call. This ID is used to match the tool call with the tool result.\n */\n toolCallId: string;\n\n /**\nName of the tool that is being called.\n */\n toolName: string;\n\n /**\nArguments of the tool call. This is a JSON-serializable object that matches the tool's input schema.\n */\n args: unknown;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const toolCallPartSchema: z.ZodType<ToolCallPart> = z.object({\n type: z.literal('tool-call'),\n toolCallId: z.string(),\n toolName: z.string(),\n args: z.unknown(),\n}) as z.ZodType<ToolCallPart>; // necessary bc args is optional on Zod type\n\n/**\nTool result content part of a prompt. It contains the result of the tool call with the matching ID.\n */\nexport interface ToolResultPart {\n type: 'tool-result';\n\n /**\nID of the tool call that this result is associated with.\n */\n toolCallId: string;\n\n /**\nName of the tool that generated this result.\n */\n toolName: string;\n\n /**\nResult of the tool call. This is a JSON-serializable object.\n */\n result: unknown;\n\n /**\nMulti-part content of the tool result. Only for tools that support multipart results.\n */\n experimental_content?: ToolResultContent;\n\n /**\nOptional flag if the result is an error or an error message.\n */\n isError?: boolean;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const toolResultPartSchema: z.ZodType<ToolResultPart> = z.object({\n type: z.literal('tool-result'),\n toolCallId: z.string(),\n toolName: z.string(),\n result: z.unknown(),\n content: toolResultContentSchema.optional(),\n isError: z.boolean().optional(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n}) as z.ZodType<ToolResultPart>; // necessary bc result is optional on Zod type\n","import { z } from 'zod';\n\nexport type ToolResultContent = Array<\n | {\n type: 'text';\n text: string;\n }\n | {\n type: 'image';\n data: string; // base64 encoded png image, e.g. screenshot\n mimeType?: string; // e.g. 
'image/png';\n }\n>;\n\nexport const toolResultContentSchema: z.ZodType<ToolResultContent> = z.array(\n z.union([\n z.object({ type: z.literal('text'), text: z.string() }),\n z.object({\n type: z.literal('image'),\n data: z.string(),\n mimeType: z.string().optional(),\n }),\n ]),\n);\n\nexport function isToolResultContent(\n value: unknown,\n): value is ToolResultContent {\n if (!Array.isArray(value) || value.length === 0) {\n return false;\n }\n\n return value.every(part => {\n if (typeof part !== 'object' || part === null) {\n return false;\n }\n\n if (part.type === 'text') {\n return typeof part.text === 'string';\n }\n\n if (part.type === 'image') {\n return (\n typeof part.data === 'string' &&\n (part.mimeType === undefined || typeof part.mimeType === 'string')\n );\n }\n\n return false;\n });\n}\n","export function detectPromptType(\n prompt: Array<any>,\n): 'ui-messages' | 'messages' | 'other' {\n if (!Array.isArray(prompt)) {\n return 'other';\n }\n\n if (prompt.length === 0) {\n return 'messages';\n }\n\n const characteristics = prompt.map(detectSingleMessageCharacteristics);\n\n if (characteristics.some(c => c === 'has-ui-specific-parts')) {\n return 'ui-messages';\n } else if (\n characteristics.every(\n c => c === 'has-core-specific-parts' || c === 'message',\n )\n ) {\n return 'messages';\n } else {\n return 'other';\n }\n}\n\nfunction detectSingleMessageCharacteristics(\n message: any,\n): 'has-ui-specific-parts' | 'has-core-specific-parts' | 'message' | 'other' {\n if (\n typeof message === 'object' &&\n message !== null &&\n (message.role === 'function' || // UI-only role\n message.role === 'data' || // UI-only role\n 'toolInvocations' in message || // UI-specific field\n 'experimental_attachments' in message)\n ) {\n return 'has-ui-specific-parts';\n } else if (\n typeof message === 'object' &&\n message !== null &&\n 'content' in message &&\n (Array.isArray(message.content) || // Core messages can have array content\n 'experimental_providerMetadata' in message)\n ) {\n return 'has-core-specific-parts';\n } else if (\n typeof message === 'object' &&\n message !== null &&\n 'role' in message &&\n 'content' in message &&\n typeof message.content === 'string' &&\n ['system', 'user', 'assistant', 'tool'].includes(message.role)\n ) {\n return 'message';\n } else {\n return 'other';\n }\n}\n","import { Attachment } from '@ai-sdk/ui-utils';\nimport { FilePart, ImagePart, TextPart } from './content-part';\nimport {\n convertDataContentToUint8Array,\n convertUint8ArrayToText,\n} from './data-content';\n\ntype ContentPart = TextPart | ImagePart | FilePart;\n\n/**\n * Converts a list of attachments to a list of content parts\n * for consumption by `ai/core` functions.\n * Currently only supports images and text attachments.\n */\nexport function attachmentsToParts(attachments: Attachment[]): ContentPart[] {\n const parts: ContentPart[] = [];\n\n for (const attachment of attachments) {\n let url;\n\n try {\n url = new URL(attachment.url);\n } catch (error) {\n throw new Error(`Invalid URL: ${attachment.url}`);\n }\n\n switch (url.protocol) {\n case 'http:':\n case 'https:': {\n if (attachment.contentType?.startsWith('image/')) {\n parts.push({ type: 'image', image: url });\n } else {\n if (!attachment.contentType) {\n throw new Error(\n 'If the attachment is not an image, it must specify a content type',\n );\n }\n\n parts.push({\n type: 'file',\n data: url,\n mimeType: attachment.contentType,\n });\n }\n break;\n }\n\n case 'data:': {\n let header;\n let base64Content;\n let 
mimeType;\n\n try {\n [header, base64Content] = attachment.url.split(',');\n mimeType = header.split(';')[0].split(':')[1];\n } catch (error) {\n throw new Error(`Error processing data URL: ${attachment.url}`);\n }\n\n if (mimeType == null || base64Content == null) {\n throw new Error(`Invalid data URL format: ${attachment.url}`);\n }\n\n if (attachment.contentType?.startsWith('image/')) {\n parts.push({\n type: 'image',\n image: convertDataContentToUint8Array(base64Content),\n });\n } else if (attachment.contentType?.startsWith('text/')) {\n parts.push({\n type: 'text',\n text: convertUint8ArrayToText(\n convertDataContentToUint8Array(base64Content),\n ),\n });\n } else {\n if (!attachment.contentType) {\n throw new Error(\n 'If the attachment is not an image or text, it must specify a content type',\n );\n }\n\n parts.push({\n type: 'file',\n data: base64Content,\n mimeType: attachment.contentType,\n });\n }\n\n break;\n }\n\n default: {\n throw new Error(`Unsupported URL protocol: ${url.protocol}`);\n }\n }\n }\n\n return parts;\n}\n","import { AISDKError } from '@ai-sdk/provider';\nimport { UIMessage } from './ui-message';\n\nconst name = 'AI_MessageConversionError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class MessageConversionError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly originalMessage: UIMessage;\n\n constructor({\n originalMessage,\n message,\n }: {\n originalMessage: UIMessage;\n message: string;\n }) {\n super({ name, message });\n\n this.originalMessage = originalMessage;\n }\n\n static isInstance(error: unknown): error is MessageConversionError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { CoreMessage, ToolCallPart, ToolResultPart } from '../prompt';\nimport { CoreTool } from '../tool/tool';\nimport { attachmentsToParts } from './attachments-to-parts';\nimport { MessageConversionError } from './message-conversion-error';\nimport { UIMessage } from './ui-message';\n\n/**\nConverts an array of messages from useChat into an array of CoreMessages that can be used\nwith the AI core functions (e.g. `streamText`).\n */\nexport function convertToCoreMessages<\n TOOLS extends Record<string, CoreTool> = never,\n>(messages: Array<UIMessage>, options?: { tools?: TOOLS }) {\n const tools = options?.tools ?? ({} as TOOLS);\n const coreMessages: CoreMessage[] = [];\n\n for (const message of messages) {\n const { role, content, toolInvocations, experimental_attachments } =\n message;\n\n switch (role) {\n case 'system': {\n coreMessages.push({\n role: 'system',\n content,\n });\n break;\n }\n\n case 'user': {\n coreMessages.push({\n role: 'user',\n content: experimental_attachments\n ? 
[\n { type: 'text', text: content },\n ...attachmentsToParts(experimental_attachments),\n ]\n : content,\n });\n break;\n }\n\n case 'assistant': {\n if (toolInvocations == null) {\n coreMessages.push({ role: 'assistant', content });\n break;\n }\n\n // assistant message with tool calls\n coreMessages.push({\n role: 'assistant',\n content: [\n { type: 'text', text: content },\n ...toolInvocations.map(\n ({ toolCallId, toolName, args }): ToolCallPart => ({\n type: 'tool-call' as const,\n toolCallId,\n toolName,\n args,\n }),\n ),\n ],\n });\n\n // tool message with tool results\n coreMessages.push({\n role: 'tool',\n content: toolInvocations.map((toolInvocation): ToolResultPart => {\n if (!('result' in toolInvocation)) {\n throw new MessageConversionError({\n originalMessage: message,\n message:\n 'ToolInvocation must have a result: ' +\n JSON.stringify(toolInvocation),\n });\n }\n\n const { toolCallId, toolName, result } = toolInvocation;\n\n const tool = tools[toolName];\n return tool?.experimental_toToolResultContent != null\n ? {\n type: 'tool-result',\n toolCallId,\n toolName,\n result: tool.experimental_toToolResultContent(result),\n experimental_content:\n tool.experimental_toToolResultContent(result),\n }\n : {\n type: 'tool-result',\n toolCallId,\n toolName,\n result,\n };\n }),\n });\n\n break;\n }\n\n case 'data': {\n // ignore\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new MessageConversionError({\n originalMessage: message,\n message: `Unsupported role: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n return coreMessages;\n}\n","/**\nRepresents the number of tokens used in a prompt and completion.\n */\nexport type LanguageModelUsage = {\n /**\nThe number of tokens used in the prompt.\n */\n promptTokens: number;\n\n /**\nThe number of tokens used in the completion.\n */\n completionTokens: number;\n\n /**\nThe total number of tokens used (promptTokens + completionTokens).\n */\n totalTokens: number;\n};\n\n/**\nRepresents the number of tokens used in an embedding.\n */\nexport type EmbeddingModelUsage = {\n /**\nThe number of tokens used in the embedding.\n */\n tokens: number;\n};\n\nexport function calculateLanguageModelUsage({\n promptTokens,\n completionTokens,\n}: {\n promptTokens: number;\n completionTokens: number;\n}): LanguageModelUsage {\n return {\n promptTokens,\n completionTokens,\n totalTokens: promptTokens + completionTokens,\n };\n}\n\nexport function addLanguageModelUsage(\n usage1: LanguageModelUsage,\n usage2: LanguageModelUsage,\n): LanguageModelUsage {\n return {\n promptTokens: usage1.promptTokens + usage2.promptTokens,\n completionTokens: usage1.completionTokens + usage2.completionTokens,\n totalTokens: usage1.totalTokens + usage2.totalTokens,\n };\n}\n","import { JSONSchema7 } from '@ai-sdk/provider';\n\nconst DEFAULT_SCHEMA_PREFIX = 'JSON schema:';\nconst DEFAULT_SCHEMA_SUFFIX =\n 'You MUST answer with a JSON object that matches the JSON schema above.';\nconst DEFAULT_GENERIC_SUFFIX = 'You MUST answer with JSON.';\n\nexport function injectJsonInstruction({\n prompt,\n schema,\n schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : undefined,\n schemaSuffix = schema != null\n ? DEFAULT_SCHEMA_SUFFIX\n : DEFAULT_GENERIC_SUFFIX,\n}: {\n prompt?: string;\n schema?: JSONSchema7;\n schemaPrefix?: string;\n schemaSuffix?: string;\n}): string {\n return [\n prompt != null && prompt.length > 0 ? prompt : undefined,\n prompt != null && prompt.length > 0 ? 
'' : undefined, // add a newline if prompt is not null\n schemaPrefix,\n schema != null ? JSON.stringify(schema) : undefined,\n schemaSuffix,\n ]\n .filter(line => line != null)\n .join('\\n');\n}\n","import {\n isJSONArray,\n isJSONObject,\n JSONObject,\n JSONSchema7,\n JSONValue,\n TypeValidationError,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { safeValidateTypes, ValidationResult } from '@ai-sdk/provider-utils';\nimport { asSchema, DeepPartial, Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors/no-object-generated-error';\nimport {\n AsyncIterableStream,\n createAsyncIterableStream,\n} from '../util/async-iterable-stream';\nimport { ObjectStreamPart } from './stream-object-result';\nimport { LanguageModelResponseMetadata, LanguageModelUsage } from '../types';\n\nexport interface OutputStrategy<PARTIAL, RESULT, ELEMENT_STREAM> {\n readonly type: 'object' | 'array' | 'enum' | 'no-schema';\n readonly jsonSchema: JSONSchema7 | undefined;\n\n validatePartialResult({\n value,\n textDelta,\n isFinalDelta,\n }: {\n value: JSONValue;\n textDelta: string;\n isFirstDelta: boolean;\n isFinalDelta: boolean;\n latestObject: PARTIAL | undefined;\n }): ValidationResult<{\n partial: PARTIAL;\n textDelta: string;\n }>;\n validateFinalResult(\n value: JSONValue | undefined,\n context: {\n text: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ): ValidationResult<RESULT>;\n\n createElementStream(\n originalStream: ReadableStream<ObjectStreamPart<PARTIAL>>,\n ): ELEMENT_STREAM;\n}\n\nconst noSchemaOutputStrategy: OutputStrategy<JSONValue, JSONValue, never> = {\n type: 'no-schema',\n jsonSchema: undefined,\n\n validatePartialResult({ value, textDelta }) {\n return { success: true, value: { partial: value, textDelta } };\n },\n\n validateFinalResult(\n value: JSONValue | undefined,\n context: {\n text: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ): ValidationResult<JSONValue> {\n return value === undefined\n ? 
{\n success: false,\n error: new NoObjectGeneratedError({\n message: 'No object generated: response did not match schema.',\n text: context.text,\n response: context.response,\n usage: context.usage,\n }),\n }\n : { success: true, value };\n },\n\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: 'element streams in no-schema mode',\n });\n },\n};\n\nconst objectOutputStrategy = <OBJECT>(\n schema: Schema<OBJECT>,\n): OutputStrategy<DeepPartial<OBJECT>, OBJECT, never> => ({\n type: 'object',\n jsonSchema: schema.jsonSchema,\n\n validatePartialResult({ value, textDelta }) {\n return {\n success: true,\n value: {\n // Note: currently no validation of partial results:\n partial: value as DeepPartial<OBJECT>,\n textDelta,\n },\n };\n },\n\n validateFinalResult(value: JSONValue | undefined): ValidationResult<OBJECT> {\n return safeValidateTypes({ value, schema });\n },\n\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: 'element streams in object mode',\n });\n },\n});\n\nconst arrayOutputStrategy = <ELEMENT>(\n schema: Schema<ELEMENT>,\n): OutputStrategy<ELEMENT[], ELEMENT[], AsyncIterableStream<ELEMENT>> => {\n // remove $schema from schema.jsonSchema:\n const { $schema, ...itemSchema } = schema.jsonSchema;\n\n return {\n type: 'enum',\n\n // wrap in object that contains array of elements, since most LLMs will not\n // be able to generate an array directly:\n // possible future optimization: use arrays directly when model supports grammar-guided generation\n jsonSchema: {\n $schema: 'http://json-schema.org/draft-07/schema#',\n type: 'object',\n properties: {\n elements: { type: 'array', items: itemSchema },\n },\n required: ['elements'],\n additionalProperties: false,\n },\n\n validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {\n // check that the value is an object that contains an array of elements:\n if (!isJSONObject(value) || !isJSONArray(value.elements)) {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be an object that contains an array of elements',\n }),\n };\n }\n\n const inputArray = value.elements as Array<JSONObject>;\n const resultArray: Array<ELEMENT> = [];\n\n for (let i = 0; i < inputArray.length; i++) {\n const element = inputArray[i];\n const result = safeValidateTypes({ value: element, schema });\n\n // special treatment for last processed element:\n // ignore parse or validation failures, since they indicate that the\n // last element is incomplete and should not be included in the result,\n // unless it is the final delta\n if (i === inputArray.length - 1 && !isFinalDelta) {\n continue;\n }\n\n if (!result.success) {\n return result;\n }\n\n resultArray.push(result.value);\n }\n\n // calculate delta:\n const publishedElementCount = latestObject?.length ?? 
0;\n\n let textDelta = '';\n\n if (isFirstDelta) {\n textDelta += '[';\n }\n\n if (publishedElementCount > 0) {\n textDelta += ',';\n }\n\n textDelta += resultArray\n .slice(publishedElementCount) // only new elements\n .map(element => JSON.stringify(element))\n .join(',');\n\n if (isFinalDelta) {\n textDelta += ']';\n }\n\n return {\n success: true,\n value: {\n partial: resultArray,\n textDelta,\n },\n };\n },\n\n validateFinalResult(\n value: JSONValue | undefined,\n ): ValidationResult<Array<ELEMENT>> {\n // check that the value is an object that contains an array of elements:\n if (!isJSONObject(value) || !isJSONArray(value.elements)) {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be an object that contains an array of elements',\n }),\n };\n }\n\n const inputArray = value.elements as Array<JSONObject>;\n\n // check that each element in the array is of the correct type:\n for (const element of inputArray) {\n const result = safeValidateTypes({ value: element, schema });\n if (!result.success) {\n return result;\n }\n }\n\n return { success: true, value: inputArray as Array<ELEMENT> };\n },\n\n createElementStream(\n originalStream: ReadableStream<ObjectStreamPart<ELEMENT[]>>,\n ) {\n let publishedElements = 0;\n\n return createAsyncIterableStream(\n originalStream.pipeThrough(\n new TransformStream<ObjectStreamPart<ELEMENT[]>, ELEMENT>({\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'object': {\n const array = chunk.object;\n\n // publish new elements one by one:\n for (\n ;\n publishedElements < array.length;\n publishedElements++\n ) {\n controller.enqueue(array[publishedElements]);\n }\n\n break;\n }\n\n case 'text-delta':\n case 'finish':\n break;\n\n case 'error':\n controller.error(chunk.error);\n break;\n\n default: {\n const _exhaustiveCheck: never = chunk;\n throw new Error(\n `Unsupported chunk type: ${_exhaustiveCheck}`,\n );\n }\n }\n },\n }),\n ),\n );\n },\n };\n};\n\nconst enumOutputStrategy = <ENUM extends string>(\n enumValues: Array<ENUM>,\n): OutputStrategy<ENUM, ENUM, never> => {\n return {\n type: 'enum',\n\n // wrap in object that contains result, since most LLMs will not\n // be able to generate an enum value directly:\n // possible future optimization: use enums directly when model supports top-level enums\n jsonSchema: {\n $schema: 'http://json-schema.org/draft-07/schema#',\n type: 'object',\n properties: {\n result: { type: 'string', enum: enumValues },\n },\n required: ['result'],\n additionalProperties: false,\n },\n\n validateFinalResult(value: JSONValue | undefined): ValidationResult<ENUM> {\n // check that the value is an object that contains an array of elements:\n if (!isJSONObject(value) || typeof value.result !== 'string') {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause:\n 'value must be an object that contains a string in the \"result\" property.',\n }),\n };\n }\n\n const result = value.result as string;\n\n return enumValues.includes(result as ENUM)\n ? 
{ success: true, value: result as ENUM }\n : {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be a string in the enum',\n }),\n };\n },\n\n validatePartialResult() {\n // no streaming in enum mode\n throw new UnsupportedFunctionalityError({\n functionality: 'partial results in enum mode',\n });\n },\n\n createElementStream() {\n // no streaming in enum mode\n throw new UnsupportedFunctionalityError({\n functionality: 'element streams in enum mode',\n });\n },\n };\n};\n\nexport function getOutputStrategy<SCHEMA>({\n output,\n schema,\n enumValues,\n}: {\n output: 'object' | 'array' | 'enum' | 'no-schema';\n schema?: z.Schema<SCHEMA, z.ZodTypeDef, any> | Schema<SCHEMA>;\n enumValues?: Array<SCHEMA>;\n}): OutputStrategy<any, any, any> {\n switch (output) {\n case 'object':\n return objectOutputStrategy(asSchema(schema!));\n case 'array':\n return arrayOutputStrategy(asSchema(schema!));\n case 'enum':\n return enumOutputStrategy(enumValues! as Array<string>);\n case 'no-schema':\n return noSchemaOutputStrategy;\n default: {\n const _exhaustiveCheck: never = output;\n throw new Error(`Unsupported output: ${_exhaustiveCheck}`);\n }\n }\n}\n","export type AsyncIterableStream<T> = AsyncIterable<T> & ReadableStream<T>;\n\nexport function createAsyncIterableStream<T>(\n source: ReadableStream<T>,\n): AsyncIterableStream<T> {\n const stream = source.pipeThrough(new TransformStream<T, T>());\n\n (stream as AsyncIterableStream<T>)[Symbol.asyncIterator] = () => {\n const reader = stream.getReader();\n return {\n async next(): Promise<IteratorResult<T>> {\n const { done, value } = await reader.read();\n return done ? { done: true, value: undefined } : { done: false, value };\n },\n };\n };\n\n return stream as AsyncIterableStream<T>;\n}\n","import { z } from 'zod';\nimport { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport { Schema } from '@ai-sdk/ui-utils';\n\nexport function validateObjectGenerationInput({\n output,\n mode,\n schema,\n schemaName,\n schemaDescription,\n enumValues,\n}: {\n output?: 'object' | 'array' | 'enum' | 'no-schema';\n schema?: z.Schema<any, z.ZodTypeDef, any> | Schema<any>;\n schemaName?: string;\n schemaDescription?: string;\n enumValues?: Array<unknown>;\n mode?: 'auto' | 'json' | 'tool';\n}) {\n if (\n output != null &&\n output !== 'object' &&\n output !== 'array' &&\n output !== 'enum' &&\n output !== 'no-schema'\n ) {\n throw new InvalidArgumentError({\n parameter: 'output',\n value: output,\n message: 'Invalid output type.',\n });\n }\n\n if (output === 'no-schema') {\n if (mode === 'auto' || mode === 'tool') {\n throw new InvalidArgumentError({\n parameter: 'mode',\n value: mode,\n message: 'Mode must be \"json\" for no-schema output.',\n });\n }\n\n if (schema != null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Schema is not supported for no-schema output.',\n });\n }\n\n if (schemaDescription != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaDescription',\n value: schemaDescription,\n message: 'Schema description is not supported for no-schema output.',\n });\n }\n\n if (schemaName != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaName',\n value: schemaName,\n message: 'Schema name is not supported for no-schema output.',\n });\n }\n\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are not supported for no-schema output.',\n });\n }\n }\n\n if 
(output === 'object') {\n if (schema == null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Schema is required for object output.',\n });\n }\n\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are not supported for object output.',\n });\n }\n }\n\n if (output === 'array') {\n if (schema == null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Element schema is required for array output.',\n });\n }\n\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are not supported for array output.',\n });\n }\n }\n\n if (output === 'enum') {\n if (schema != null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Schema is not supported for enum output.',\n });\n }\n\n if (schemaDescription != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaDescription',\n value: schemaDescription,\n message: 'Schema description is not supported for enum output.',\n });\n }\n\n if (schemaName != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaName',\n value: schemaName,\n message: 'Schema name is not supported for enum output.',\n });\n }\n\n if (enumValues == null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are required for enum output.',\n });\n }\n\n for (const value of enumValues) {\n if (typeof value !== 'string') {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value,\n message: 'Enum values must be strings.',\n });\n }\n }\n }\n}\n","import {\n JSONValue,\n LanguageModelV1CallOptions,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport { createIdGenerator } from '@ai-sdk/provider-utils';\nimport {\n DeepPartial,\n Schema,\n isDeepEqualData,\n parsePartialJson,\n} from '@ai-sdk/ui-utils';\nimport { ServerResponse } from 'http';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors/no-object-generated-error';\nimport { DelayedPromise } from '../../util/delayed-promise';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport {\n CallWarning,\n FinishReason,\n LanguageModel,\n LogProbs,\n} from '../types/language-model';\nimport { LanguageModelRequestMetadata } from '../types/language-model-request-metadata';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { ProviderMetadata } from '../types/provider-metadata';\nimport {\n LanguageModelUsage,\n calculateLanguageModelUsage,\n} from '../types/usage';\nimport {\n 
AsyncIterableStream,\n createAsyncIterableStream,\n} from '../util/async-iterable-stream';\nimport { createStitchableStream } from '../util/create-stitchable-stream';\nimport { now as originalNow } from '../util/now';\nimport { prepareOutgoingHttpHeaders } from '../util/prepare-outgoing-http-headers';\nimport { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { writeToServerResponse } from '../util/write-to-server-response';\nimport { injectJsonInstruction } from './inject-json-instruction';\nimport { OutputStrategy, getOutputStrategy } from './output-strategy';\nimport { ObjectStreamPart, StreamObjectResult } from './stream-object-result';\nimport { validateObjectGenerationInput } from './validate-object-generation-input';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aiobj', size: 24 });\n\ntype OnFinishCallback<RESULT> = (event: {\n /**\nThe token usage of the generated response.\n*/\n usage: LanguageModelUsage;\n\n /**\nThe generated object. Can be undefined if the final object does not match the schema.\n*/\n object: RESULT | undefined;\n\n /**\nOptional error object. This is e.g. a TypeValidationError when the final object does not match the schema.\n*/\n error: unknown | undefined;\n\n /**\nResponse metadata.\n */\n response: LanguageModelResponseMetadata;\n\n /**\nWarnings from the model provider (e.g. unsupported settings).\n*/\n warnings?: CallWarning[];\n\n /**\nAdditional provider-specific metadata. They are passed through\nfrom the provider to the AI SDK and enable provider-specific\nresults that can be fully encapsulated in the provider.\n*/\n experimental_providerMetadata: ProviderMetadata | undefined;\n}) => Promise<void> | void;\n\n/**\nGenerate a structured, typed object for a given prompt and schema using a language model.\n\nThis function streams the output. If you do not want to stream the output, use `generateObject` instead.\n\n@return\nA result object for accessing the partial object stream and additional information.\n */\nexport function streamObject<OBJECT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output?: 'object' | undefined;\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe schema of the object that the model should generate.\n */\n schema: z.Schema<OBJECT, z.ZodTypeDef, any> | Schema<OBJECT>;\n\n /**\nOptional name of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nCallback that is called when the LLM response and the final object validation are finished.\n */\n onFinish?: OnFinishCallback<OBJECT>;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n },\n): StreamObjectResult<DeepPartial<OBJECT>, OBJECT, never>;\n/**\nGenerate an array with structured, typed elements for a given prompt and element schema using a language model.\n\nThis function streams the output. If you do not want to stream the output, use `generateObject` instead.\n\n@return\nA result object for accessing the partial object stream and additional information.\n */\nexport function streamObject<ELEMENT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'array';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe element schema of the array that the model should generate.\n */\n schema: z.Schema<ELEMENT, z.ZodTypeDef, any> | Schema<ELEMENT>;\n\n /**\nOptional name of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nCallback that is called when the LLM response and the final object validation are finished.\n */\n onFinish?: OnFinishCallback<Array<ELEMENT>>;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n },\n): StreamObjectResult<\n Array<ELEMENT>,\n Array<ELEMENT>,\n AsyncIterableStream<ELEMENT>\n>;\n/**\nGenerate JSON with any schema for a given prompt using a language model.\n\nThis function streams the output. 
If you do not want to stream the output, use `generateObject` instead.\n\n@return\nA result object for accessing the partial object stream and additional information.\n */\nexport function streamObject(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'no-schema';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe mode to use for object generation. Must be \"json\" for no-schema output.\n */\n mode?: 'json';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nCallback that is called when the LLM response and the final object validation are finished.\n */\n onFinish?: OnFinishCallback<JSONValue>;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n },\n): StreamObjectResult<JSONValue, JSONValue, never>;\nexport function streamObject<SCHEMA, PARTIAL, RESULT, ELEMENT_STREAM>({\n model,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n mode,\n output = 'object',\n system,\n prompt,\n messages,\n maxRetries,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n onFinish,\n _internal: {\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n now = originalNow,\n } = {},\n ...settings\n}: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n /**\n * The expected structure of the output.\n *\n * - 'object': Generate a single object that conforms to the schema.\n * - 'array': Generate an array of objects that conform to the schema.\n * - 'no-schema': Generate any JSON object. 
No schema is specified.\n *\n * Default is 'object' if not specified.\n */\n output?: 'object' | 'array' | 'no-schema';\n\n model: LanguageModel;\n schema?: z.Schema<SCHEMA, z.ZodTypeDef, any> | Schema<SCHEMA>;\n schemaName?: string;\n schemaDescription?: string;\n mode?: 'auto' | 'json' | 'tool';\n experimental_telemetry?: TelemetrySettings;\n experimental_providerMetadata?: ProviderMetadata;\n onFinish?: OnFinishCallback<RESULT>;\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n }): StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {\n validateObjectGenerationInput({\n output,\n mode,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n });\n\n const outputStrategy = getOutputStrategy({ output, schema: inputSchema });\n\n // automatically set mode to 'json' for no-schema output\n if (outputStrategy.type === 'no-schema' && mode === undefined) {\n mode = 'json';\n }\n\n return new DefaultStreamObjectResult({\n model,\n telemetry,\n headers,\n settings,\n maxRetries,\n abortSignal,\n outputStrategy,\n system,\n prompt,\n messages,\n schemaName,\n schemaDescription,\n inputProviderMetadata: providerMetadata,\n mode,\n onFinish,\n generateId,\n currentDate,\n now,\n });\n}\n\nclass DefaultStreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>\n implements StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>\n{\n private readonly objectPromise = new DelayedPromise<RESULT>();\n private readonly usagePromise = new DelayedPromise<LanguageModelUsage>();\n private readonly providerMetadataPromise = new DelayedPromise<\n ProviderMetadata | undefined\n >();\n private readonly warningsPromise = new DelayedPromise<\n CallWarning[] | undefined\n >();\n private readonly requestPromise =\n new DelayedPromise<LanguageModelRequestMetadata>();\n private readonly responsePromise =\n new DelayedPromise<LanguageModelResponseMetadata>();\n\n private readonly stitchableStream =\n createStitchableStream<ObjectStreamPart<PARTIAL>>();\n\n private readonly outputStrategy: OutputStrategy<\n PARTIAL,\n RESULT,\n ELEMENT_STREAM\n >;\n\n constructor({\n model,\n headers,\n telemetry,\n settings,\n maxRetries: maxRetriesArg,\n abortSignal,\n outputStrategy,\n system,\n prompt,\n messages,\n schemaName,\n schemaDescription,\n inputProviderMetadata,\n mode,\n onFinish,\n generateId,\n currentDate,\n now,\n }: {\n model: LanguageModel;\n telemetry: TelemetrySettings | undefined;\n headers: Record<string, string | undefined> | undefined;\n settings: Omit<CallSettings, 'abortSignal' | 'headers'>;\n maxRetries: number | undefined;\n abortSignal: AbortSignal | undefined;\n outputStrategy: OutputStrategy<PARTIAL, RESULT, ELEMENT_STREAM>;\n system: Prompt['system'];\n prompt: Prompt['prompt'];\n messages: Prompt['messages'];\n schemaName: string | undefined;\n schemaDescription: string | undefined;\n inputProviderMetadata: ProviderMetadata | undefined;\n mode: 'auto' | 'json' | 'tool' | undefined;\n onFinish: OnFinishCallback<RESULT> | undefined;\n generateId: () => string;\n currentDate: () => Date;\n now: () => number;\n }) {\n const { maxRetries, retry } = prepareRetries({\n maxRetries: maxRetriesArg,\n });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n const self = this;\n\n recordSpan({\n name: 'ai.streamObject',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 
'ai.streamObject',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.schema':\n outputStrategy.jsonSchema != null\n ? { input: () => JSON.stringify(outputStrategy.jsonSchema) }\n : undefined,\n 'ai.schema.name': schemaName,\n 'ai.schema.description': schemaDescription,\n 'ai.settings.output': outputStrategy.type,\n 'ai.settings.mode': mode,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async rootSpan => {\n // use the default provider mode when the mode is set to 'auto' or unspecified\n if (mode === 'auto' || mode == null) {\n mode = model.defaultObjectGenerationMode;\n }\n\n let callOptions: LanguageModelV1CallOptions;\n let transformer: Transformer<\n LanguageModelV1StreamPart,\n string | Omit<LanguageModelV1StreamPart, 'text-delta'>\n >;\n\n switch (mode) {\n case 'json': {\n const standardizedPrompt = standardizePrompt({\n prompt: {\n system:\n outputStrategy.jsonSchema == null\n ? injectJsonInstruction({ prompt: system })\n : model.supportsStructuredOutputs\n ? system\n : injectJsonInstruction({\n prompt: system,\n schema: outputStrategy.jsonSchema,\n }),\n prompt,\n messages,\n },\n tools: undefined,\n });\n\n callOptions = {\n mode: {\n type: 'object-json',\n schema: outputStrategy.jsonSchema,\n name: schemaName,\n description: schemaDescription,\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n }),\n providerMetadata: inputProviderMetadata,\n abortSignal,\n headers,\n };\n\n transformer = {\n transform: (chunk, controller) => {\n switch (chunk.type) {\n case 'text-delta':\n controller.enqueue(chunk.textDelta);\n break;\n case 'response-metadata':\n case 'finish':\n case 'error':\n controller.enqueue(chunk);\n break;\n }\n },\n };\n\n break;\n }\n\n case 'tool': {\n const standardizedPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools: undefined,\n });\n\n callOptions = {\n mode: {\n type: 'object-tool',\n tool: {\n type: 'function',\n name: schemaName ?? 'json',\n description:\n schemaDescription ?? 
'Respond with a JSON object.',\n parameters: outputStrategy.jsonSchema!,\n },\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n }),\n providerMetadata: inputProviderMetadata,\n abortSignal,\n headers,\n };\n\n transformer = {\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'tool-call-delta':\n controller.enqueue(chunk.argsTextDelta);\n break;\n case 'response-metadata':\n case 'finish':\n case 'error':\n controller.enqueue(chunk);\n break;\n }\n },\n };\n\n break;\n }\n\n case undefined: {\n throw new Error(\n 'Model does not have a default object generation mode.',\n );\n }\n\n default: {\n const _exhaustiveCheck: never = mode;\n throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);\n }\n }\n\n const {\n result: { stream, warnings, rawResponse, request },\n doStreamSpan,\n startTimestampMs,\n } = await retry(() =>\n recordSpan({\n name: 'ai.streamObject.doStream',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.streamObject.doStream',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => callOptions.inputFormat,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(callOptions.prompt),\n },\n 'ai.settings.mode': mode,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async doStreamSpan => ({\n startTimestampMs: now(),\n doStreamSpan,\n result: await model.doStream(callOptions),\n }),\n }),\n );\n\n self.requestPromise.resolve(request ?? {});\n\n // store information for onFinish callback:\n let usage: LanguageModelUsage | undefined;\n let finishReason: LanguageModelV1FinishReason | undefined;\n let providerMetadata: ProviderMetadata | undefined;\n let object: RESULT | undefined;\n let error: unknown | undefined;\n\n // pipe chunks through a transformation stream that extracts metadata:\n let accumulatedText = '';\n let textDelta = '';\n let response: {\n id: string;\n timestamp: Date;\n modelId: string;\n } = {\n id: generateId(),\n timestamp: currentDate(),\n modelId: model.modelId,\n };\n\n // Keep track of raw parse result before type validation, since e.g. 
Zod might\n // change the object by mapping properties.\n let latestObjectJson: JSONValue | undefined = undefined;\n let latestObject: PARTIAL | undefined = undefined;\n let isFirstChunk = true;\n let isFirstDelta = true;\n\n const transformedStream = stream\n .pipeThrough(new TransformStream(transformer))\n .pipeThrough(\n new TransformStream<\n string | ObjectStreamInputPart,\n ObjectStreamPart<PARTIAL>\n >({\n async transform(chunk, controller): Promise<void> {\n // Telemetry event for first chunk:\n if (isFirstChunk) {\n const msToFirstChunk = now() - startTimestampMs;\n\n isFirstChunk = false;\n\n doStreamSpan.addEvent('ai.stream.firstChunk', {\n 'ai.stream.msToFirstChunk': msToFirstChunk,\n });\n\n doStreamSpan.setAttributes({\n 'ai.stream.msToFirstChunk': msToFirstChunk,\n });\n }\n\n // process partial text chunks\n if (typeof chunk === 'string') {\n accumulatedText += chunk;\n textDelta += chunk;\n\n const { value: currentObjectJson, state: parseState } =\n parsePartialJson(accumulatedText);\n\n if (\n currentObjectJson !== undefined &&\n !isDeepEqualData(latestObjectJson, currentObjectJson)\n ) {\n const validationResult =\n outputStrategy.validatePartialResult({\n value: currentObjectJson,\n textDelta,\n latestObject,\n isFirstDelta,\n isFinalDelta: parseState === 'successful-parse',\n });\n\n if (\n validationResult.success &&\n !isDeepEqualData(\n latestObject,\n validationResult.value.partial,\n )\n ) {\n // inside inner check to correctly parse the final element in array mode:\n latestObjectJson = currentObjectJson;\n latestObject = validationResult.value.partial;\n\n controller.enqueue({\n type: 'object',\n object: latestObject,\n });\n\n controller.enqueue({\n type: 'text-delta',\n textDelta: validationResult.value.textDelta,\n });\n\n textDelta = '';\n isFirstDelta = false;\n }\n }\n\n return;\n }\n\n switch (chunk.type) {\n case 'response-metadata': {\n response = {\n id: chunk.id ?? response.id,\n timestamp: chunk.timestamp ?? response.timestamp,\n modelId: chunk.modelId ?? response.modelId,\n };\n break;\n }\n\n case 'finish': {\n // send final text delta:\n if (textDelta !== '') {\n controller.enqueue({ type: 'text-delta', textDelta });\n }\n\n // store finish reason for telemetry:\n finishReason = chunk.finishReason;\n\n // store usage and metadata for promises and onFinish callback:\n usage = calculateLanguageModelUsage(chunk.usage);\n providerMetadata = chunk.providerMetadata;\n\n controller.enqueue({ ...chunk, usage, response });\n\n // resolve promises that can be resolved now:\n self.usagePromise.resolve(usage);\n self.providerMetadataPromise.resolve(providerMetadata);\n self.responsePromise.resolve({\n ...response,\n headers: rawResponse?.headers,\n });\n\n // resolve the object promise with the latest object:\n const validationResult = outputStrategy.validateFinalResult(\n latestObjectJson,\n {\n text: accumulatedText,\n response,\n usage,\n },\n );\n\n if (validationResult.success) {\n object = validationResult.value;\n self.objectPromise.resolve(object);\n } else {\n error = new NoObjectGeneratedError({\n message:\n 'No object generated: response did not match schema.',\n cause: validationResult.error,\n text: accumulatedText,\n response,\n usage,\n });\n self.objectPromise.reject(error);\n }\n\n break;\n }\n\n default: {\n controller.enqueue(chunk);\n break;\n }\n }\n },\n\n // invoke onFinish callback and resolve toolResults promise when the stream is about to close:\n async flush(controller) {\n try {\n const finalUsage = usage ?? 
{\n promptTokens: NaN,\n completionTokens: NaN,\n totalTokens: NaN,\n };\n\n doStreamSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': finishReason,\n 'ai.response.object': {\n output: () => JSON.stringify(object),\n },\n 'ai.response.id': response.id,\n 'ai.response.model': response.modelId,\n 'ai.response.timestamp':\n response.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': finalUsage.promptTokens,\n 'ai.usage.completionTokens':\n finalUsage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [finishReason],\n 'gen_ai.response.id': response.id,\n 'gen_ai.response.model': response.modelId,\n 'gen_ai.usage.input_tokens': finalUsage.promptTokens,\n 'gen_ai.usage.output_tokens':\n finalUsage.completionTokens,\n },\n }),\n );\n\n // finish doStreamSpan before other operations for correct timing:\n doStreamSpan.end();\n\n // Add response information to the root span:\n rootSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.usage.promptTokens': finalUsage.promptTokens,\n 'ai.usage.completionTokens':\n finalUsage.completionTokens,\n 'ai.response.object': {\n output: () => JSON.stringify(object),\n },\n },\n }),\n );\n\n // call onFinish callback:\n await onFinish?.({\n usage: finalUsage,\n object,\n error,\n response: {\n ...response,\n headers: rawResponse?.headers,\n },\n warnings,\n experimental_providerMetadata: providerMetadata,\n });\n } catch (error) {\n controller.error(error);\n } finally {\n rootSpan.end();\n }\n },\n }),\n );\n\n self.stitchableStream.addStream(transformedStream);\n },\n })\n .catch(error => {\n // add an empty stream with an error to break the stream:\n self.stitchableStream.addStream(\n new ReadableStream({\n start(controller) {\n controller.error(error);\n },\n }),\n );\n })\n .finally(() => {\n self.stitchableStream.close();\n });\n\n this.outputStrategy = outputStrategy;\n }\n\n get object() {\n return this.objectPromise.value;\n }\n\n get usage() {\n return this.usagePromise.value;\n }\n\n get experimental_providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n\n get warnings() {\n return this.warningsPromise.value;\n }\n\n get request() {\n return this.requestPromise.value;\n }\n\n get response() {\n return this.responsePromise.value;\n }\n\n get partialObjectStream(): AsyncIterableStream<PARTIAL> {\n return createAsyncIterableStream(\n this.stitchableStream.stream.pipeThrough(\n new TransformStream<ObjectStreamPart<PARTIAL>, PARTIAL>({\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'object':\n controller.enqueue(chunk.object);\n break;\n\n case 'text-delta':\n case 'finish':\n break;\n\n case 'error':\n controller.error(chunk.error);\n break;\n\n default: {\n const _exhaustiveCheck: never = chunk;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n }),\n ),\n );\n }\n\n get elementStream(): ELEMENT_STREAM {\n return this.outputStrategy.createElementStream(\n this.stitchableStream.stream,\n );\n }\n\n get textStream(): AsyncIterableStream<string> {\n return createAsyncIterableStream(\n this.stitchableStream.stream.pipeThrough(\n new TransformStream<ObjectStreamPart<PARTIAL>, string>({\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'text-delta':\n controller.enqueue(chunk.textDelta);\n break;\n\n case 'object':\n case 'finish':\n break;\n\n case 'error':\n controller.error(chunk.error);\n break;\n\n default: {\n const _exhaustiveCheck: 
never = chunk;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n }),\n ),\n );\n }\n\n get fullStream(): AsyncIterableStream<ObjectStreamPart<PARTIAL>> {\n return createAsyncIterableStream(this.stitchableStream.stream);\n }\n\n pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit) {\n writeToServerResponse({\n response,\n status: init?.status,\n statusText: init?.statusText,\n headers: prepareOutgoingHttpHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n stream: this.textStream.pipeThrough(new TextEncoderStream()),\n });\n }\n\n toTextStreamResponse(init?: ResponseInit): Response {\n return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {\n status: init?.status ?? 200,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n });\n }\n}\n\nexport type ObjectStreamInputPart =\n | {\n type: 'error';\n error: unknown;\n }\n | {\n type: 'response-metadata';\n id?: string;\n timestamp?: Date;\n modelId?: string;\n }\n | {\n type: 'finish';\n finishReason: FinishReason;\n logprobs?: LogProbs;\n usage: LanguageModelUsage;\n providerMetadata?: ProviderMetadata;\n };\n","/**\n * Delayed promise. It is only constructed once the value is accessed.\n * This is useful to avoid unhandled promise rejections when the promise is created\n * but not accessed.\n */\nexport class DelayedPromise<T> {\n private status:\n | { type: 'pending' }\n | { type: 'resolved'; value: T }\n | { type: 'rejected'; error: unknown } = { type: 'pending' };\n private promise: Promise<T> | undefined;\n private _resolve: undefined | ((value: T) => void) = undefined;\n private _reject: undefined | ((error: unknown) => void) = undefined;\n\n get value(): Promise<T> {\n if (this.promise) {\n return this.promise;\n }\n\n this.promise = new Promise<T>((resolve, reject) => {\n if (this.status.type === 'resolved') {\n resolve(this.status.value);\n } else if (this.status.type === 'rejected') {\n reject(this.status.error);\n }\n\n this._resolve = resolve;\n this._reject = reject;\n });\n\n return this.promise;\n }\n\n resolve(value: T): void {\n this.status = { type: 'resolved', value };\n\n if (this.promise) {\n this._resolve?.(value);\n }\n }\n\n reject(error: unknown): void {\n this.status = { type: 'rejected', error };\n\n if (this.promise) {\n this._reject?.(error);\n }\n }\n}\n","/**\n * Creates a Promise with externally accessible resolve and reject functions.\n *\n * @template T - The type of the value that the Promise will resolve to.\n * @returns An object containing:\n * - promise: A Promise that can be resolved or rejected externally.\n * - resolve: A function to resolve the Promise with a value of type T.\n * - reject: A function to reject the Promise with an error.\n */\nexport function createResolvablePromise<T = any>(): {\n promise: Promise<T>;\n resolve: (value: T) => void;\n reject: (error: unknown) => void;\n} {\n let resolve: (value: T) => void;\n let reject: (error: unknown) => void;\n\n const promise = new Promise<T>((res, rej) => {\n resolve = res;\n reject = rej;\n });\n\n return {\n promise,\n resolve: resolve!,\n reject: reject!,\n };\n}\n","import { createResolvablePromise } from '../../util/create-resolvable-promise';\n\n/**\n * Creates a stitchable stream that can pipe one stream at a time.\n *\n * @template T - The type of values emitted by the streams.\n * @returns {Object} An object containing the stitchable stream and control methods.\n */\nexport function 
createStitchableStream<T>(): {\n stream: ReadableStream<T>;\n addStream: (innerStream: ReadableStream<T>) => void;\n close: () => void;\n} {\n let innerStreamReaders: ReadableStreamDefaultReader<T>[] = [];\n let controller: ReadableStreamDefaultController<T> | null = null;\n let isClosed = false;\n let waitForNewStream = createResolvablePromise<void>();\n\n const processPull = async () => {\n // Case 1: Outer stream is closed and no more inner streams\n if (isClosed && innerStreamReaders.length === 0) {\n controller?.close();\n return;\n }\n\n // Case 2: No inner streams available, but outer stream is open\n // wait for a new inner stream to be added or the outer stream to close\n if (innerStreamReaders.length === 0) {\n waitForNewStream = createResolvablePromise<void>();\n await waitForNewStream.promise;\n return processPull();\n }\n\n try {\n const { value, done } = await innerStreamReaders[0].read();\n\n if (done) {\n // Case 3: Current inner stream is done\n innerStreamReaders.shift(); // Remove the finished stream\n\n // Continue pulling from the next stream if available\n if (innerStreamReaders.length > 0) {\n await processPull();\n } else if (isClosed) {\n controller?.close();\n }\n } else {\n // Case 4: Current inner stream returns an item\n controller?.enqueue(value);\n }\n } catch (error) {\n // Case 5: Current inner stream throws an error\n controller?.error(error);\n innerStreamReaders.shift(); // Remove the errored stream\n\n if (isClosed && innerStreamReaders.length === 0) {\n controller?.close();\n }\n }\n };\n\n return {\n stream: new ReadableStream<T>({\n start(controllerParam) {\n controller = controllerParam;\n },\n pull: processPull,\n async cancel() {\n for (const reader of innerStreamReaders) {\n await reader.cancel();\n }\n innerStreamReaders = [];\n isClosed = true;\n },\n }),\n addStream: (innerStream: ReadableStream<T>) => {\n if (isClosed) {\n throw new Error('Cannot add inner stream: outer stream is closed');\n }\n\n innerStreamReaders.push(innerStream.getReader());\n waitForNewStream.resolve();\n },\n close: () => {\n isClosed = true;\n waitForNewStream.resolve();\n\n if (innerStreamReaders.length === 0) {\n controller?.close();\n }\n },\n };\n}\n","// Shim for performance.now() to support environments that don't have it:\nexport function now(): number {\n return globalThis?.performance?.now() ?? 
Date.now();\n}\n","import { createIdGenerator } from '@ai-sdk/provider-utils';\nimport { Tracer } from '@opentelemetry/api';\nimport { InvalidArgumentError, ToolExecutionError } from '../../errors';\nimport { CoreAssistantMessage, CoreMessage, CoreToolMessage } from '../prompt';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { prepareToolsAndToolChoice } from '../prompt/prepare-tools-and-tool-choice';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { CoreTool } from '../tool/tool';\nimport { CoreToolChoice, LanguageModel, ProviderMetadata } from '../types';\nimport {\n LanguageModelUsage,\n addLanguageModelUsage,\n calculateLanguageModelUsage,\n} from '../types/usage';\nimport { removeTextAfterLastWhitespace } from '../util/remove-text-after-last-whitespace';\nimport { GenerateTextResult } from './generate-text-result';\nimport { Output } from './output';\nimport { parseToolCall } from './parse-tool-call';\nimport { StepResult } from './step-result';\nimport { toResponseMessages } from './to-response-messages';\nimport { ToolCallArray } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolResultArray } from './tool-result';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aitxt', size: 24 });\n\n/**\nGenerate a text and call tools for a given prompt using a language model.\n\nThis function does not stream the output. If you want to stream the output, use `streamText` instead.\n\n@param model - The language model to use.\n\n@param tools - Tools that are accessible to and can be called by the model. The model needs to support calling tools.\n@param toolChoice - The tool choice strategy. Default: 'auto'.\n\n@param system - A system message that will be part of the prompt.\n@param prompt - A simple text prompt. You can either use `prompt` or `messages` but not both.\n@param messages - A list of messages. You can either use `prompt` or `messages` but not both.\n\n@param maxTokens - Maximum number of tokens to generate.\n@param temperature - Temperature setting.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topP - Nucleus sampling.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topK - Only sample from the top K options for each subsequent token.\nUsed to remove \"long tail\" low probability responses.\nRecommended for advanced use cases only. 
You usually only need to use temperature.\n@param presencePenalty - Presence penalty setting.\nIt affects the likelihood of the model to repeat information that is already in the prompt.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param frequencyPenalty - Frequency penalty setting.\nIt affects the likelihood of the model to repeatedly use the same words or phrases.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param stopSequences - Stop sequences.\nIf set, the model will stop generating text when one of the stop sequences is generated.\n@param seed - The seed (integer) to use for random sampling.\nIf set and supported by the model, calls will generate deterministic results.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@param maxSteps - Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.\n\n@param onStepFinish - Callback that is called when each step (LLM call) is finished, including intermediate steps.\n\n@returns\nA result object that contains the generated text, the results of the tool calls, and additional information.\n */\nexport async function generateText<\n TOOLS extends Record<string, CoreTool>,\n OUTPUT = never,\n>({\n model,\n tools,\n toolChoice,\n system,\n prompt,\n messages,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n maxSteps = 1,\n experimental_output: output,\n experimental_continueSteps: continueSteps = false,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n experimental_activeTools: activeTools,\n experimental_repairToolCall: repairToolCall,\n _internal: {\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n } = {},\n onStepFinish,\n ...settings\n}: CallSettings &\n Prompt & {\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe tools that the model can call. The model needs to support calling tools.\n*/\n tools?: TOOLS;\n\n /**\nThe tool choice strategy. Default: 'auto'.\n */\n toolChoice?: CoreToolChoice<TOOLS>;\n\n /**\nMaximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.\n\nA maximum number is required to prevent infinite loops in the case of misconfigured tools.\n\nBy default, it's set to 1, which means that only a single LLM call is made.\n */\n maxSteps?: number;\n\n /**\nWhen enabled, the model will perform additional steps if the finish reason is \"length\" (experimental).\n\nBy default, it's set to false.\n */\n experimental_continueSteps?: boolean;\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nLimits the tools that are available for the model to call without\nchanging the tool call and result types in the result.\n */\n experimental_activeTools?: Array<keyof TOOLS>;\n\n experimental_output?: Output<OUTPUT>;\n\n /**\nA function that attempts to repair a tool call that failed to parse.\n */\n experimental_repairToolCall?: ToolCallRepairFunction<TOOLS>;\n\n /**\n Callback that is called when each step (LLM call) is finished, including intermediate steps.\n */\n onStepFinish?: (event: StepResult<TOOLS>) => Promise<void> | void;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n }): Promise<GenerateTextResult<TOOLS, OUTPUT>> {\n if (maxSteps < 1) {\n throw new InvalidArgumentError({\n parameter: 'maxSteps',\n value: maxSteps,\n message: 'maxSteps must be at least 1',\n });\n }\n\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const initialPrompt = standardizePrompt({\n prompt: {\n system: output?.injectIntoSystemPrompt({ system, model }) ?? system,\n prompt,\n messages,\n },\n tools,\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.generateText',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateText',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.settings.maxSteps': maxSteps,\n },\n }),\n tracer,\n fn: async span => {\n const mode = {\n type: 'regular' as const,\n ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools }),\n };\n\n const callSettings = prepareCallSettings(settings);\n\n let currentModelResponse: Awaited<\n ReturnType<LanguageModel['doGenerate']>\n > & { response: { id: string; timestamp: Date; modelId: string } };\n let currentToolCalls: ToolCallArray<TOOLS> = [];\n let currentToolResults: ToolResultArray<TOOLS> = [];\n let stepCount = 0;\n const responseMessages: Array<CoreAssistantMessage | CoreToolMessage> =\n [];\n let text = '';\n const steps: GenerateTextResult<TOOLS, OUTPUT>['steps'] = [];\n let usage: LanguageModelUsage = {\n completionTokens: 0,\n promptTokens: 0,\n totalTokens: 0,\n };\n\n let stepType: 'initial' | 'tool-result' | 'continue' | 'done' = 'initial';\n\n do {\n // after the 1st step, we need to switch to messages format:\n const promptFormat = stepCount === 0 ? 
initialPrompt.type : 'messages';\n\n const stepInputMessages = [\n ...initialPrompt.messages,\n ...responseMessages,\n ];\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: {\n type: promptFormat,\n system: initialPrompt.system,\n messages: stepInputMessages,\n },\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n\n currentModelResponse = await retry(() =>\n recordSpan({\n name: 'ai.generateText.doGenerate',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateText.doGenerate',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': { input: () => promptFormat },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.prompt.tools': {\n // convert the language model level tools:\n input: () => mode.tools?.map(tool => JSON.stringify(tool)),\n },\n 'ai.prompt.toolChoice': {\n input: () =>\n mode.toolChoice != null\n ? JSON.stringify(mode.toolChoice)\n : undefined,\n },\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.stop_sequences': settings.stopSequences,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n fn: async span => {\n const result = await model.doGenerate({\n mode,\n ...callSettings,\n inputFormat: promptFormat,\n responseFormat: output?.responseFormat({ model }),\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n });\n\n // Fill in default values:\n const responseData = {\n id: result.response?.id ?? generateId(),\n timestamp: result.response?.timestamp ?? currentDate(),\n modelId: result.response?.modelId ?? model.modelId,\n };\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': result.finishReason,\n 'ai.response.text': {\n output: () => result.text,\n },\n 'ai.response.toolCalls': {\n output: () => JSON.stringify(result.toolCalls),\n },\n 'ai.response.id': responseData.id,\n 'ai.response.model': responseData.modelId,\n 'ai.response.timestamp':\n responseData.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': result.usage.promptTokens,\n 'ai.usage.completionTokens': result.usage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [result.finishReason],\n 'gen_ai.response.id': responseData.id,\n 'gen_ai.response.model': responseData.modelId,\n 'gen_ai.usage.input_tokens': result.usage.promptTokens,\n 'gen_ai.usage.output_tokens': result.usage.completionTokens,\n },\n }),\n );\n\n return { ...result, response: responseData };\n },\n }),\n );\n\n // parse tool calls:\n currentToolCalls = await Promise.all(\n (currentModelResponse.toolCalls ?? []).map(toolCall =>\n parseToolCall({\n toolCall,\n tools,\n repairToolCall,\n system,\n messages: stepInputMessages,\n }),\n ),\n );\n\n // execute tools:\n currentToolResults =\n tools == null\n ? 
[]\n : await executeTools({\n toolCalls: currentToolCalls,\n tools,\n tracer,\n telemetry,\n messages: stepInputMessages,\n abortSignal,\n });\n\n // token usage:\n const currentUsage = calculateLanguageModelUsage(\n currentModelResponse.usage,\n );\n usage = addLanguageModelUsage(usage, currentUsage);\n\n // check if another step is needed:\n let nextStepType: 'done' | 'continue' | 'tool-result' = 'done';\n if (++stepCount < maxSteps) {\n if (\n continueSteps &&\n currentModelResponse.finishReason === 'length' &&\n // only use continue when there are no tool calls:\n currentToolCalls.length === 0\n ) {\n nextStepType = 'continue';\n } else if (\n // there are tool calls:\n currentToolCalls.length > 0 &&\n // all current tool calls have results:\n currentToolResults.length === currentToolCalls.length\n ) {\n nextStepType = 'tool-result';\n }\n }\n\n // text:\n const originalText = currentModelResponse.text ?? '';\n const stepTextLeadingWhitespaceTrimmed =\n stepType === 'continue' && // only for continue steps\n text.trimEnd() !== text // only trim when there is preceding whitespace\n ? originalText.trimStart()\n : originalText;\n const stepText =\n nextStepType === 'continue'\n ? removeTextAfterLastWhitespace(stepTextLeadingWhitespaceTrimmed)\n : stepTextLeadingWhitespaceTrimmed;\n\n text =\n nextStepType === 'continue' || stepType === 'continue'\n ? text + stepText\n : stepText;\n\n // append to messages for potential next step:\n if (stepType === 'continue') {\n // continue step: update the last assistant message\n // continue is only possible when there are no tool calls,\n // so we can assume that there is a single last assistant message:\n const lastMessage = responseMessages[\n responseMessages.length - 1\n ] as CoreAssistantMessage;\n\n if (typeof lastMessage.content === 'string') {\n lastMessage.content += stepText;\n } else {\n lastMessage.content.push({\n text: stepText,\n type: 'text',\n });\n }\n } else {\n responseMessages.push(\n ...toResponseMessages({\n text,\n tools: tools ?? ({} as TOOLS),\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n }),\n );\n }\n\n // Add step information (after response messages are updated):\n const currentStepResult: StepResult<TOOLS> = {\n stepType,\n text: stepText,\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n finishReason: currentModelResponse.finishReason,\n usage: currentUsage,\n warnings: currentModelResponse.warnings,\n logprobs: currentModelResponse.logprobs,\n request: currentModelResponse.request ?? 
{},\n response: {\n ...currentModelResponse.response,\n headers: currentModelResponse.rawResponse?.headers,\n\n // deep clone msgs to avoid mutating past messages in multi-step:\n messages: JSON.parse(JSON.stringify(responseMessages)),\n },\n experimental_providerMetadata: currentModelResponse.providerMetadata,\n isContinued: nextStepType === 'continue',\n };\n steps.push(currentStepResult);\n await onStepFinish?.(currentStepResult);\n\n stepType = nextStepType;\n } while (stepType !== 'done');\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': currentModelResponse.finishReason,\n 'ai.response.text': {\n output: () => currentModelResponse.text,\n },\n 'ai.response.toolCalls': {\n output: () => JSON.stringify(currentModelResponse.toolCalls),\n },\n\n 'ai.usage.promptTokens': currentModelResponse.usage.promptTokens,\n 'ai.usage.completionTokens':\n currentModelResponse.usage.completionTokens,\n },\n }),\n );\n\n return new DefaultGenerateTextResult({\n text,\n output:\n output == null\n ? (undefined as never)\n : output.parseOutput(\n { text },\n {\n response: currentModelResponse.response,\n usage,\n },\n ),\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n finishReason: currentModelResponse.finishReason,\n usage,\n warnings: currentModelResponse.warnings,\n request: currentModelResponse.request ?? {},\n response: {\n ...currentModelResponse.response,\n headers: currentModelResponse.rawResponse?.headers,\n messages: responseMessages,\n },\n logprobs: currentModelResponse.logprobs,\n steps,\n providerMetadata: currentModelResponse.providerMetadata,\n });\n },\n });\n}\n\nasync function executeTools<TOOLS extends Record<string, CoreTool>>({\n toolCalls,\n tools,\n tracer,\n telemetry,\n messages,\n abortSignal,\n}: {\n toolCalls: ToolCallArray<TOOLS>;\n tools: TOOLS;\n tracer: Tracer;\n telemetry: TelemetrySettings | undefined;\n messages: CoreMessage[];\n abortSignal: AbortSignal | undefined;\n}): Promise<ToolResultArray<TOOLS>> {\n const toolResults = await Promise.all(\n toolCalls.map(async ({ toolCallId, toolName, args }) => {\n const tool = tools[toolName];\n\n if (tool?.execute == null) {\n return undefined;\n }\n\n const result = await recordSpan({\n name: 'ai.toolCall',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.toolCall',\n telemetry,\n }),\n 'ai.toolCall.name': toolName,\n 'ai.toolCall.id': toolCallId,\n 'ai.toolCall.args': {\n output: () => JSON.stringify(args),\n },\n },\n }),\n tracer,\n fn: async span => {\n try {\n const result = await tool.execute!(args, {\n toolCallId,\n messages,\n abortSignal,\n });\n\n try {\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.toolCall.result': {\n output: () => JSON.stringify(result),\n },\n },\n }),\n );\n } catch (ignored) {\n // JSON stringify might fail if the result is not serializable,\n // in which case we just ignore it. 
In the future we might want to\n // add an optional serialize method to the tool interface and warn\n // if the result is not serializable.\n }\n\n return result;\n } catch (error) {\n throw new ToolExecutionError({\n toolName,\n toolArgs: args,\n cause: error,\n });\n }\n },\n });\n\n return {\n type: 'tool-result',\n toolCallId,\n toolName,\n args,\n result,\n } as ToolResultArray<TOOLS>[number];\n }),\n );\n\n return toolResults.filter(\n (result): result is NonNullable<typeof result> => result != null,\n );\n}\n\nclass DefaultGenerateTextResult<TOOLS extends Record<string, CoreTool>, OUTPUT>\n implements GenerateTextResult<TOOLS, OUTPUT>\n{\n readonly text: GenerateTextResult<TOOLS, OUTPUT>['text'];\n readonly toolCalls: GenerateTextResult<TOOLS, OUTPUT>['toolCalls'];\n readonly toolResults: GenerateTextResult<TOOLS, OUTPUT>['toolResults'];\n readonly finishReason: GenerateTextResult<TOOLS, OUTPUT>['finishReason'];\n readonly usage: GenerateTextResult<TOOLS, OUTPUT>['usage'];\n readonly warnings: GenerateTextResult<TOOLS, OUTPUT>['warnings'];\n readonly steps: GenerateTextResult<TOOLS, OUTPUT>['steps'];\n readonly logprobs: GenerateTextResult<TOOLS, OUTPUT>['logprobs'];\n readonly experimental_providerMetadata: GenerateTextResult<\n TOOLS,\n OUTPUT\n >['experimental_providerMetadata'];\n readonly response: GenerateTextResult<TOOLS, OUTPUT>['response'];\n readonly request: GenerateTextResult<TOOLS, OUTPUT>['request'];\n readonly experimental_output: GenerateTextResult<\n TOOLS,\n OUTPUT\n >['experimental_output'];\n\n constructor(options: {\n text: GenerateTextResult<TOOLS, OUTPUT>['text'];\n toolCalls: GenerateTextResult<TOOLS, OUTPUT>['toolCalls'];\n toolResults: GenerateTextResult<TOOLS, OUTPUT>['toolResults'];\n finishReason: GenerateTextResult<TOOLS, OUTPUT>['finishReason'];\n usage: GenerateTextResult<TOOLS, OUTPUT>['usage'];\n warnings: GenerateTextResult<TOOLS, OUTPUT>['warnings'];\n logprobs: GenerateTextResult<TOOLS, OUTPUT>['logprobs'];\n steps: GenerateTextResult<TOOLS, OUTPUT>['steps'];\n providerMetadata: GenerateTextResult<\n TOOLS,\n OUTPUT\n >['experimental_providerMetadata'];\n response: GenerateTextResult<TOOLS, OUTPUT>['response'];\n request: GenerateTextResult<TOOLS, OUTPUT>['request'];\n output: GenerateTextResult<TOOLS, OUTPUT>['experimental_output'];\n }) {\n this.text = options.text;\n this.toolCalls = options.toolCalls;\n this.toolResults = options.toolResults;\n this.finishReason = options.finishReason;\n this.usage = options.usage;\n this.warnings = options.warnings;\n this.request = options.request;\n this.response = options.response;\n this.steps = options.steps;\n this.experimental_providerMetadata = options.providerMetadata;\n this.logprobs = options.logprobs;\n this.experimental_output = options.output;\n }\n}\n","export {\n AISDKError,\n APICallError,\n EmptyResponseBodyError,\n InvalidPromptError,\n InvalidResponseDataError,\n JSONParseError,\n LoadAPIKeyError,\n NoContentGeneratedError,\n NoSuchModelError,\n TypeValidationError,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport { InvalidArgumentError } from './invalid-argument-error';\nexport { InvalidToolArgumentsError } from './invalid-tool-arguments-error';\nexport { NoObjectGeneratedError } from './no-object-generated-error';\nexport { NoSuchToolError } from './no-such-tool-error';\nexport { ToolCallRepairError } from './tool-call-repair-error';\nexport { ToolExecutionError } from './tool-execution-error';\n\nexport { InvalidDataContentError } from 
'../core/prompt/invalid-data-content-error';\nexport { InvalidMessageRoleError } from '../core/prompt/invalid-message-role-error';\nexport { MessageConversionError } from '../core/prompt/message-conversion-error';\nexport { DownloadError } from '../util/download-error';\nexport { RetryError } from '../util/retry-error';\n","import { AISDKError, getErrorMessage } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidToolArgumentsError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidToolArgumentsError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly toolName: string;\n readonly toolArgs: string;\n\n constructor({\n toolArgs,\n toolName,\n cause,\n message = `Invalid arguments for tool ${toolName}: ${getErrorMessage(\n cause,\n )}`,\n }: {\n message?: string;\n toolArgs: string;\n toolName: string;\n cause: unknown;\n }) {\n super({ name, message, cause });\n\n this.toolArgs = toolArgs;\n this.toolName = toolName;\n }\n\n static isInstance(error: unknown): error is InvalidToolArgumentsError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_NoSuchToolError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class NoSuchToolError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly toolName: string;\n readonly availableTools: string[] | undefined;\n\n constructor({\n toolName,\n availableTools = undefined,\n message = `Model tried to call unavailable tool '${toolName}'. ${\n availableTools === undefined\n ? 'No tools are available.'\n : `Available tools: ${availableTools.join(', ')}.`\n }`,\n }: {\n toolName: string;\n availableTools?: string[] | undefined;\n message?: string;\n }) {\n super({ name, message });\n\n this.toolName = toolName;\n this.availableTools = availableTools;\n }\n\n static isInstance(error: unknown): error is NoSuchToolError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError, getErrorMessage } from '@ai-sdk/provider';\nimport { InvalidToolArgumentsError } from './invalid-tool-arguments-error';\nimport { NoSuchToolError } from './no-such-tool-error';\n\nconst name = 'AI_ToolCallRepairError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class ToolCallRepairError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly originalError: NoSuchToolError | InvalidToolArgumentsError;\n\n constructor({\n cause,\n originalError,\n message = `Error repairing tool call: ${getErrorMessage(cause)}`,\n }: {\n message?: string;\n cause: unknown;\n originalError: NoSuchToolError | InvalidToolArgumentsError;\n }) {\n super({ name, message, cause });\n this.originalError = originalError;\n }\n\n static isInstance(error: unknown): error is ToolCallRepairError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError, getErrorMessage, JSONValue } from '@ai-sdk/provider';\n\nconst name = 'AI_ToolExecutionError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class ToolExecutionError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly toolName: string;\n readonly toolArgs: JSONValue;\n\n constructor({\n toolArgs,\n toolName,\n cause,\n message = `Error executing tool ${toolName}: ${getErrorMessage(cause)}`,\n }: {\n message?: string;\n 
toolArgs: JSONValue;\n toolName: string;\n cause: unknown;\n }) {\n super({ name, message, cause });\n\n this.toolArgs = toolArgs;\n this.toolName = toolName;\n }\n\n static isInstance(error: unknown): error is ToolExecutionError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import {\n LanguageModelV1FunctionTool,\n LanguageModelV1ProviderDefinedTool,\n LanguageModelV1ToolChoice,\n} from '@ai-sdk/provider';\nimport { asSchema } from '@ai-sdk/ui-utils';\nimport { CoreTool } from '../tool/tool';\nimport { CoreToolChoice } from '../types/language-model';\nimport { isNonEmptyObject } from '../util/is-non-empty-object';\n\nexport function prepareToolsAndToolChoice<\n TOOLS extends Record<string, CoreTool>,\n>({\n tools,\n toolChoice,\n activeTools,\n}: {\n tools: TOOLS | undefined;\n toolChoice: CoreToolChoice<TOOLS> | undefined;\n activeTools: Array<keyof TOOLS> | undefined;\n}): {\n tools:\n | Array<LanguageModelV1FunctionTool | LanguageModelV1ProviderDefinedTool>\n | undefined;\n toolChoice: LanguageModelV1ToolChoice | undefined;\n} {\n if (!isNonEmptyObject(tools)) {\n return {\n tools: undefined,\n toolChoice: undefined,\n };\n }\n\n // when activeTools is provided, we only include the tools that are in the list:\n const filteredTools =\n activeTools != null\n ? Object.entries(tools).filter(([name]) =>\n activeTools.includes(name as keyof TOOLS),\n )\n : Object.entries(tools);\n\n return {\n tools: filteredTools.map(([name, tool]) => {\n const toolType = tool.type;\n switch (toolType) {\n case undefined:\n case 'function':\n return {\n type: 'function' as const,\n name,\n description: tool.description,\n parameters: asSchema(tool.parameters).jsonSchema,\n };\n case 'provider-defined':\n return {\n type: 'provider-defined' as const,\n name,\n id: tool.id,\n args: tool.args,\n };\n default: {\n const exhaustiveCheck: never = toolType;\n throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);\n }\n }\n }),\n toolChoice:\n toolChoice == null\n ? { type: 'auto' }\n : typeof toolChoice === 'string'\n ? { type: toolChoice }\n : { type: 'tool' as const, toolName: toolChoice.toolName as string },\n };\n}\n","export function isNonEmptyObject(\n object: Record<string, unknown> | undefined | null,\n): object is Record<string, unknown> {\n return object != null && Object.keys(object).length > 0;\n}\n","const lastWhitespaceRegexp = /^([\\s\\S]*?)(\\s+)(\\S*)$/;\n\n/**\n * Splits the text on the last whitespace.\n *\n * Whitespace is defined as one or more whitespace characters,\n * e.g. space, tab, newline, etc.\n *\n * @param text - The text to split.\n * @returns The prefix, whitespace, and suffix. Undefined if there is no whitespace.\n */\nexport function splitOnLastWhitespace(text: string):\n | {\n prefix: string;\n whitespace: string;\n suffix: string;\n }\n | undefined {\n const match = text.match(lastWhitespaceRegexp);\n return match\n ? { prefix: match[1], whitespace: match[2], suffix: match[3] }\n : undefined;\n}\n","import { splitOnLastWhitespace } from './split-on-last-whitespace';\n\nexport function removeTextAfterLastWhitespace(text: string): string {\n const match = splitOnLastWhitespace(text);\n return match ? 
match.prefix + match.whitespace : text;\n}\n","import { LanguageModelV1FunctionToolCall } from '@ai-sdk/provider';\nimport { safeParseJSON, safeValidateTypes } from '@ai-sdk/provider-utils';\nimport { Schema, asSchema } from '@ai-sdk/ui-utils';\nimport { InvalidToolArgumentsError } from '../../errors/invalid-tool-arguments-error';\nimport { NoSuchToolError } from '../../errors/no-such-tool-error';\nimport { CoreMessage } from '../prompt';\nimport { CoreTool } from '../tool';\nimport { inferParameters } from '../tool/tool';\nimport { ToolCallUnion } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolCallRepairError } from '../../errors/tool-call-repair-error';\n\nexport async function parseToolCall<TOOLS extends Record<string, CoreTool>>({\n toolCall,\n tools,\n repairToolCall,\n system,\n messages,\n}: {\n toolCall: LanguageModelV1FunctionToolCall;\n tools: TOOLS | undefined;\n repairToolCall: ToolCallRepairFunction<TOOLS> | undefined;\n system: string | undefined;\n messages: CoreMessage[];\n}): Promise<ToolCallUnion<TOOLS>> {\n if (tools == null) {\n throw new NoSuchToolError({ toolName: toolCall.toolName });\n }\n\n try {\n return await doParseToolCall({ toolCall, tools });\n } catch (error) {\n if (\n repairToolCall == null ||\n !(\n NoSuchToolError.isInstance(error) ||\n InvalidToolArgumentsError.isInstance(error)\n )\n ) {\n throw error;\n }\n\n let repairedToolCall: LanguageModelV1FunctionToolCall | null = null;\n\n try {\n repairedToolCall = await repairToolCall({\n toolCall,\n tools,\n parameterSchema: ({ toolName }) =>\n asSchema(tools[toolName].parameters).jsonSchema,\n system,\n messages,\n error,\n });\n } catch (repairError) {\n throw new ToolCallRepairError({\n cause: repairError,\n originalError: error,\n });\n }\n\n // no repaired tool call returned\n if (repairedToolCall == null) {\n throw error;\n }\n\n return await doParseToolCall({ toolCall: repairedToolCall, tools });\n }\n}\n\nasync function doParseToolCall<TOOLS extends Record<string, CoreTool>>({\n toolCall,\n tools,\n}: {\n toolCall: LanguageModelV1FunctionToolCall;\n tools: TOOLS;\n}): Promise<ToolCallUnion<TOOLS>> {\n const toolName = toolCall.toolName as keyof TOOLS & string;\n\n const tool = tools[toolName];\n\n if (tool == null) {\n throw new NoSuchToolError({\n toolName: toolCall.toolName,\n availableTools: Object.keys(tools),\n });\n }\n\n const schema = asSchema(tool.parameters) as Schema<\n inferParameters<TOOLS[keyof TOOLS]['parameters']>\n >;\n\n // when the tool call has no arguments, we try passing an empty object to the schema\n // (many LLMs generate empty strings for tool calls with no arguments)\n const parseResult =\n toolCall.args.trim() === ''\n ? 
safeValidateTypes({ value: {}, schema })\n : safeParseJSON({ text: toolCall.args, schema });\n\n if (parseResult.success === false) {\n throw new InvalidToolArgumentsError({\n toolName,\n toolArgs: toolCall.args,\n cause: parseResult.error,\n });\n }\n\n return {\n type: 'tool-call',\n toolCallId: toolCall.toolCallId,\n toolName,\n args: parseResult.value,\n };\n}\n","import {\n CoreAssistantMessage,\n CoreToolMessage,\n ToolResultPart,\n} from '../prompt';\nimport { CoreTool } from '../tool/tool';\nimport { ToolCallArray } from './tool-call';\nimport { ToolResultArray } from './tool-result';\n\n/**\nConverts the result of a `generateText` call to a list of response messages.\n */\nexport function toResponseMessages<TOOLS extends Record<string, CoreTool>>({\n text = '',\n tools,\n toolCalls,\n toolResults,\n}: {\n text: string | undefined;\n tools: TOOLS;\n toolCalls: ToolCallArray<TOOLS>;\n toolResults: ToolResultArray<TOOLS>;\n}): Array<CoreAssistantMessage | CoreToolMessage> {\n const responseMessages: Array<CoreAssistantMessage | CoreToolMessage> = [];\n\n responseMessages.push({\n role: 'assistant',\n content: [{ type: 'text', text }, ...toolCalls],\n });\n\n if (toolResults.length > 0) {\n responseMessages.push({\n role: 'tool',\n content: toolResults.map((toolResult): ToolResultPart => {\n const tool = tools[toolResult.toolName];\n return tool?.experimental_toToolResultContent != null\n ? {\n type: 'tool-result',\n toolCallId: toolResult.toolCallId,\n toolName: toolResult.toolName,\n result: tool.experimental_toToolResultContent(toolResult.result),\n experimental_content: tool.experimental_toToolResultContent(\n toolResult.result,\n ),\n }\n : {\n type: 'tool-result',\n toolCallId: toolResult.toolCallId,\n toolName: toolResult.toolName,\n result: toolResult.result,\n };\n }),\n });\n }\n\n return responseMessages;\n}\n","import { safeParseJSON, safeValidateTypes } from '@ai-sdk/provider-utils';\nimport { asSchema, Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors';\nimport { injectJsonInstruction } from '../generate-object/inject-json-instruction';\nimport {\n LanguageModel,\n LanguageModelV1CallOptions,\n} from '../types/language-model';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { LanguageModelUsage } from '../types/usage';\n\nexport interface Output<OUTPUT> {\n readonly type: 'object' | 'text';\n injectIntoSystemPrompt(options: {\n system: string | undefined;\n model: LanguageModel;\n }): string | undefined;\n responseFormat: (options: {\n model: LanguageModel;\n }) => LanguageModelV1CallOptions['responseFormat'];\n parseOutput(\n options: { text: string },\n context: {\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ): OUTPUT;\n}\n\nexport const text = (): Output<string> => ({\n type: 'text',\n responseFormat: () => ({ type: 'text' }),\n injectIntoSystemPrompt({ system }: { system: string | undefined }) {\n return system;\n },\n parseOutput({ text }: { text: string }) {\n return text;\n },\n});\n\nexport const object = <OUTPUT>({\n schema: inputSchema,\n}: {\n schema: z.Schema<OUTPUT, z.ZodTypeDef, any> | Schema<OUTPUT>;\n}): Output<OUTPUT> => {\n const schema = asSchema(inputSchema);\n\n return {\n type: 'object',\n responseFormat: ({ model }) => ({\n type: 'json',\n schema: model.supportsStructuredOutputs ? 
schema.jsonSchema : undefined,\n }),\n injectIntoSystemPrompt({ system, model }) {\n // when the model supports structured outputs,\n // we can use the system prompt as is:\n return model.supportsStructuredOutputs\n ? system\n : injectJsonInstruction({\n prompt: system,\n schema: schema.jsonSchema,\n });\n },\n parseOutput(\n { text }: { text: string },\n context: {\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ) {\n const parseResult = safeParseJSON({ text });\n\n if (!parseResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: could not parse the response.',\n cause: parseResult.error,\n text,\n response: context.response,\n usage: context.usage,\n });\n }\n\n const validationResult = safeValidateTypes({\n value: parseResult.value,\n schema,\n });\n\n if (!validationResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: response did not match schema.',\n cause: validationResult.error,\n text,\n response: context.response,\n usage: context.usage,\n });\n }\n\n return validationResult.value;\n },\n };\n};\n","import { createIdGenerator } from '@ai-sdk/provider-utils';\nimport { DataStreamString, formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { Span } from '@opentelemetry/api';\nimport { ServerResponse } from 'node:http';\nimport { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport { StreamData } from '../../streams/stream-data';\nimport { DelayedPromise } from '../../util/delayed-promise';\nimport { DataStreamWriter } from '../data-stream/data-stream-writer';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { CoreAssistantMessage, CoreToolMessage } from '../prompt/message';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { prepareToolsAndToolChoice } from '../prompt/prepare-tools-and-tool-choice';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { CoreTool } from '../tool';\nimport {\n CoreToolChoice,\n FinishReason,\n LanguageModel,\n LogProbs,\n} from '../types/language-model';\nimport { LanguageModelRequestMetadata } from '../types/language-model-request-metadata';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { ProviderMetadata } from '../types/provider-metadata';\nimport { addLanguageModelUsage, LanguageModelUsage } from '../types/usage';\nimport {\n AsyncIterableStream,\n createAsyncIterableStream,\n} from '../util/async-iterable-stream';\nimport { createStitchableStream } from '../util/create-stitchable-stream';\nimport { mergeStreams } from '../util/merge-streams';\nimport { now as originalNow } from '../util/now';\nimport { prepareOutgoingHttpHeaders } from '../util/prepare-outgoing-http-headers';\nimport { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { splitOnLastWhitespace } 
from '../util/split-on-last-whitespace';\nimport { writeToServerResponse } from '../util/write-to-server-response';\nimport {\n runToolsTransformation,\n SingleRequestTextStreamPart,\n} from './run-tools-transformation';\nimport { StepResult } from './step-result';\nimport { StreamTextResult, TextStreamPart } from './stream-text-result';\nimport { toResponseMessages } from './to-response-messages';\nimport { ToolCallUnion } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolResultUnion } from './tool-result';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aitxt', size: 24 });\n\n/**\nGenerate a text and call tools for a given prompt using a language model.\n\nThis function streams the output. If you do not want to stream the output, use `generateText` instead.\n\n@param model - The language model to use.\n@param tools - Tools that are accessible to and can be called by the model. The model needs to support calling tools.\n\n@param system - A system message that will be part of the prompt.\n@param prompt - A simple text prompt. You can either use `prompt` or `messages` but not both.\n@param messages - A list of messages. You can either use `prompt` or `messages` but not both.\n\n@param maxTokens - Maximum number of tokens to generate.\n@param temperature - Temperature setting.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topP - Nucleus sampling.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topK - Only sample from the top K options for each subsequent token.\nUsed to remove \"long tail\" low probability responses.\nRecommended for advanced use cases only. You usually only need to use temperature.\n@param presencePenalty - Presence penalty setting.\nIt affects the likelihood of the model to repeat information that is already in the prompt.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param frequencyPenalty - Frequency penalty setting.\nIt affects the likelihood of the model to repeatedly use the same words or phrases.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param stopSequences - Stop sequences.\nIf set, the model will stop generating text when one of the stop sequences is generated.\n@param seed - The seed (integer) to use for random sampling.\nIf set and supported by the model, calls will generate deterministic results.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@param maxSteps - Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.\n\n@param onChunk - Callback that is called for each chunk of the stream. 
The stream processing will pause until the callback promise is resolved.\n@param onStepFinish - Callback that is called when each step (LLM call) is finished, including intermediate steps.\n@param onFinish - Callback that is called when the LLM response and all request tool executions\n(for tools that have an `execute` function) are finished.\n\n@return\nA result object for accessing different stream types and additional information.\n */\nexport function streamText<TOOLS extends Record<string, CoreTool>>({\n model,\n tools,\n toolChoice,\n system,\n prompt,\n messages,\n maxRetries,\n abortSignal,\n headers,\n maxSteps = 1,\n experimental_continueSteps: continueSteps = false,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n experimental_toolCallStreaming: toolCallStreaming = false,\n experimental_activeTools: activeTools,\n experimental_repairToolCall: repairToolCall,\n experimental_transform: transform,\n onChunk,\n onFinish,\n onStepFinish,\n _internal: {\n now = originalNow,\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n } = {},\n ...settings\n}: CallSettings &\n Prompt & {\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe tools that the model can call. The model needs to support calling tools.\n */\n tools?: TOOLS;\n\n /**\nThe tool choice strategy. Default: 'auto'.\n */\n toolChoice?: CoreToolChoice<TOOLS>;\n\n /**\nMaximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.\n\nA maximum number is required to prevent infinite loops in the case of misconfigured tools.\n\nBy default, it's set to 1, which means that only a single LLM call is made.\n */\n maxSteps?: number;\n\n /**\nWhen enabled, the model will perform additional steps if the finish reason is \"length\" (experimental).\n\nBy default, it's set to false.\n */\n experimental_continueSteps?: boolean;\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nLimits the tools that are available for the model to call without\nchanging the tool call and result types in the result.\n */\n experimental_activeTools?: Array<keyof TOOLS>;\n\n /**\nA function that attempts to repair a tool call that failed to parse.\n */\n experimental_repairToolCall?: ToolCallRepairFunction<TOOLS>;\n\n /**\nEnable streaming of tool call deltas as they are generated. Disabled by default.\n */\n experimental_toolCallStreaming?: boolean;\n\n /**\nOptional transformation that is applied to the stream.\n */\n experimental_transform?: (options: {\n tools: TOOLS; // for type inference\n }) => TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>>;\n\n /**\nCallback that is called for each chunk of the stream. 
The stream processing will pause until the callback promise is resolved.\n */\n onChunk?: (event: {\n chunk: Extract<\n TextStreamPart<TOOLS>,\n {\n type:\n | 'text-delta'\n | 'tool-call'\n | 'tool-call-streaming-start'\n | 'tool-call-delta'\n | 'tool-result';\n }\n >;\n }) => Promise<void> | void;\n\n /**\nCallback that is called when the LLM response and all request tool executions\n(for tools that have an `execute` function) are finished.\n\nThe usage is the combined usage of all steps.\n */\n onFinish?: (\n event: Omit<StepResult<TOOLS>, 'stepType' | 'isContinued'> & {\n /**\nDetails for all steps.\n */\n readonly steps: StepResult<TOOLS>[];\n },\n ) => Promise<void> | void;\n\n /**\n Callback that is called when each step (LLM call) is finished, including intermediate steps.\n */\n onStepFinish?: (event: StepResult<TOOLS>) => Promise<void> | void;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n now?: () => number;\n generateId?: () => string;\n currentDate?: () => Date;\n };\n }): StreamTextResult<TOOLS> {\n return new DefaultStreamTextResult({\n model,\n telemetry,\n headers,\n settings,\n maxRetries,\n abortSignal,\n system,\n prompt,\n messages,\n tools,\n toolChoice,\n toolCallStreaming,\n transform: transform?.({ tools: tools as TOOLS }),\n activeTools,\n repairToolCall,\n maxSteps,\n continueSteps,\n providerMetadata,\n onChunk,\n onFinish,\n onStepFinish,\n now,\n currentDate,\n generateId,\n });\n}\n\nclass DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>>\n implements StreamTextResult<TOOLS>\n{\n private readonly warningsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['warnings']>\n >();\n private readonly usagePromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['usage']>\n >();\n private readonly finishReasonPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['finishReason']>\n >();\n private readonly providerMetadataPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['experimental_providerMetadata']>\n >();\n private readonly textPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['text']>\n >();\n private readonly toolCallsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['toolCalls']>\n >();\n private readonly toolResultsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['toolResults']>\n >();\n private readonly requestPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['request']>\n >();\n private readonly responsePromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['response']>\n >();\n private readonly stepsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['steps']>\n >();\n\n private readonly addStream: (\n stream: ReadableStream<TextStreamPart<TOOLS>>,\n ) => void;\n\n private readonly closeStream: () => void;\n\n private baseStream: ReadableStream<TextStreamPart<TOOLS>>;\n\n constructor({\n model,\n telemetry,\n headers,\n settings,\n maxRetries: maxRetriesArg,\n abortSignal,\n system,\n prompt,\n messages,\n tools,\n toolChoice,\n toolCallStreaming,\n transform,\n activeTools,\n repairToolCall,\n maxSteps,\n continueSteps,\n providerMetadata,\n onChunk,\n onFinish,\n onStepFinish,\n now,\n currentDate,\n generateId,\n }: {\n model: LanguageModel;\n telemetry: TelemetrySettings | undefined;\n headers: Record<string, string | undefined> | undefined;\n settings: Omit<CallSettings, 'abortSignal' | 'headers'>;\n maxRetries: number | undefined;\n abortSignal: AbortSignal | undefined;\n 
system: Prompt['system'];\n prompt: Prompt['prompt'];\n messages: Prompt['messages'];\n tools: TOOLS | undefined;\n toolChoice: CoreToolChoice<TOOLS> | undefined;\n toolCallStreaming: boolean;\n transform:\n | TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>>\n | undefined;\n activeTools: Array<keyof TOOLS> | undefined;\n repairToolCall: ToolCallRepairFunction<TOOLS> | undefined;\n maxSteps: number;\n continueSteps: boolean;\n providerMetadata: ProviderMetadata | undefined;\n onChunk:\n | undefined\n | ((event: {\n chunk: Extract<\n TextStreamPart<TOOLS>,\n {\n type:\n | 'text-delta'\n | 'tool-call'\n | 'tool-call-streaming-start'\n | 'tool-call-delta'\n | 'tool-result';\n }\n >;\n }) => Promise<void> | void);\n onFinish:\n | undefined\n | ((\n event: Omit<StepResult<TOOLS>, 'stepType' | 'isContinued'> & {\n readonly steps: StepResult<TOOLS>[];\n },\n ) => Promise<void> | void);\n onStepFinish:\n | undefined\n | ((event: StepResult<TOOLS>) => Promise<void> | void);\n now: () => number;\n currentDate: () => Date;\n generateId: () => string;\n }) {\n if (maxSteps < 1) {\n throw new InvalidArgumentError({\n parameter: 'maxSteps',\n value: maxSteps,\n message: 'maxSteps must be at least 1',\n });\n }\n\n // event processor for telemetry, invoking callbacks, etc.\n // The event processor reads the transformed stream to enable correct\n // recording of the final transformed outputs.\n let recordedStepText = '';\n let recordedContinuationText = '';\n let recordedFullText = '';\n let recordedRequest: LanguageModelRequestMetadata | undefined = undefined;\n const recordedResponse: LanguageModelResponseMetadata & {\n messages: Array<CoreAssistantMessage | CoreToolMessage>;\n } = {\n id: generateId(),\n timestamp: currentDate(),\n modelId: model.modelId,\n messages: [],\n };\n let recordedToolCalls: ToolCallUnion<TOOLS>[] = [];\n let recordedToolResults: ToolResultUnion<TOOLS>[] = [];\n let recordedFinishReason: FinishReason | undefined = undefined;\n let recordedUsage: LanguageModelUsage | undefined = undefined;\n let recordedProviderMetadata: ProviderMetadata | undefined = undefined;\n let stepType: 'initial' | 'continue' | 'tool-result' = 'initial';\n const recordedSteps: StepResult<TOOLS>[] = [];\n let rootSpan!: Span;\n\n const eventProcessor = new TransformStream<\n TextStreamPart<TOOLS>,\n TextStreamPart<TOOLS>\n >({\n async transform(chunk, controller) {\n controller.enqueue(chunk); // forward the chunk to the next stream\n\n if (\n chunk.type === 'text-delta' ||\n chunk.type === 'tool-call' ||\n chunk.type === 'tool-result' ||\n chunk.type === 'tool-call-streaming-start' ||\n chunk.type === 'tool-call-delta'\n ) {\n await onChunk?.({ chunk });\n }\n\n if (chunk.type === 'text-delta') {\n recordedStepText += chunk.textDelta;\n recordedContinuationText += chunk.textDelta;\n recordedFullText += chunk.textDelta;\n }\n\n if (chunk.type === 'tool-call') {\n recordedToolCalls.push(chunk);\n }\n\n if (chunk.type === 'tool-result') {\n recordedToolResults.push(chunk);\n }\n\n if (chunk.type === 'step-finish') {\n const stepMessages = toResponseMessages({\n text: recordedContinuationText,\n tools: tools ?? 
({} as TOOLS),\n toolCalls: recordedToolCalls,\n toolResults: recordedToolResults,\n });\n\n // determine the next step type\n const currentStep = recordedSteps.length;\n let nextStepType: 'done' | 'continue' | 'tool-result' = 'done';\n if (currentStep + 1 < maxSteps) {\n if (\n continueSteps &&\n chunk.finishReason === 'length' &&\n // only use continue when there are no tool calls:\n recordedToolCalls.length === 0\n ) {\n nextStepType = 'continue';\n } else if (\n // there are tool calls:\n recordedToolCalls.length > 0 &&\n // all current tool calls have results:\n recordedToolResults.length === recordedToolCalls.length\n ) {\n nextStepType = 'tool-result';\n }\n }\n\n // Add step information (after response messages are updated):\n const currentStepResult: StepResult<TOOLS> = {\n stepType,\n text: recordedStepText,\n toolCalls: recordedToolCalls,\n toolResults: recordedToolResults,\n finishReason: chunk.finishReason,\n usage: chunk.usage,\n warnings: chunk.warnings,\n logprobs: chunk.logprobs,\n request: chunk.request,\n response: {\n ...chunk.response,\n messages: [...recordedResponse.messages, ...stepMessages],\n },\n experimental_providerMetadata: chunk.experimental_providerMetadata,\n isContinued: chunk.isContinued,\n };\n\n await onStepFinish?.(currentStepResult);\n\n recordedSteps.push(currentStepResult);\n\n recordedToolCalls = [];\n recordedToolResults = [];\n recordedStepText = '';\n recordedRequest = chunk.request;\n\n if (nextStepType !== 'done') {\n stepType = nextStepType;\n }\n\n if (nextStepType !== 'continue') {\n recordedResponse.messages.push(...stepMessages);\n recordedContinuationText = '';\n }\n }\n\n if (chunk.type === 'finish') {\n recordedResponse.id = chunk.response.id;\n recordedResponse.timestamp = chunk.response.timestamp;\n recordedResponse.modelId = chunk.response.modelId;\n recordedResponse.headers = chunk.response.headers;\n recordedUsage = chunk.usage;\n recordedFinishReason = chunk.finishReason;\n recordedProviderMetadata = chunk.experimental_providerMetadata;\n }\n },\n\n async flush(controller) {\n try {\n // from last step (when there are errors there may be no last step)\n const lastStep = recordedSteps[recordedSteps.length - 1];\n if (lastStep) {\n self.warningsPromise.resolve(lastStep.warnings);\n self.requestPromise.resolve(lastStep.request);\n self.responsePromise.resolve(lastStep.response);\n self.toolCallsPromise.resolve(lastStep.toolCalls);\n self.toolResultsPromise.resolve(lastStep.toolResults);\n self.providerMetadataPromise.resolve(\n lastStep.experimental_providerMetadata,\n );\n }\n\n // derived:\n const finishReason = recordedFinishReason ?? 'unknown';\n const usage = recordedUsage ?? {\n completionTokens: NaN,\n promptTokens: NaN,\n totalTokens: NaN,\n };\n\n // from finish:\n self.finishReasonPromise.resolve(finishReason);\n self.usagePromise.resolve(usage);\n\n // aggregate results:\n self.textPromise.resolve(recordedFullText);\n self.stepsPromise.resolve(recordedSteps);\n\n // call onFinish callback:\n await onFinish?.({\n finishReason,\n logprobs: undefined,\n usage,\n text: recordedFullText,\n toolCalls: lastStep.toolCalls,\n toolResults: lastStep.toolResults,\n request: lastStep.request ?? 
{},\n response: lastStep.response,\n warnings: lastStep.warnings,\n experimental_providerMetadata:\n lastStep.experimental_providerMetadata,\n steps: recordedSteps,\n });\n\n // Add response information to the root span:\n rootSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': finishReason,\n 'ai.response.text': { output: () => recordedFullText },\n 'ai.response.toolCalls': {\n output: () =>\n lastStep.toolCalls?.length\n ? JSON.stringify(lastStep.toolCalls)\n : undefined,\n },\n\n 'ai.usage.promptTokens': usage.promptTokens,\n 'ai.usage.completionTokens': usage.completionTokens,\n },\n }),\n );\n } catch (error) {\n controller.error(error);\n } finally {\n rootSpan.end();\n }\n },\n });\n\n // initialize the stitchable stream and the transformed stream:\n const stitchableStream = createStitchableStream<TextStreamPart<TOOLS>>();\n this.addStream = stitchableStream.addStream;\n this.closeStream = stitchableStream.close;\n this.baseStream = (\n transform\n ? stitchableStream.stream.pipeThrough(transform)\n : stitchableStream.stream\n ).pipeThrough(eventProcessor);\n\n const { maxRetries, retry } = prepareRetries({\n maxRetries: maxRetriesArg,\n });\n\n const tracer = getTracer(telemetry);\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const initialPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools,\n });\n\n const self = this;\n\n recordSpan({\n name: 'ai.streamText',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: 'ai.streamText', telemetry }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.settings.maxSteps': maxSteps,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async rootSpanArg => {\n rootSpan = rootSpanArg;\n\n async function streamStep({\n currentStep,\n responseMessages,\n usage,\n stepType,\n previousStepText,\n hasLeadingWhitespace,\n }: {\n currentStep: number;\n responseMessages: Array<CoreAssistantMessage | CoreToolMessage>;\n usage: LanguageModelUsage;\n stepType: 'initial' | 'continue' | 'tool-result';\n previousStepText: string;\n hasLeadingWhitespace: boolean;\n }) {\n // after the 1st step, we need to switch to messages format:\n const promptFormat =\n responseMessages.length === 0 ? 
initialPrompt.type : 'messages';\n\n const stepInputMessages = [\n ...initialPrompt.messages,\n ...responseMessages,\n ];\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: {\n type: promptFormat,\n system: initialPrompt.system,\n messages: stepInputMessages,\n },\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n\n const mode = {\n type: 'regular' as const,\n ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools }),\n };\n\n const {\n result: { stream, warnings, rawResponse, request },\n doStreamSpan,\n startTimestampMs,\n } = await retry(() =>\n recordSpan({\n name: 'ai.streamText.doStream',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.streamText.doStream',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => promptFormat,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.prompt.tools': {\n // convert the language model level tools:\n input: () => mode.tools?.map(tool => JSON.stringify(tool)),\n },\n 'ai.prompt.toolChoice': {\n input: () =>\n mode.toolChoice != null\n ? JSON.stringify(mode.toolChoice)\n : undefined,\n },\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.stop_sequences': settings.stopSequences,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async doStreamSpan => ({\n startTimestampMs: now(), // get before the call\n doStreamSpan,\n result: await model.doStream({\n mode,\n ...prepareCallSettings(settings),\n inputFormat: promptFormat,\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n }),\n }),\n }),\n );\n\n const transformedStream = runToolsTransformation({\n tools,\n generatorStream: stream,\n toolCallStreaming,\n tracer,\n telemetry,\n system,\n messages: stepInputMessages,\n repairToolCall,\n abortSignal,\n });\n\n const stepRequest = request ?? {};\n const stepToolCalls: ToolCallUnion<TOOLS>[] = [];\n const stepToolResults: ToolResultUnion<TOOLS>[] = [];\n let stepFinishReason: FinishReason = 'unknown';\n let stepUsage: LanguageModelUsage = {\n promptTokens: 0,\n completionTokens: 0,\n totalTokens: 0,\n };\n let stepProviderMetadata: ProviderMetadata | undefined;\n let stepFirstChunk = true;\n let stepText = '';\n let fullStepText = stepType === 'continue' ? previousStepText : '';\n let stepLogProbs: LogProbs | undefined;\n let stepResponse: { id: string; timestamp: Date; modelId: string } = {\n id: generateId(),\n timestamp: currentDate(),\n modelId: model.modelId,\n };\n\n // chunk buffer when using continue:\n let chunkBuffer = '';\n let chunkTextPublished = false;\n let inWhitespacePrefix = true;\n let hasWhitespaceSuffix = false; // for next step. 
when true, step ended with whitespace\n\n async function publishTextChunk({\n controller,\n chunk,\n }: {\n controller: TransformStreamDefaultController<TextStreamPart<TOOLS>>;\n chunk: TextStreamPart<TOOLS> & { type: 'text-delta' };\n }) {\n controller.enqueue(chunk);\n\n stepText += chunk.textDelta;\n fullStepText += chunk.textDelta;\n chunkTextPublished = true;\n hasWhitespaceSuffix = chunk.textDelta.trimEnd() !== chunk.textDelta;\n }\n\n self.addStream(\n transformedStream.pipeThrough(\n new TransformStream<\n SingleRequestTextStreamPart<TOOLS>,\n TextStreamPart<TOOLS>\n >({\n async transform(chunk, controller): Promise<void> {\n // Telemetry for first chunk:\n if (stepFirstChunk) {\n const msToFirstChunk = now() - startTimestampMs;\n\n stepFirstChunk = false;\n\n doStreamSpan.addEvent('ai.stream.firstChunk', {\n 'ai.response.msToFirstChunk': msToFirstChunk,\n });\n\n doStreamSpan.setAttributes({\n 'ai.response.msToFirstChunk': msToFirstChunk,\n });\n }\n\n // Filter out empty text deltas\n if (\n chunk.type === 'text-delta' &&\n chunk.textDelta.length === 0\n ) {\n return;\n }\n\n const chunkType = chunk.type;\n switch (chunkType) {\n case 'text-delta': {\n if (continueSteps) {\n // when a new step starts, leading whitespace is to be discarded\n // when there is already preceding whitespace in the chunk buffer\n const trimmedChunkText =\n inWhitespacePrefix && hasLeadingWhitespace\n ? chunk.textDelta.trimStart()\n : chunk.textDelta;\n\n if (trimmedChunkText.length === 0) {\n break;\n }\n\n inWhitespacePrefix = false;\n chunkBuffer += trimmedChunkText;\n\n const split = splitOnLastWhitespace(chunkBuffer);\n\n // publish the text until the last whitespace:\n if (split != null) {\n chunkBuffer = split.suffix;\n\n await publishTextChunk({\n controller,\n chunk: {\n type: 'text-delta',\n textDelta: split.prefix + split.whitespace,\n },\n });\n }\n } else {\n await publishTextChunk({ controller, chunk });\n }\n\n break;\n }\n\n case 'tool-call': {\n controller.enqueue(chunk);\n // store tool calls for onFinish callback and toolCalls promise:\n stepToolCalls.push(chunk);\n break;\n }\n\n case 'tool-result': {\n controller.enqueue(chunk);\n // store tool results for onFinish callback and toolResults promise:\n stepToolResults.push(chunk);\n break;\n }\n\n case 'response-metadata': {\n stepResponse = {\n id: chunk.id ?? stepResponse.id,\n timestamp: chunk.timestamp ?? stepResponse.timestamp,\n modelId: chunk.modelId ?? 
stepResponse.modelId,\n };\n break;\n }\n\n case 'finish': {\n // Note: tool executions might not be finished yet when the finish event is emitted.\n // store usage and finish reason for promises and onFinish callback:\n stepUsage = chunk.usage;\n stepFinishReason = chunk.finishReason;\n stepProviderMetadata =\n chunk.experimental_providerMetadata;\n stepLogProbs = chunk.logprobs;\n\n // Telemetry for finish event timing\n // (since tool executions can take longer and distort calculations)\n const msToFinish = now() - startTimestampMs;\n doStreamSpan.addEvent('ai.stream.finish');\n doStreamSpan.setAttributes({\n 'ai.response.msToFinish': msToFinish,\n 'ai.response.avgCompletionTokensPerSecond':\n (1000 * stepUsage.completionTokens) / msToFinish,\n });\n\n break;\n }\n\n case 'tool-call-streaming-start':\n case 'tool-call-delta': {\n controller.enqueue(chunk);\n break;\n }\n\n case 'error': {\n controller.enqueue(chunk);\n stepFinishReason = 'error';\n break;\n }\n\n default: {\n const exhaustiveCheck: never = chunkType;\n throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);\n }\n }\n },\n\n // invoke onFinish callback and resolve toolResults promise when the stream is about to close:\n async flush(controller) {\n const stepToolCallsJson =\n stepToolCalls.length > 0\n ? JSON.stringify(stepToolCalls)\n : undefined;\n\n // determine the next step type\n let nextStepType: 'done' | 'continue' | 'tool-result' =\n 'done';\n if (currentStep + 1 < maxSteps) {\n if (\n continueSteps &&\n stepFinishReason === 'length' &&\n // only use continue when there are no tool calls:\n stepToolCalls.length === 0\n ) {\n nextStepType = 'continue';\n } else if (\n // there are tool calls:\n stepToolCalls.length > 0 &&\n // all current tool calls have results:\n stepToolResults.length === stepToolCalls.length\n ) {\n nextStepType = 'tool-result';\n }\n }\n\n // when using continuation, publish buffer on final step or if there\n // was no whitespace in the step:\n if (\n continueSteps &&\n chunkBuffer.length > 0 &&\n (nextStepType !== 'continue' || // when the next step is a regular step, publish the buffer\n (stepType === 'continue' && !chunkTextPublished)) // when the next step is a continue step, publish the buffer if no text was published in the step\n ) {\n await publishTextChunk({\n controller,\n chunk: {\n type: 'text-delta',\n textDelta: chunkBuffer,\n },\n });\n chunkBuffer = '';\n }\n\n // record telemetry information first to ensure best effort timing\n try {\n doStreamSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': stepFinishReason,\n 'ai.response.text': { output: () => stepText },\n 'ai.response.toolCalls': {\n output: () => stepToolCallsJson,\n },\n 'ai.response.id': stepResponse.id,\n 'ai.response.model': stepResponse.modelId,\n 'ai.response.timestamp':\n stepResponse.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': stepUsage.promptTokens,\n 'ai.usage.completionTokens':\n stepUsage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [stepFinishReason],\n 'gen_ai.response.id': stepResponse.id,\n 'gen_ai.response.model': stepResponse.modelId,\n 'gen_ai.usage.input_tokens': stepUsage.promptTokens,\n 'gen_ai.usage.output_tokens':\n stepUsage.completionTokens,\n },\n }),\n );\n } catch (error) {\n // ignore error setting telemetry attributes\n } finally {\n // finish doStreamSpan before other operations for correct timing:\n doStreamSpan.end();\n }\n\n controller.enqueue({\n type: 
'step-finish',\n finishReason: stepFinishReason,\n usage: stepUsage,\n experimental_providerMetadata: stepProviderMetadata,\n logprobs: stepLogProbs,\n request: stepRequest,\n response: {\n ...stepResponse,\n headers: rawResponse?.headers,\n },\n warnings,\n isContinued: nextStepType === 'continue',\n });\n\n const combinedUsage = addLanguageModelUsage(usage, stepUsage);\n\n if (nextStepType === 'done') {\n controller.enqueue({\n type: 'finish',\n finishReason: stepFinishReason,\n usage: combinedUsage,\n experimental_providerMetadata: stepProviderMetadata,\n logprobs: stepLogProbs,\n response: {\n ...stepResponse,\n headers: rawResponse?.headers,\n },\n });\n\n self.closeStream(); // close the stitchable stream\n } else {\n // append to messages for the next step:\n if (stepType === 'continue') {\n // continue step: update the last assistant message\n // continue is only possible when there are no tool calls,\n // so we can assume that there is a single last assistant message:\n const lastMessage = responseMessages[\n responseMessages.length - 1\n ] as CoreAssistantMessage;\n\n if (typeof lastMessage.content === 'string') {\n lastMessage.content += stepText;\n } else {\n lastMessage.content.push({\n text: stepText,\n type: 'text',\n });\n }\n } else {\n responseMessages.push(\n ...toResponseMessages({\n text: stepText,\n tools: tools ?? ({} as TOOLS),\n toolCalls: stepToolCalls,\n toolResults: stepToolResults,\n }),\n );\n }\n\n await streamStep({\n currentStep: currentStep + 1,\n responseMessages,\n usage: combinedUsage,\n stepType: nextStepType,\n previousStepText: fullStepText,\n hasLeadingWhitespace: hasWhitespaceSuffix,\n });\n }\n },\n }),\n ),\n );\n }\n\n // add the initial stream to the stitchable stream\n await streamStep({\n currentStep: 0,\n responseMessages: [],\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n totalTokens: 0,\n },\n previousStepText: '',\n stepType: 'initial',\n hasLeadingWhitespace: false,\n });\n },\n }).catch(error => {\n // add an error stream part and close the streams:\n self.addStream(\n new ReadableStream({\n start(controller) {\n controller.enqueue({ type: 'error', error });\n controller.close();\n },\n }),\n );\n self.closeStream();\n });\n }\n\n get warnings() {\n return this.warningsPromise.value;\n }\n\n get usage() {\n return this.usagePromise.value;\n }\n\n get finishReason() {\n return this.finishReasonPromise.value;\n }\n\n get experimental_providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n\n get text() {\n return this.textPromise.value;\n }\n\n get toolCalls() {\n return this.toolCallsPromise.value;\n }\n\n get toolResults() {\n return this.toolResultsPromise.value;\n }\n\n get request() {\n return this.requestPromise.value;\n }\n\n get response() {\n return this.responsePromise.value;\n }\n\n get steps() {\n return this.stepsPromise.value;\n }\n\n /**\nSplit out a new stream from the original stream.\nThe original stream is replaced to allow for further splitting,\nsince we do not know how many times the stream will be split.\n\nNote: this leads to buffering the stream content on the server.\nHowever, the LLM results are expected to be small enough to not cause issues.\n */\n private teeStream() {\n const [stream1, stream2] = this.baseStream.tee();\n this.baseStream = stream2;\n return stream1;\n }\n\n get textStream(): AsyncIterableStream<string> {\n return createAsyncIterableStream(\n this.teeStream().pipeThrough(\n new TransformStream<TextStreamPart<TOOLS>, string>({\n transform(chunk, controller) {\n if 
(chunk.type === 'text-delta') {\n controller.enqueue(chunk.textDelta);\n } else if (chunk.type === 'error') {\n controller.error(chunk.error);\n }\n },\n }),\n ),\n );\n }\n\n get fullStream(): AsyncIterableStream<TextStreamPart<TOOLS>> {\n return createAsyncIterableStream(this.teeStream());\n }\n\n private toDataStreamInternal({\n getErrorMessage = () => 'An error occurred.', // mask error messages for safety by default\n sendUsage = true,\n }: {\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean;\n } = {}): ReadableStream<DataStreamString> {\n let aggregatedResponse = '';\n\n const callbackTransformer = new TransformStream<\n TextStreamPart<TOOLS>,\n TextStreamPart<TOOLS>\n >({\n async transform(chunk, controller): Promise<void> {\n controller.enqueue(chunk);\n\n if (chunk.type === 'text-delta') {\n aggregatedResponse += chunk.textDelta;\n }\n },\n });\n\n const streamPartsTransformer = new TransformStream<\n TextStreamPart<TOOLS>,\n DataStreamString\n >({\n transform: async (chunk, controller) => {\n const chunkType = chunk.type;\n switch (chunkType) {\n case 'text-delta': {\n controller.enqueue(formatDataStreamPart('text', chunk.textDelta));\n break;\n }\n\n case 'tool-call-streaming-start': {\n controller.enqueue(\n formatDataStreamPart('tool_call_streaming_start', {\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n }),\n );\n break;\n }\n\n case 'tool-call-delta': {\n controller.enqueue(\n formatDataStreamPart('tool_call_delta', {\n toolCallId: chunk.toolCallId,\n argsTextDelta: chunk.argsTextDelta,\n }),\n );\n break;\n }\n\n case 'tool-call': {\n controller.enqueue(\n formatDataStreamPart('tool_call', {\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n args: chunk.args,\n }),\n );\n break;\n }\n\n case 'tool-result': {\n controller.enqueue(\n formatDataStreamPart('tool_result', {\n toolCallId: chunk.toolCallId,\n result: chunk.result,\n }),\n );\n break;\n }\n\n case 'error': {\n controller.enqueue(\n formatDataStreamPart('error', getErrorMessage(chunk.error)),\n );\n break;\n }\n\n case 'step-finish': {\n controller.enqueue(\n formatDataStreamPart('finish_step', {\n finishReason: chunk.finishReason,\n usage: sendUsage\n ? {\n promptTokens: chunk.usage.promptTokens,\n completionTokens: chunk.usage.completionTokens,\n }\n : undefined,\n isContinued: chunk.isContinued,\n }),\n );\n break;\n }\n\n case 'finish': {\n controller.enqueue(\n formatDataStreamPart('finish_message', {\n finishReason: chunk.finishReason,\n usage: sendUsage\n ? 
{\n promptTokens: chunk.usage.promptTokens,\n completionTokens: chunk.usage.completionTokens,\n }\n : undefined,\n }),\n );\n break;\n }\n\n default: {\n const exhaustiveCheck: never = chunkType;\n throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);\n }\n }\n },\n });\n\n return this.fullStream\n .pipeThrough(callbackTransformer)\n .pipeThrough(streamPartsTransformer);\n }\n\n pipeDataStreamToResponse(\n response: ServerResponse,\n {\n status,\n statusText,\n headers,\n data,\n getErrorMessage,\n sendUsage,\n }: ResponseInit & {\n data?: StreamData;\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean; // default to true (change to false in v4: secure by default)\n } = {},\n ) {\n writeToServerResponse({\n response,\n status,\n statusText,\n headers: prepareOutgoingHttpHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n stream: this.toDataStream({ data, getErrorMessage, sendUsage }),\n });\n }\n\n pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit) {\n writeToServerResponse({\n response,\n status: init?.status,\n statusText: init?.statusText,\n headers: prepareOutgoingHttpHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n stream: this.textStream.pipeThrough(new TextEncoderStream()),\n });\n }\n\n // TODO breaking change 5.0: remove pipeThrough(new TextEncoderStream())\n toDataStream(options?: {\n data?: StreamData;\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean;\n }) {\n const stream = this.toDataStreamInternal({\n getErrorMessage: options?.getErrorMessage,\n sendUsage: options?.sendUsage,\n }).pipeThrough(new TextEncoderStream());\n\n return options?.data ? mergeStreams(options?.data.stream, stream) : stream;\n }\n\n mergeIntoDataStream(writer: DataStreamWriter) {\n writer.merge(\n this.toDataStreamInternal({\n getErrorMessage: writer.onError,\n }),\n );\n }\n\n toDataStreamResponse({\n headers,\n status,\n statusText,\n data,\n getErrorMessage,\n sendUsage,\n }: ResponseInit & {\n data?: StreamData;\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean;\n } = {}): Response {\n return new Response(\n this.toDataStream({ data, getErrorMessage, sendUsage }),\n {\n status,\n statusText,\n headers: prepareResponseHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n },\n );\n }\n\n toTextStreamResponse(init?: ResponseInit): Response {\n return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {\n status: init?.status ?? 200,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n });\n }\n}\n","/**\n * Merges two readable streams into a single readable stream, emitting values\n * from each stream as they become available.\n *\n * The first stream is prioritized over the second stream. 
If both streams have\n * values available, the first stream's value is emitted first.\n *\n * @template VALUE1 - The type of values emitted by the first stream.\n * @template VALUE2 - The type of values emitted by the second stream.\n * @param {ReadableStream<VALUE1>} stream1 - The first readable stream.\n * @param {ReadableStream<VALUE2>} stream2 - The second readable stream.\n * @returns {ReadableStream<VALUE1 | VALUE2>} A new readable stream that emits values from both input streams.\n */\nexport function mergeStreams<VALUE1, VALUE2>(\n stream1: ReadableStream<VALUE1>,\n stream2: ReadableStream<VALUE2>,\n): ReadableStream<VALUE1 | VALUE2> {\n const reader1 = stream1.getReader();\n const reader2 = stream2.getReader();\n\n let lastRead1: Promise<ReadableStreamReadResult<VALUE1>> | undefined =\n undefined;\n let lastRead2: Promise<ReadableStreamReadResult<VALUE2>> | undefined =\n undefined;\n\n let stream1Done = false;\n let stream2Done = false;\n\n // only use when stream 2 is done:\n async function readStream1(\n controller: ReadableStreamDefaultController<VALUE1 | VALUE2>,\n ) {\n try {\n if (lastRead1 == null) {\n lastRead1 = reader1.read();\n }\n\n const result = await lastRead1;\n lastRead1 = undefined;\n\n if (!result.done) {\n controller.enqueue(result.value);\n } else {\n controller.close();\n }\n } catch (error) {\n controller.error(error);\n }\n }\n\n // only use when stream 1 is done:\n async function readStream2(\n controller: ReadableStreamDefaultController<VALUE1 | VALUE2>,\n ) {\n try {\n if (lastRead2 == null) {\n lastRead2 = reader2.read();\n }\n\n const result = await lastRead2;\n lastRead2 = undefined;\n\n if (!result.done) {\n controller.enqueue(result.value);\n } else {\n controller.close();\n }\n } catch (error) {\n controller.error(error);\n }\n }\n\n return new ReadableStream<VALUE1 | VALUE2>({\n async pull(controller) {\n try {\n // stream 1 is done, we can only read from stream 2:\n if (stream1Done) {\n await readStream2(controller);\n return;\n }\n\n // stream 2 is done, we can only read from stream 1:\n if (stream2Done) {\n await readStream1(controller);\n return;\n }\n\n // pull the next value from the stream that was read last:\n if (lastRead1 == null) {\n lastRead1 = reader1.read();\n }\n if (lastRead2 == null) {\n lastRead2 = reader2.read();\n }\n\n // Note on Promise.race (prioritizing stream 1 over stream 2):\n // If the iterable contains one or more non-promise values and/or an already settled promise,\n // then Promise.race() will settle to the first of these values found in the iterable.\n const { result, reader } = await Promise.race([\n lastRead1.then(result => ({ result, reader: reader1 })),\n lastRead2.then(result => ({ result, reader: reader2 })),\n ]);\n\n if (!result.done) {\n controller.enqueue(result.value);\n }\n\n if (reader === reader1) {\n lastRead1 = undefined;\n if (result.done) {\n // stream 1 is done, we can only read from stream 2:\n await readStream2(controller);\n stream1Done = true;\n }\n } else {\n lastRead2 = undefined;\n // stream 2 is done, we can only read from stream 1:\n if (result.done) {\n stream2Done = true;\n await readStream1(controller);\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader1.cancel();\n reader2.cancel();\n },\n });\n}\n","import { LanguageModelV1StreamPart } from '@ai-sdk/provider';\nimport { generateId } from '@ai-sdk/ui-utils';\nimport { Tracer } from '@opentelemetry/api';\nimport { ToolExecutionError } from '../../errors';\nimport { CoreMessage } from 
'../prompt/message';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { CoreTool } from '../tool';\nimport {\n FinishReason,\n LanguageModelUsage,\n LogProbs,\n ProviderMetadata,\n} from '../types';\nimport { calculateLanguageModelUsage } from '../types/usage';\nimport { parseToolCall } from './parse-tool-call';\nimport { ToolCallUnion } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolResultUnion } from './tool-result';\n\nexport type SingleRequestTextStreamPart<\n TOOLS extends Record<string, CoreTool>,\n> =\n | {\n type: 'text-delta';\n textDelta: string;\n }\n | ({\n type: 'tool-call';\n } & ToolCallUnion<TOOLS>)\n | {\n type: 'tool-call-streaming-start';\n toolCallId: string;\n toolName: string;\n }\n | {\n type: 'tool-call-delta';\n toolCallId: string;\n toolName: string;\n argsTextDelta: string;\n }\n | ({\n type: 'tool-result';\n } & ToolResultUnion<TOOLS>)\n | {\n type: 'response-metadata';\n id?: string;\n timestamp?: Date;\n modelId?: string;\n }\n | {\n type: 'finish';\n finishReason: FinishReason;\n logprobs?: LogProbs;\n usage: LanguageModelUsage;\n experimental_providerMetadata?: ProviderMetadata;\n }\n | {\n type: 'error';\n error: unknown;\n };\n\nexport function runToolsTransformation<TOOLS extends Record<string, CoreTool>>({\n tools,\n generatorStream,\n toolCallStreaming,\n tracer,\n telemetry,\n system,\n messages,\n abortSignal,\n repairToolCall,\n}: {\n tools: TOOLS | undefined;\n generatorStream: ReadableStream<LanguageModelV1StreamPart>;\n toolCallStreaming: boolean;\n tracer: Tracer;\n telemetry: TelemetrySettings | undefined;\n system: string | undefined;\n messages: CoreMessage[];\n abortSignal: AbortSignal | undefined;\n repairToolCall: ToolCallRepairFunction<TOOLS> | undefined;\n}): ReadableStream<SingleRequestTextStreamPart<TOOLS>> {\n // tool results stream\n let toolResultsStreamController: ReadableStreamDefaultController<\n SingleRequestTextStreamPart<TOOLS>\n > | null = null;\n const toolResultsStream = new ReadableStream<\n SingleRequestTextStreamPart<TOOLS>\n >({\n start(controller) {\n toolResultsStreamController = controller;\n },\n });\n\n // keep track of active tool calls for tool call streaming:\n const activeToolCalls: Record<string, boolean> = {};\n\n // keep track of outstanding tool results for stream closing:\n const outstandingToolResults = new Set<string>();\n\n let canClose = false;\n let finishChunk:\n | (SingleRequestTextStreamPart<TOOLS> & { type: 'finish' })\n | undefined = undefined;\n\n function attemptClose() {\n // close the tool results controller if no more outstanding tool calls\n if (canClose && outstandingToolResults.size === 0) {\n // we delay sending the finish chunk until all tool results (incl. 
delayed ones)\n // are received to ensure that the frontend receives tool results before a message\n // finish event arrives.\n if (finishChunk != null) {\n toolResultsStreamController!.enqueue(finishChunk);\n }\n\n toolResultsStreamController!.close();\n }\n }\n\n // forward stream\n const forwardStream = new TransformStream<\n LanguageModelV1StreamPart,\n SingleRequestTextStreamPart<TOOLS>\n >({\n async transform(\n chunk: LanguageModelV1StreamPart,\n controller: TransformStreamDefaultController<\n SingleRequestTextStreamPart<TOOLS>\n >,\n ) {\n const chunkType = chunk.type;\n\n switch (chunkType) {\n // forward:\n case 'text-delta':\n case 'response-metadata':\n case 'error': {\n controller.enqueue(chunk);\n break;\n }\n\n // forward with less information:\n case 'tool-call-delta': {\n if (toolCallStreaming) {\n if (!activeToolCalls[chunk.toolCallId]) {\n controller.enqueue({\n type: 'tool-call-streaming-start',\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n });\n\n activeToolCalls[chunk.toolCallId] = true;\n }\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n argsTextDelta: chunk.argsTextDelta,\n });\n }\n break;\n }\n\n // process tool call:\n case 'tool-call': {\n try {\n const toolCall = await parseToolCall({\n toolCall: chunk,\n tools,\n repairToolCall,\n system,\n messages,\n });\n\n controller.enqueue(toolCall);\n\n const tool = tools![toolCall.toolName];\n\n if (tool.execute != null) {\n const toolExecutionId = generateId(); // use our own id to guarantee uniqueness\n outstandingToolResults.add(toolExecutionId);\n\n // Note: we don't await the tool execution here (by leaving out 'await' on recordSpan),\n // because we want to process the next chunk as soon as possible.\n // This is important for the case where the tool execution takes a long time.\n recordSpan({\n name: 'ai.toolCall',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.toolCall',\n telemetry,\n }),\n 'ai.toolCall.name': toolCall.toolName,\n 'ai.toolCall.id': toolCall.toolCallId,\n 'ai.toolCall.args': {\n output: () => JSON.stringify(toolCall.args),\n },\n },\n }),\n tracer,\n fn: async span =>\n tool.execute!(toolCall.args, {\n toolCallId: toolCall.toolCallId,\n messages,\n abortSignal,\n }).then(\n (result: any) => {\n toolResultsStreamController!.enqueue({\n ...toolCall,\n type: 'tool-result',\n result,\n } as any);\n\n outstandingToolResults.delete(toolExecutionId);\n\n attemptClose();\n\n // record telemetry\n try {\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.toolCall.result': {\n output: () => JSON.stringify(result),\n },\n },\n }),\n );\n } catch (ignored) {\n // JSON stringify might fail if the result is not serializable,\n // in which case we just ignore it. 
In the future we might want to\n // add an optional serialize method to the tool interface and warn\n // if the result is not serializable.\n }\n },\n (error: any) => {\n toolResultsStreamController!.enqueue({\n type: 'error',\n error: new ToolExecutionError({\n toolName: toolCall.toolName,\n toolArgs: toolCall.args,\n cause: error,\n }),\n });\n\n outstandingToolResults.delete(toolExecutionId);\n attemptClose();\n },\n ),\n });\n }\n } catch (error) {\n toolResultsStreamController!.enqueue({\n type: 'error',\n error,\n });\n }\n\n break;\n }\n\n case 'finish': {\n finishChunk = {\n type: 'finish',\n finishReason: chunk.finishReason,\n logprobs: chunk.logprobs,\n usage: calculateLanguageModelUsage(chunk.usage),\n experimental_providerMetadata: chunk.providerMetadata,\n };\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = chunkType;\n throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n\n flush() {\n canClose = true;\n attemptClose();\n },\n });\n\n // combine the generator stream and the tool results stream\n return new ReadableStream<SingleRequestTextStreamPart<TOOLS>>({\n async start(controller) {\n // need to wait for both pipes so there are no dangling promises that\n // can cause uncaught promise rejections when the stream is aborted\n return Promise.all([\n generatorStream.pipeThrough(forwardStream).pipeTo(\n new WritableStream({\n write(chunk) {\n controller.enqueue(chunk);\n },\n close() {\n // the generator stream controller is automatically closed when it's consumed\n },\n }),\n ),\n toolResultsStream.pipeTo(\n new WritableStream({\n write(chunk) {\n controller.enqueue(chunk);\n },\n close() {\n controller.close();\n },\n }),\n ),\n ]);\n },\n });\n}\n","import { delay as originalDelay } from '../../util/delay';\nimport { CoreTool } from '../tool/tool';\nimport { TextStreamPart } from './stream-text-result';\n\n/**\n * Smooths text streaming output.\n *\n * @param delayInMs - The delay in milliseconds between each chunk. Defaults to 10ms.\n * @returns A transform stream that smooths text streaming output.\n */\nexport function smoothStream<TOOLS extends Record<string, CoreTool>>({\n delayInMs = 10,\n _internal: { delay = originalDelay } = {},\n}: {\n delayInMs?: number;\n\n /**\n * Internal. For test use only. 
May change without notice.\n */\n _internal?: {\n delay?: (delayInMs: number) => Promise<void>;\n };\n} = {}): (options: {\n tools: TOOLS;\n}) => TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>> {\n let buffer = '';\n\n return () =>\n new TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>>({\n async transform(chunk, controller) {\n if (chunk.type === 'step-finish') {\n if (buffer.length > 0) {\n controller.enqueue({ type: 'text-delta', textDelta: buffer });\n buffer = '';\n }\n\n controller.enqueue(chunk);\n return;\n }\n\n if (chunk.type !== 'text-delta') {\n controller.enqueue(chunk);\n return;\n }\n\n buffer += chunk.textDelta;\n\n // Stream out complete words when whitespace is found\n while (buffer.match(/\\s/)) {\n const whitespaceIndex = buffer.search(/\\s/);\n const word = buffer.slice(0, whitespaceIndex + 1);\n controller.enqueue({ type: 'text-delta', textDelta: word });\n buffer = buffer.slice(whitespaceIndex + 1);\n\n if (delayInMs > 0) {\n await delay(delayInMs);\n }\n }\n },\n });\n}\n","import { LanguageModelV1, LanguageModelV1CallOptions } from '@ai-sdk/provider';\nimport { Experimental_LanguageModelV1Middleware } from './language-model-v1-middleware';\n\n/**\n * Wraps a LanguageModelV1 instance with middleware functionality.\n * This function allows you to apply middleware to transform parameters,\n * wrap generate operations, and wrap stream operations of a language model.\n *\n * @param options - Configuration options for wrapping the language model.\n * @param options.model - The original LanguageModelV1 instance to be wrapped.\n * @param options.middleware - The middleware to be applied to the language model.\n * @param options.modelId - Optional custom model ID to override the original model's ID.\n * @param options.providerId - Optional custom provider ID to override the original model's provider.\n * @returns A new LanguageModelV1 instance with middleware applied.\n */\nexport const experimental_wrapLanguageModel = ({\n model,\n middleware: { transformParams, wrapGenerate, wrapStream },\n modelId,\n providerId,\n}: {\n model: LanguageModelV1;\n middleware: Experimental_LanguageModelV1Middleware;\n modelId?: string;\n providerId?: string;\n}): LanguageModelV1 => {\n async function doTransform({\n params,\n type,\n }: {\n params: LanguageModelV1CallOptions;\n type: 'generate' | 'stream';\n }) {\n return transformParams ? await transformParams({ params, type }) : params;\n }\n\n return {\n specificationVersion: 'v1',\n\n provider: providerId ?? model.provider,\n modelId: modelId ?? model.modelId,\n\n defaultObjectGenerationMode: model.defaultObjectGenerationMode,\n supportsImageUrls: model.supportsImageUrls,\n supportsUrl: model.supportsUrl,\n supportsStructuredOutputs: model.supportsStructuredOutputs,\n\n async doGenerate(\n params: LanguageModelV1CallOptions,\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const transformedParams = await doTransform({ params, type: 'generate' });\n const doGenerate = async () => model.doGenerate(transformedParams);\n return wrapGenerate\n ? wrapGenerate({ doGenerate, params: transformedParams, model })\n : doGenerate();\n },\n\n async doStream(\n params: LanguageModelV1CallOptions,\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const transformedParams = await doTransform({ params, type: 'stream' });\n const doStream = async () => model.doStream(transformedParams);\n return wrapStream\n ? 
wrapStream({ doStream, params: transformedParams, model })\n : doStream();\n },\n };\n};\n","import { EmbeddingModelV1, LanguageModelV1 } from '@ai-sdk/provider';\nimport { Provider } from '../types';\nimport { NoSuchModelError } from '@ai-sdk/provider';\n\n/**\n * Creates a custom provider with specified language models, text embedding models, and an optional fallback provider.\n *\n * @param {Object} options - The options for creating the custom provider.\n * @param {Record<string, LanguageModelV1>} [options.languageModels] - A record of language models, where keys are model IDs and values are LanguageModelV1 instances.\n * @param {Record<string, EmbeddingModelV1<string>>} [options.textEmbeddingModels] - A record of text embedding models, where keys are model IDs and values are EmbeddingModelV1<string> instances.\n * @param {Provider} [options.fallbackProvider] - An optional fallback provider to use when a requested model is not found in the custom provider.\n * @returns {Provider} A Provider object with languageModel and textEmbeddingModel methods.\n *\n * @throws {NoSuchModelError} Throws when a requested model is not found and no fallback provider is available.\n */\nexport function experimental_customProvider({\n languageModels,\n textEmbeddingModels,\n fallbackProvider,\n}: {\n languageModels?: Record<string, LanguageModelV1>;\n textEmbeddingModels?: Record<string, EmbeddingModelV1<string>>;\n fallbackProvider?: Provider;\n}): Provider {\n return {\n languageModel(modelId: string): LanguageModelV1 {\n if (languageModels != null && modelId in languageModels) {\n return languageModels[modelId];\n }\n\n if (fallbackProvider) {\n return fallbackProvider.languageModel(modelId);\n }\n\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' });\n },\n\n textEmbeddingModel(modelId: string): EmbeddingModelV1<string> {\n if (textEmbeddingModels != null && modelId in textEmbeddingModels) {\n return textEmbeddingModels[modelId];\n }\n\n if (fallbackProvider) {\n return fallbackProvider.textEmbeddingModel(modelId);\n }\n\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });\n },\n };\n}\n","import { AISDKError, NoSuchModelError } from '@ai-sdk/provider';\n\nconst name = 'AI_NoSuchProviderError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class NoSuchProviderError extends NoSuchModelError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly providerId: string;\n readonly availableProviders: string[];\n\n constructor({\n modelId,\n modelType,\n providerId,\n availableProviders,\n message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`,\n }: {\n modelId: string;\n modelType: 'languageModel' | 'textEmbeddingModel';\n providerId: string;\n availableProviders: string[];\n message?: string;\n }) {\n super({ errorName: name, modelId, modelType, message });\n\n this.providerId = providerId;\n this.availableProviders = availableProviders;\n }\n\n static isInstance(error: unknown): error is NoSuchProviderError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { NoSuchModelError } from '@ai-sdk/provider';\nimport { EmbeddingModel, LanguageModel, Provider } from '../types';\nimport { NoSuchProviderError } from './no-such-provider-error';\n\n/**\n * Creates a registry for the given providers.\n */\nexport function experimental_createProviderRegistry(\n providers: Record<string, Provider>,\n): Provider {\n const registry = new DefaultProviderRegistry();\n\n 
for (const [id, provider] of Object.entries(providers)) {\n registry.registerProvider({ id, provider });\n }\n\n return registry;\n}\n\nclass DefaultProviderRegistry implements Provider {\n private providers: Record<string, Provider> = {};\n\n registerProvider({ id, provider }: { id: string; provider: Provider }): void {\n this.providers[id] = provider;\n }\n\n private getProvider(id: string): Provider {\n const provider = this.providers[id];\n\n if (provider == null) {\n throw new NoSuchProviderError({\n modelId: id,\n modelType: 'languageModel',\n providerId: id,\n availableProviders: Object.keys(this.providers),\n });\n }\n\n return provider;\n }\n\n private splitId(\n id: string,\n modelType: 'languageModel' | 'textEmbeddingModel',\n ): [string, string] {\n const index = id.indexOf(':');\n\n if (index === -1) {\n throw new NoSuchModelError({\n modelId: id,\n modelType,\n message:\n `Invalid ${modelType} id for registry: ${id} ` +\n `(must be in the format \"providerId:modelId\")`,\n });\n }\n\n return [id.slice(0, index), id.slice(index + 1)];\n }\n\n languageModel(id: string): LanguageModel {\n const [providerId, modelId] = this.splitId(id, 'languageModel');\n const model = this.getProvider(providerId).languageModel?.(modelId);\n\n if (model == null) {\n throw new NoSuchModelError({ modelId: id, modelType: 'languageModel' });\n }\n\n return model;\n }\n\n textEmbeddingModel(id: string): EmbeddingModel<string> {\n const [providerId, modelId] = this.splitId(id, 'textEmbeddingModel');\n const provider = this.getProvider(providerId);\n\n const model = provider.textEmbeddingModel?.(modelId);\n\n if (model == null) {\n throw new NoSuchModelError({\n modelId: id,\n modelType: 'textEmbeddingModel',\n });\n }\n\n return model;\n }\n\n /**\n * @deprecated Use `textEmbeddingModel` instead.\n */\n textEmbedding(id: string): EmbeddingModel<string> {\n return this.textEmbeddingModel(id);\n }\n}\n","import { Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { ToolResultContent } from '../prompt/tool-result-content';\nimport { CoreMessage } from '../prompt/message';\n\ntype Parameters = z.ZodTypeAny | Schema<any>;\n\nexport type inferParameters<PARAMETERS extends Parameters> =\n PARAMETERS extends Schema<any>\n ? PARAMETERS['_type']\n : PARAMETERS extends z.ZodTypeAny\n ? z.infer<PARAMETERS>\n : never;\n\nexport interface ToolExecutionOptions {\n /**\n * The ID of the tool call. You can use it e.g. when sending tool-call related information with stream data.\n */\n toolCallId: string;\n\n /**\n * Messages that were sent to the language model to initiate the response that contained the tool call.\n * The messages **do not** include the system prompt nor the assistant response that contained the tool call.\n */\n messages: CoreMessage[];\n\n /**\n * An optional abort signal that indicates that the overall operation should be aborted.\n */\n abortSignal?: AbortSignal;\n}\n\n/**\nA tool contains the description and the schema of the input that the tool expects.\nThis enables the language model to generate the input.\n\nThe tool can also contain an optional execute function for the actual execution function of the tool.\n */\nexport type CoreTool<PARAMETERS extends Parameters = any, RESULT = any> = {\n /**\nThe schema of the input that the tool expects. 
The language model will use this to generate the input.\nIt is also used to validate the output of the language model.\nUse descriptions to make the input understandable for the language model.\n */\n parameters: PARAMETERS;\n\n /**\nOptional conversion function that maps the tool result to multi-part tool content for LLMs.\n */\n experimental_toToolResultContent?: (result: RESULT) => ToolResultContent;\n\n /**\nAn async function that is called with the arguments from the tool call and produces a result.\nIf not provided, the tool will not be executed automatically.\n\n@args is the input of the tool call.\n@options.abortSignal is a signal that can be used to abort the tool call.\n */\n execute?: (\n args: inferParameters<PARAMETERS>,\n options: ToolExecutionOptions,\n ) => PromiseLike<RESULT>;\n} & (\n | {\n /**\nFunction tool.\n */\n type?: undefined | 'function';\n\n /**\nAn optional description of what the tool does. Will be used by the language model to decide whether to use the tool.\n */\n description?: string;\n }\n | {\n /**\nProvider-defined tool.\n */\n type: 'provider-defined';\n\n /**\nThe ID of the tool. Should follow the format `<provider-name>.<tool-name>`.\n */\n id: `${string}.${string}`;\n\n /**\nThe arguments for configuring the tool. Must match the expected arguments defined by the provider for this tool.\n */\n args: Record<string, unknown>;\n }\n);\n\n/**\nHelper function for inferring the execute args of a tool.\n */\n// Note: special type inference is needed for the execute function args to make sure they are inferred correctly.\nexport function tool<PARAMETERS extends Parameters, RESULT>(\n tool: CoreTool<PARAMETERS, RESULT> & {\n execute: (\n args: inferParameters<PARAMETERS>,\n options: ToolExecutionOptions,\n ) => PromiseLike<RESULT>;\n },\n): CoreTool<PARAMETERS, RESULT> & {\n execute: (\n args: inferParameters<PARAMETERS>,\n options: ToolExecutionOptions,\n ) => PromiseLike<RESULT>;\n};\nexport function tool<PARAMETERS extends Parameters, RESULT>(\n tool: CoreTool<PARAMETERS, RESULT> & {\n execute?: undefined;\n },\n): CoreTool<PARAMETERS, RESULT> & {\n execute: undefined;\n};\nexport function tool<PARAMETERS extends Parameters, RESULT = any>(\n tool: CoreTool<PARAMETERS, RESULT>,\n): CoreTool<PARAMETERS, RESULT> {\n return tool;\n}\n","/**\n * Calculates the cosine similarity between two vectors. 
This is a useful metric for\n * comparing the similarity of two vectors such as embeddings.\n *\n * @param vector1 - The first vector.\n * @param vector2 - The second vector.\n *\n * @returns The cosine similarity between vector1 and vector2.\n * @throws {Error} If the vectors do not have the same length.\n */\nexport function cosineSimilarity(vector1: number[], vector2: number[]) {\n if (vector1.length !== vector2.length) {\n throw new Error(\n `Vectors must have the same length (vector1: ${vector1.length} elements, vector2: ${vector2.length} elements)`,\n );\n }\n\n return (\n dotProduct(vector1, vector2) / (magnitude(vector1) * magnitude(vector2))\n );\n}\n\n/**\n * Calculates the dot product of two vectors.\n * @param vector1 - The first vector.\n * @param vector2 - The second vector.\n * @returns The dot product of vector1 and vector2.\n */\nfunction dotProduct(vector1: number[], vector2: number[]) {\n return vector1.reduce(\n (accumulator: number, value: number, index: number) =>\n accumulator + value * vector2[index]!,\n 0,\n );\n}\n\n/**\n * Calculates the magnitude of a vector.\n * @param vector - The vector.\n * @returns The magnitude of the vector.\n */\nfunction magnitude(vector: number[]) {\n return Math.sqrt(dotProduct(vector, vector));\n}\n","import {\n AssistantMessage,\n DataMessage,\n formatAssistantStreamPart,\n} from '@ai-sdk/ui-utils';\n\n/**\nYou can pass the thread and the latest message into the `AssistantResponse`. This establishes the context for the response.\n */\ntype AssistantResponseSettings = {\n /**\nThe thread ID that the response is associated with.\n */\n threadId: string;\n\n /**\nThe ID of the latest message that the response is associated with.\n */\n messageId: string;\n};\n\n/**\nThe process parameter is a callback in which you can run the assistant on threads, and send messages and data messages to the client.\n */\ntype AssistantResponseCallback = (options: {\n /**\nForwards an assistant message (non-streaming) to the client.\n */\n sendMessage: (message: AssistantMessage) => void;\n\n /**\nSend a data message to the client. You can use this to provide information for rendering custom UIs while the assistant is processing the thread.\n */\n sendDataMessage: (message: DataMessage) => void;\n\n /**\nForwards the assistant response stream to the client. 
Returns the `Run` object after it completes, or when it requires an action.\n */\n forwardStream: (stream: any) => Promise<any | undefined>;\n}) => Promise<void>;\n\n/**\nThe `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.\nIt is designed to facilitate streaming assistant responses to the `useAssistant` hook.\nIt receives an assistant thread and a current message, and can send messages and data messages to the client.\n */\nexport function AssistantResponse(\n { threadId, messageId }: AssistantResponseSettings,\n process: AssistantResponseCallback,\n): Response {\n const stream = new ReadableStream({\n async start(controller) {\n const textEncoder = new TextEncoder();\n\n const sendMessage = (message: AssistantMessage) => {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('assistant_message', message),\n ),\n );\n };\n\n const sendDataMessage = (message: DataMessage) => {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('data_message', message),\n ),\n );\n };\n\n const sendError = (errorMessage: string) => {\n controller.enqueue(\n textEncoder.encode(formatAssistantStreamPart('error', errorMessage)),\n );\n };\n\n const forwardStream = async (stream: any) => {\n let result: any | undefined = undefined;\n\n for await (const value of stream) {\n switch (value.event) {\n case 'thread.message.created': {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('assistant_message', {\n id: value.data.id,\n role: 'assistant',\n content: [{ type: 'text', text: { value: '' } }],\n }),\n ),\n );\n break;\n }\n\n case 'thread.message.delta': {\n const content = value.data.delta.content?.[0];\n\n if (content?.type === 'text' && content.text?.value != null) {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('text', content.text.value),\n ),\n );\n }\n\n break;\n }\n\n case 'thread.run.completed':\n case 'thread.run.requires_action': {\n result = value.data;\n break;\n }\n }\n }\n\n return result;\n };\n\n // send the threadId and messageId as the first message:\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('assistant_control_data', {\n threadId,\n messageId,\n }),\n ),\n );\n\n try {\n await process({\n sendMessage,\n sendDataMessage,\n forwardStream,\n });\n } catch (error) {\n sendError((error as any).message ?? 
`${error}`);\n } finally {\n controller.close();\n }\n },\n pull(controller) {},\n cancel() {},\n });\n\n return new Response(stream, {\n status: 200,\n headers: {\n 'Content-Type': 'text/plain; charset=utf-8',\n },\n });\n}\n","import { formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { DataStreamWriter } from '../core/data-stream/data-stream-writer';\nimport { mergeStreams } from '../core/util/merge-streams';\nimport { prepareResponseHeaders } from '../core/util/prepare-response-headers';\nimport {\n createCallbacksTransformer,\n StreamCallbacks,\n} from './stream-callbacks';\nimport { StreamData } from './stream-data';\n\ntype LangChainImageDetail = 'auto' | 'low' | 'high';\n\ntype LangChainMessageContentText = {\n type: 'text';\n text: string;\n};\n\ntype LangChainMessageContentImageUrl = {\n type: 'image_url';\n image_url:\n | string\n | {\n url: string;\n detail?: LangChainImageDetail;\n };\n};\n\ntype LangChainMessageContentComplex =\n | LangChainMessageContentText\n | LangChainMessageContentImageUrl\n | (Record<string, any> & {\n type?: 'text' | 'image_url' | string;\n })\n | (Record<string, any> & {\n type?: never;\n });\n\ntype LangChainMessageContent = string | LangChainMessageContentComplex[];\n\ntype LangChainAIMessageChunk = {\n content: LangChainMessageContent;\n};\n\n// LC stream event v2\ntype LangChainStreamEvent = {\n event: string;\n data: any;\n};\n\nfunction toDataStreamInternal(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n callbacks?: StreamCallbacks,\n) {\n return stream\n .pipeThrough(\n new TransformStream<\n LangChainStreamEvent | LangChainAIMessageChunk | string\n >({\n transform: async (value, controller) => {\n // text stream:\n if (typeof value === 'string') {\n controller.enqueue(value);\n return;\n }\n\n // LC stream events v2:\n if ('event' in value) {\n // chunk is AIMessage Chunk for on_chat_model_stream event:\n if (value.event === 'on_chat_model_stream') {\n forwardAIMessageChunk(\n value.data?.chunk as LangChainAIMessageChunk,\n controller,\n );\n }\n return;\n }\n\n // AI Message chunk stream:\n forwardAIMessageChunk(value, controller);\n },\n }),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(new TextDecoderStream())\n .pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(formatDataStreamPart('text', chunk));\n },\n }),\n );\n}\n\n/**\nConverts LangChain output streams to an AI SDK Data Stream.\n\nThe following streams are supported:\n- `LangChainAIMessageChunk` streams (LangChain `model.stream` output)\n- `string` streams (LangChain `StringOutputParser` output)\n */\nexport function toDataStream(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n callbacks?: StreamCallbacks,\n) {\n return toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream(),\n );\n}\n\nexport function toDataStreamResponse(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n options?: {\n init?: ResponseInit;\n data?: StreamData;\n callbacks?: StreamCallbacks;\n },\n) {\n const dataStream = toDataStreamInternal(\n stream,\n options?.callbacks,\n ).pipeThrough(new TextEncoderStream());\n const data = options?.data;\n const init = options?.init;\n\n const responseStream = data\n ? 
mergeStreams(data.stream, dataStream)\n : dataStream;\n\n return new Response(responseStream, {\n status: init?.status ?? 200,\n statusText: init?.statusText,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n });\n}\n\nexport function mergeIntoDataStream(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n options: { dataStream: DataStreamWriter; callbacks?: StreamCallbacks },\n) {\n options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));\n}\n\nfunction forwardAIMessageChunk(\n chunk: LangChainAIMessageChunk,\n controller: TransformStreamDefaultController<any>,\n) {\n if (typeof chunk.content === 'string') {\n controller.enqueue(chunk.content);\n } else {\n const content: LangChainMessageContentComplex[] = chunk.content;\n for (const item of content) {\n if (item.type === 'text') {\n controller.enqueue(item.text);\n }\n }\n }\n}\n","/**\n * Configuration options and helper callback methods for stream lifecycle events.\n */\nexport interface StreamCallbacks {\n /** `onStart`: Called once when the stream is initialized. */\n onStart?: () => Promise<void> | void;\n\n /**\n * `onCompletion`: Called for each tokenized message.\n *\n * @deprecated Use `onFinal` instead.\n */\n onCompletion?: (completion: string) => Promise<void> | void;\n\n /** `onFinal`: Called once when the stream is closed with the final completion message. */\n onFinal?: (completion: string) => Promise<void> | void;\n\n /** `onToken`: Called for each tokenized message. */\n onToken?: (token: string) => Promise<void> | void;\n\n /** `onText`: Called for each text chunk. */\n onText?: (text: string) => Promise<void> | void;\n}\n\n/**\n * Creates a transform stream that encodes input messages and invokes optional callback functions.\n * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.\n * - `onStart`: Called once when the stream is initialized.\n * - `onToken`: Called for each tokenized message.\n * - `onCompletion`: Called every time a completion message is received. This can occur multiple times when using e.g. 
OpenAI functions\n * - `onFinal`: Called once when the stream is closed with the final completion message.\n *\n * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.\n *\n * @param {StreamCallbacks} [callbacks] - An object containing the callback functions.\n * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.\n *\n * @example\n * const callbacks = {\n * onStart: async () => console.log('Stream started'),\n * onToken: async (token) => console.log(`Token: ${token}`),\n * onCompletion: async (completion) => console.log(`Completion: ${completion}`)\n * onFinal: async () => data.close()\n * };\n * const transformer = createCallbacksTransformer(callbacks);\n */\nexport function createCallbacksTransformer(\n callbacks: StreamCallbacks | undefined = {},\n): TransformStream<string, Uint8Array> {\n const textEncoder = new TextEncoder();\n let aggregatedResponse = '';\n\n return new TransformStream({\n async start(): Promise<void> {\n if (callbacks.onStart) await callbacks.onStart();\n },\n\n async transform(message, controller): Promise<void> {\n controller.enqueue(textEncoder.encode(message));\n\n aggregatedResponse += message;\n\n if (callbacks.onToken) await callbacks.onToken(message);\n if (callbacks.onText && typeof message === 'string') {\n await callbacks.onText(message);\n }\n },\n\n async flush(): Promise<void> {\n if (callbacks.onCompletion) {\n await callbacks.onCompletion(aggregatedResponse);\n }\n if (callbacks.onFinal) {\n await callbacks.onFinal(aggregatedResponse);\n }\n },\n });\n}\n","import { convertAsyncIteratorToReadableStream } from '@ai-sdk/provider-utils';\nimport { formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { DataStreamWriter } from '../core/data-stream/data-stream-writer';\nimport { mergeStreams } from '../core/util/merge-streams';\nimport { prepareResponseHeaders } from '../core/util/prepare-response-headers';\nimport {\n createCallbacksTransformer,\n StreamCallbacks,\n} from './stream-callbacks';\nimport { StreamData } from './stream-data';\n\ntype EngineResponse = {\n delta: string;\n};\n\nfunction toDataStreamInternal(\n stream: AsyncIterable<EngineResponse>,\n callbacks?: StreamCallbacks,\n) {\n const trimStart = trimStartOfStream();\n\n return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]())\n .pipeThrough(\n new TransformStream({\n async transform(message, controller): Promise<void> {\n controller.enqueue(trimStart(message.delta));\n },\n }),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(new TextDecoderStream())\n .pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(formatDataStreamPart('text', chunk));\n },\n }),\n );\n}\n\nexport function toDataStream(\n stream: AsyncIterable<EngineResponse>,\n callbacks?: StreamCallbacks,\n) {\n return toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream(),\n );\n}\n\nexport function toDataStreamResponse(\n stream: AsyncIterable<EngineResponse>,\n options: {\n init?: ResponseInit;\n data?: StreamData;\n callbacks?: StreamCallbacks;\n } = {},\n) {\n const { init, data, callbacks } = options;\n const dataStream = toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream(),\n );\n const responseStream = data\n ? 
mergeStreams(data.stream, dataStream)\n : dataStream;\n\n return new Response(responseStream, {\n status: init?.status ?? 200,\n statusText: init?.statusText,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n });\n}\n\nexport function mergeIntoDataStream(\n stream: AsyncIterable<EngineResponse>,\n options: {\n dataStream: DataStreamWriter;\n callbacks?: StreamCallbacks;\n },\n) {\n options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));\n}\n\nfunction trimStartOfStream(): (text: string) => string {\n let isStreamStart = true;\n\n return (text: string): string => {\n if (isStreamStart) {\n text = text.trimStart();\n if (text) isStreamStart = false;\n }\n return text;\n };\n}\n","import { JSONValue, formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { HANGING_STREAM_WARNING_TIME_MS } from '../util/constants';\n\n/**\n * A stream wrapper to send custom JSON-encoded data back to the client.\n *\n * @deprecated Please use `createDataStream`, `createDataStreamResponse`, and `pipeDataStreamToResponse` instead.\n */\nexport class StreamData {\n private encoder = new TextEncoder();\n\n private controller: ReadableStreamController<Uint8Array> | null = null;\n public stream: ReadableStream<Uint8Array>;\n\n private isClosed: boolean = false;\n private warningTimeout: NodeJS.Timeout | null = null;\n\n constructor() {\n const self = this;\n\n this.stream = new ReadableStream({\n start: async controller => {\n self.controller = controller;\n\n // Set a timeout to show a warning if the stream is not closed within 3 seconds\n if (process.env.NODE_ENV === 'development') {\n self.warningTimeout = setTimeout(() => {\n console.warn(\n 'The data stream is hanging. Did you forget to close it with `data.close()`?',\n );\n }, HANGING_STREAM_WARNING_TIME_MS);\n }\n },\n pull: controller => {\n // No-op: we don't need to do anything special on pull\n },\n cancel: reason => {\n this.isClosed = true;\n },\n });\n }\n\n async close(): Promise<void> {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.controller.close();\n this.isClosed = true;\n\n // Clear the warning timeout if the stream is closed\n if (this.warningTimeout) {\n clearTimeout(this.warningTimeout);\n }\n }\n\n append(value: JSONValue): void {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.controller.enqueue(\n this.encoder.encode(formatDataStreamPart('data', [value])),\n );\n }\n\n appendMessageAnnotation(value: JSONValue): void {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.controller.enqueue(\n this.encoder.encode(formatDataStreamPart('message_annotations', [value])),\n );\n }\n}\n","/**\n * Warning time for notifying developers that a stream is hanging in dev mode\n * using a console.warn.\n */\nexport const HANGING_STREAM_WARNING_TIME_MS = 15 * 
1000;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,IAAAA,oBAOO;AAkBP,IAAAC,0BAA2B;;;AC1B3B,IAAAC,mBAA2B;;;ACA3B,sBAAuD;AAGhD,SAAS,iBAAiB;AAAA,EAC/B;AAAA,EACA,UAAU,MAAM;AAAA;AAClB,GAGqC;AACnC,MAAI;AAEJ,QAAM,wBAAyC,CAAC;AAEhD,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,eAAe;AACnB,mBAAa;AAAA,IACf;AAAA,EACF,CAAC;AAED,WAAS,YAAY,MAAwB;AAC3C,QAAI;AACF,iBAAW,QAAQ,IAAI;AAAA,IACzB,SAAS,OAAO;AAAA,IAEhB;AAAA,EACF;AAEA,MAAI;AACF,UAAM,SAAS,QAAQ;AAAA,MACrB,UAAU,MAAM;AACd,wBAAY,sCAAqB,QAAQ,CAAC,IAAI,CAAC,CAAC;AAAA,MAClD;AAAA,MACA,uBAAuB,YAAY;AACjC,wBAAY,sCAAqB,uBAAuB,CAAC,UAAU,CAAC,CAAC;AAAA,MACvE;AAAA,MACA,MAAM,WAAW;AACf,8BAAsB;AAAA,WACnB,YAAY;AACX,kBAAM,SAAS,UAAU,UAAU;AACnC,mBAAO,MAAM;AACX,oBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,kBAAI;AAAM;AACV,0BAAY,KAAK;AAAA,YACnB;AAAA,UACF,GAAG,EAAE,MAAM,WAAS;AAClB,4BAAY,sCAAqB,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,UAC3D,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,QAAQ;AACV,4BAAsB;AAAA,QACpB,OAAO,MAAM,WAAS;AACpB,0BAAY,sCAAqB,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,QAC3D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,oBAAY,sCAAqB,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,EAC3D;AAMA,QAAM,iBAAgC,IAAI,QAAQ,OAAM,YAAW;AACjE,WAAO,sBAAsB,SAAS,GAAG;AACvC,YAAM,sBAAsB,MAAM;AAAA,IACpC;AACA,YAAQ;AAAA,EACV,CAAC;AAED,iBAAe,QAAQ,MAAM;AAC3B,QAAI;AACF,iBAAW,MAAM;AAAA,IACnB,SAAS,OAAO;AAAA,IAEhB;AAAA,EACF,CAAC;AAED,SAAO;AACT;;;ACpFO,SAAS,uBACd,SACA;AAAA,EACE;AAAA,EACA;AACF,GACA;AACA,QAAM,kBAAkB,IAAI,QAAQ,4BAAW,CAAC,CAAC;AAEjD,MAAI,CAAC,gBAAgB,IAAI,cAAc,GAAG;AACxC,oBAAgB,IAAI,gBAAgB,WAAW;AAAA,EACjD;AAEA,MAAI,sBAAsB,QAAW;AACnC,oBAAgB,IAAI,2BAA2B,iBAAiB;AAAA,EAClE;AAEA,SAAO;AACT;;;ACdO,SAAS,yBAAyB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGa;AACX,SAAO,IAAI;AAAA,IACT,iBAAiB,EAAE,SAAS,QAAQ,CAAC,EAAE,YAAY,IAAI,kBAAkB,CAAC;AAAA,IAC1E;AAAA,MACE;AAAA,MACA;AAAA,MACA,SAAS,uBAAuB,SAAS;AAAA,QACvC,aAAa;AAAA,QACb,mBAAmB;AAAA,MACrB,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACzBO,SAAS,2BACd,SACA;AAAA,EACE;AAAA,EACA;AACF,GACA;AACA,QAAM,kBAA8D,CAAC;AAErE,MAAI,WAAW,MAAM;AACnB,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,sBAAgB,GAAG,IAAI;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,gBAAgB,cAAc,KAAK,MAAM;AAC3C,oBAAgB,cAAc,IAAI;AAAA,EACpC;AAEA,MAAI,sBAAsB,QAAW;AACnC,oBAAgB,yBAAyB,IAAI;AAAA,EAC/C;AAEA,SAAO;AACT;;;ACnBO,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAMS;AACP,WAAS,UAAU,0BAAU,KAAK,YAAY,OAAO;AAErD,QAAM,SAAS,OAAO,UAAU;AAChC,QAAM,OAAO,YAAY;AACvB,QAAI;AACF,aAAO,MAAM;AACX,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,YAAI;AAAM;AACV,iBAAS,MAAM,KAAK;AAAA,MACtB;AAAA,IACF,SAAS,OAAO;AACd,YAAM;AAAA,IACR,UAAE;AACA,eAAS,IAAI;AAAA,IACf;AAAA,EACF;AAEA,OAAK;AACP;;;AC9BO,SAAS,yBACd,UACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAIM;AACN,wBAAsB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS,2BAA2B,SAAS;AAAA,MAC3C,aAAa;AAAA,MACb,mBAAmB;AAAA,IACrB,CAAC;AAAA,IACD,QAAQ,iBAAiB,EAAE,SAAS,QAAQ,CAAC,EAAE;AAAA,MAC7C,IAAI,kBAAkB;AAAA,IACxB;AAAA,EACF,CAAC;AACH;;;AC/BA,sBAA2B;AAE3B,IAAM,OAAO;AACb,IAAM,SAAS,mBAAmB,IAAI;AACtC,IAAM,SAAS,OAAO,IAAI,MAAM;AAJhC;AAMO,IAAM,uBAAN,cAAmC,2BAAW;AAAA,EAMnD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAIG;AACD,UAAM;AAAA,MACJ;AAAA,MACA,SAAS,kCAAkC,SAAS,KAAK,OAAO;AAAA,IAClE,CAAC;AAjBH,SAAkB,MAAU;AAmB1B,SAAK,YAAY;AACjB,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,OAAO,WAAW,OAA+C;AAC/D,WAAO,2BAAW,UAAU,OAAO,MAAM;A
AAA,EAC3C;AACF;AA1BoB;;;ACPpB,IAAAC,mBAA6B;AAC7B,4BAA8C;;;ACD9C,eAAsB,MAAM,WAAmC;AAC7D,SAAO,cAAc,SACjB,QAAQ,QAAQ,IAChB,IAAI,QAAQ,aAAW,WAAW,SAAS,SAAS,CAAC;AAC3D;;;ACJA,IAAAC,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAWO,IAAM,aAAN,cAAyB,4BAAW;AAAA,EAQzC,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAIG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAhBzB,SAAkBG,OAAU;AAkB1B,SAAK,SAAS;AACd,SAAK,SAAS;AAGd,SAAK,YAAY,OAAO,OAAO,SAAS,CAAC;AAAA,EAC3C;AAAA,EAEA,OAAO,WAAW,OAAqC;AACrD,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA5BoBE,MAAAD;;;AFCb,IAAM,8BACX,CAAC;AAAA,EACC,aAAa;AAAA,EACb,mBAAmB;AAAA,EACnB,gBAAgB;AAClB,IAAI,CAAC,MACL,OAAe,MACb,6BAA6B,GAAG;AAAA,EAC9B;AAAA,EACA,WAAW;AAAA,EACX;AACF,CAAC;AAEL,eAAe,6BACb,GACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GACA,SAAoB,CAAC,GACJ;AACjB,MAAI;AACF,WAAO,MAAM,EAAE;AAAA,EACjB,SAAS,OAAO;AACd,YAAI,oCAAa,KAAK,GAAG;AACvB,YAAM;AAAA,IACR;AAEA,QAAI,eAAe,GAAG;AACpB,YAAM;AAAA,IACR;AAEA,UAAM,mBAAe,uCAAgB,KAAK;AAC1C,UAAM,YAAY,CAAC,GAAG,QAAQ,KAAK;AACnC,UAAM,YAAY,UAAU;AAE5B,QAAI,YAAY,YAAY;AAC1B,YAAM,IAAI,WAAW;AAAA,QACnB,SAAS,gBAAgB,SAAS,0BAA0B,YAAY;AAAA,QACxE,QAAQ;AAAA,QACR,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,QACE,iBAAiB,SACjB,8BAAa,WAAW,KAAK,KAC7B,MAAM,gBAAgB,QACtB,aAAa,YACb;AACA,YAAM,MAAM,SAAS;AACrB,aAAO;AAAA,QACL;AAAA,QACA,EAAE,YAAY,WAAW,gBAAgB,WAAW,cAAc;AAAA,QAClE;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,GAAG;AACnB,YAAM;AAAA,IACR;AAEA,UAAM,IAAI,WAAW;AAAA,MACnB,SAAS,gBAAgB,SAAS,wCAAwC,YAAY;AAAA,MACtF,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AACF;;;AGzEO,SAAS,eAAe;AAAA,EAC7B;AACF,GAKE;AACA,MAAI,cAAc,MAAM;AACtB,QAAI,CAAC,OAAO,UAAU,UAAU,GAAG;AACjC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,aAAa,GAAG;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,mBAAmB,kCAAc;AAEvC,SAAO;AAAA,IACL,YAAY;AAAA,IACZ,OAAO,4BAA4B,EAAE,YAAY,iBAAiB,CAAC;AAAA,EACrE;AACF;;;ACvCO,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AACF,GAGG;AACD,SAAO;AAAA;AAAA,IAEL,kBAAkB,GAAG,WAAW,IAC9B,uCAAW,eAAc,OAAO,IAAI,UAAU,UAAU,KAAK,EAC/D;AAAA,IACA,iBAAiB,uCAAW;AAAA;AAAA,IAG5B,kBAAkB;AAAA,IAClB,2BAA2B,uCAAW;AAAA,EACxC;AACF;;;AChBO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAKe;AAdf,MAAAE;AAeE,SAAO;AAAA,IACL,qBAAqB,MAAM;AAAA,IAC3B,eAAe,MAAM;AAAA;AAAA,IAGrB,GAAG,OAAO,QAAQ,QAAQ,EAAE,OAAO,CAAC,YAAY,CAAC,KAAK,KAAK,MAAM;AAC/D,iBAAW,eAAe,GAAG,EAAE,IAAI;AACnC,aAAO;AAAA,IACT,GAAG,CAAC,CAAe;AAAA;AAAA,IAGnB,GAAG,OAAO,SAAQA,OAAA,uCAAW,aAAX,OAAAA,OAAuB,CAAC,CAAC,EAAE;AAAA,MAC3C,CAAC,YAAY,CAAC,KAAK,KAAK,MAAM;AAC5B,mBAAW,yBAAyB,GAAG,EAAE,IAAI;AAC7C,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA;AAAA,IAGA,GAAG,OAAO,QAAQ,4BAAW,CAAC,CAAC,EAAE,OAAO,CAAC,YAAY,CAAC,KAAK,KAAK,MAAM;AACpE,UAAI,UAAU,QAAW;AACvB,mBAAW,sBAAsB,GAAG,EAAE,IAAI;AAAA,MAC5C;AACA,aAAO;AAAA,IACT,GAAG,CAAC,CAAe;AAAA,EACrB;AACF;;;AC1CA,iBAA8B;;;ACKvB,IAAM,aAAqB;AAAA,EAChC,YAAkB;AAChB,WAAO;AAAA,EACT;AAAA,EAEA,gBACEC,QACA,MACA,MACA,MACiB;AACjB,QAAI,OAAO,SAAS,YAAY;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AACA,QAAI,OAAO,SAAS,YAAY;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AACA,QAAI,OAAO,SAAS,YAAY;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AAAA,EACF;AACF;AAEA,IAAM,WAAiB;AAAA,EACrB,cAAc;AACZ,WAAO;AAAA,EACT;AAAA,EACA,eAAe;AACb,WAAO;AAAA,EACT;AAAA,EACA,gBAAgB;AACd,WAAO;AAAA,EACT;AAAA,EACA,WAAW;AACT,WAAO;AAAA,EACT;AAAA,EACA,UAAU;AACR,WAAO;AAAA,EACT;AAAA,EACA,WAAW;AACT,WAAO;AAAA,EACT;AAAA,EACA,YAAY;AACV,WAAO;AAAA,EACT;AAAA,EACA,aAAa;AACX,WAAO;AAAA,EACT;AAAA,EACA,MAAM;AACJ,WAAO;AAAA,EACT;AAAA,EACA,cAAc;AACZ,WAAO;AAAA,EACT;AAAA,EACA,kBAAkB;AAChB,WAAO;AA
AA,EACT;AACF;AAEA,IAAM,kBAA+B;AAAA,EACnC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AACd;;;ADjEO,SAAS,UAAU;AAAA,EACxB,YAAY;AAAA,EACZ;AACF,IAGI,CAAC,GAAW;AACd,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ;AACV,WAAO;AAAA,EACT;AAEA,SAAO,iBAAM,UAAU,IAAI;AAC7B;;;AEnBA,IAAAC,cAAyD;AAElD,SAAS,WAAc;AAAA,EAC5B,MAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,GAMG;AACD,SAAO,OAAO,gBAAgBA,QAAM,EAAE,WAAW,GAAG,OAAM,SAAQ;AAChE,QAAI;AACF,YAAM,SAAS,MAAM,GAAG,IAAI;AAE5B,UAAI,aAAa;AACf,aAAK,IAAI;AAAA,MACX;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI;AACF,YAAI,iBAAiB,OAAO;AAC1B,eAAK,gBAAgB;AAAA,YACnB,MAAM,MAAM;AAAA,YACZ,SAAS,MAAM;AAAA,YACf,OAAO,MAAM;AAAA,UACf,CAAC;AACD,eAAK,UAAU;AAAA,YACb,MAAM,2BAAe;AAAA,YACrB,SAAS,MAAM;AAAA,UACjB,CAAC;AAAA,QACH,OAAO;AACL,eAAK,UAAU,EAAE,MAAM,2BAAe,MAAM,CAAC;AAAA,QAC/C;AAAA,MACF,UAAE;AAEA,aAAK,IAAI;AAAA,MACX;AAEA,YAAM;AAAA,IACR;AAAA,EACF,CAAC;AACH;;;AC5CO,SAAS,0BAA0B;AAAA,EACxC;AAAA,EACA;AACF,GASe;AAEb,OAAI,uCAAW,eAAc,MAAM;AACjC,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,OAAO,QAAQ,UAAU,EAAE,OAAO,CAACC,aAAY,CAAC,KAAK,KAAK,MAAM;AACrE,QAAI,UAAU,QAAW;AACvB,aAAOA;AAAA,IACT;AAGA,QACE,OAAO,UAAU,YACjB,WAAW,SACX,OAAO,MAAM,UAAU,YACvB;AAEA,WAAI,uCAAW,kBAAiB,OAAO;AACrC,eAAOA;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,MAAM;AAE3B,aAAO,WAAW,SACdA,cACA,EAAE,GAAGA,aAAY,CAAC,GAAG,GAAG,OAAO;AAAA,IACrC;AAGA,QACE,OAAO,UAAU,YACjB,YAAY,SACZ,OAAO,MAAM,WAAW,YACxB;AAEA,WAAI,uCAAW,mBAAkB,OAAO;AACtC,eAAOA;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,OAAO;AAE5B,aAAO,WAAW,SACdA,cACA,EAAE,GAAGA,aAAY,CAAC,GAAG,GAAG,OAAO;AAAA,IACrC;AAGA,WAAO,EAAE,GAAGA,aAAY,CAAC,GAAG,GAAG,MAAM;AAAA,EACvC,GAAG,CAAC,CAAC;AACP;;;AC3CA,eAAsB,MAAa;AAAA,EACjC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,wBAAwB;AAC1B,GAiCgC;AAC9B,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE1E,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,WAAW;AAAA,EACzB,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB,EAAE,aAAa,YAAY,UAAU,CAAC;AAAA,QAC/D,GAAG;AAAA,QACH,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE;AAAA,MACnD;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAChB,YAAM,EAAE,WAAW,OAAO,YAAY,IAAI,MAAM;AAAA,QAAM;AAAA;AAAA,UAEpD,WAAW;AAAA,YACT,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA;AAAA,gBAEH,aAAa,EAAE,OAAO,MAAM,CAAC,KAAK,UAAU,KAAK,CAAC,EAAE;AAAA,cACtD;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAM,gBAAe;AAvGnC,kBAAAC;AAwGY,oBAAM,gBAAgB,MAAM,MAAM,QAAQ;AAAA,gBACxC,QAAQ,CAAC,KAAK;AAAA,gBACd;AAAA,gBACA;AAAA,cACF,CAAC;AAED,oBAAMC,aAAY,cAAc,WAAW,CAAC;AAC5C,oBAAMC,UAAQF,OAAA,cAAc,UAAd,OAAAA,OAAuB,EAAE,QAAQ,IAAI;AAEnD,0BAAY;AAAA,gBACV,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,iBAAiB;AAAA,sBACf,QAAQ,MACN,cAAc,WAAW;AAAA,wBAAI,CAAAC,eAC3B,KAAK,UAAUA,UAAS;AAAA,sBAC1B;AAAA,oBACJ;AAAA,oBACA,mBAAmBC,OAAM;AAAA,kBAC3B;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO;AAAA,gBACL,WAAAD;AAAA,gBACA,OAAAC;AAAA,gBACA,aAAa,cAAc;AAAA,cAC7B;AAAA,YACF;AAAA,UACF,CAAC;AAAA;AAAA,MACH;AAEA,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,gBAAgB,EAAE,QAAQ,MAAM,KAAK,UAAU,SAAS,EAAE;AAAA,YAC1D,mBAAmB,MAAM;AAAA,UAC3B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,mBAAmB,EAAE,OAAO,WAAW,OAAO,YAAY,CAAC;AAAA,IACxE;AAAA,EACF,CAAC;AACH;AAEA,IAAM,qBAAN,MAA8D;AAAA,EAM5D,YAAY,SAKT;AACD,SAAK,QAAQ,QAAQ;AACrB,SAAK,YAAY,QAAQ;AACzB,SAAK,QAAQ,QAAQ;AACrB,SAAK,cAAc,QAAQ;AAAA,EAC7B;AACF;;;ACjKO,SAAS,WAAc,OAAY,WAA0B;AAClE,MAAI,aAAa,GAAG;AAClB,UAAM,IAAI,MAAM,kCAAkC;AAAA,EACpD;AAEA,QAAM,SAAS,CAAC;AAChB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AA
ChD,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC3C;AAEA,SAAO;AACT;;;ACQA,eAAsB,UAAiB;AAAA,EACrC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,wBAAwB;AAC1B,GAiCoC;AAClC,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE1E,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,WAAW;AAAA,EACzB,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB,EAAE,aAAa,gBAAgB,UAAU,CAAC;AAAA,QACnE,GAAG;AAAA;AAAA,QAEH,aAAa;AAAA,UACX,OAAO,MAAM,OAAO,IAAI,WAAS,KAAK,UAAU,KAAK,CAAC;AAAA,QACxD;AAAA,MACF;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAChB,YAAM,uBAAuB,MAAM;AAInC,UAAI,wBAAwB,MAAM;AAChC,cAAM,EAAE,YAAAC,aAAY,MAAM,IAAI,MAAM,MAAM,MAAM;AAE9C,iBAAO,WAAW;AAAA,YAChB,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA;AAAA,gBAEH,aAAa;AAAA,kBACX,OAAO,MAAM,OAAO,IAAI,WAAS,KAAK,UAAU,KAAK,CAAC;AAAA,gBACxD;AAAA,cACF;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAM,gBAAe;AAtHrC,kBAAAC;AAuHc,oBAAM,gBAAgB,MAAM,MAAM,QAAQ;AAAA,gBACxC;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,CAAC;AAED,oBAAMD,cAAa,cAAc;AACjC,oBAAME,UAAQD,OAAA,cAAc,UAAd,OAAAA,OAAuB,EAAE,QAAQ,IAAI;AAEnD,0BAAY;AAAA,gBACV,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,iBAAiB;AAAA,sBACf,QAAQ,MACND,YAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,oBACzD;AAAA,oBACA,mBAAmBE,OAAM;AAAA,kBAC3B;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO,EAAE,YAAAF,aAAY,OAAAE,OAAM;AAAA,YAC7B;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED,aAAK;AAAA,UACH,0BAA0B;AAAA,YACxB;AAAA,YACA,YAAY;AAAA,cACV,iBAAiB;AAAA,gBACf,QAAQ,MACNF,YAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,cACzD;AAAA,cACA,mBAAmB,MAAM;AAAA,YAC3B;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO,IAAI,uBAAuB,EAAE,QAAQ,YAAAA,aAAY,MAAM,CAAC;AAAA,MACjE;AAGA,YAAM,cAAc,WAAW,QAAQ,oBAAoB;AAG3D,YAAM,aAA+B,CAAC;AACtC,UAAI,SAAS;AAEb,iBAAW,SAAS,aAAa;AAC/B,cAAM,EAAE,YAAY,oBAAoB,MAAM,IAAI,MAAM,MAAM,MAAM;AAElE,iBAAO,WAAW;AAAA,YAChB,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA;AAAA,gBAEH,aAAa;AAAA,kBACX,OAAO,MAAM,MAAM,IAAI,WAAS,KAAK,UAAU,KAAK,CAAC;AAAA,gBACvD;AAAA,cACF;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAM,gBAAe;AA7LrC,kBAAAC;AA8Lc,oBAAM,gBAAgB,MAAM,MAAM,QAAQ;AAAA,gBACxC,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF,CAAC;AAED,oBAAMD,cAAa,cAAc;AACjC,oBAAME,UAAQD,OAAA,cAAc,UAAd,OAAAA,OAAuB,EAAE,QAAQ,IAAI;AAEnD,0BAAY;AAAA,gBACV,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,iBAAiB;AAAA,sBACf,QAAQ,MACND,YAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,oBACzD;AAAA,oBACA,mBAAmBE,OAAM;AAAA,kBAC3B;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO,EAAE,YAAAF,aAAY,OAAAE,OAAM;AAAA,YAC7B;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED,mBAAW,KAAK,GAAG,kBAAkB;AACrC,kBAAU,MAAM;AAAA,MAClB;AAEA,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,iBAAiB;AAAA,cACf,QAAQ,MACN,WAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,YACzD;AAAA,YACA,mBAAmB;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,uBAAuB;AAAA,QAChC;AAAA,QACA;AAAA,QACA,OAAO,EAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAEA,IAAM,yBAAN,MAAsE;AAAA,EAKpE,YAAY,SAIT;AACD,SAAK,SAAS,QAAQ;AACtB,SAAK,aAAa,QAAQ;AAC1B,SAAK,QAAQ,QAAQ;AAAA,EACvB;AACF;;;AChQA,IAAAC,yBAA0C;AAmB1C,eAAsB,cAAc;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AACF,GAsDiC;AAC/B,QAAM,EAAE,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE9D,QAAM,EAAE,OAAO,IAAI,MAAM;AAAA,IAAM,MAC7B,MAAM,WAAW;AAAA,MACf;AAAA,MACA,GAAG,gBAAK;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA,iBAAiB,4CAAmB,CAAC;AAAA,IACvC,CAAC;AAAA,EACH;AAE
A,SAAO,IAAI,2BAA2B,EAAE,cAAc,OAAO,CAAC;AAChE;AAEA,IAAM,6BAAN,MAAgE;AAAA,EAG9D,YAAY,SAA0C;AACpD,SAAK,SAAS,QAAQ,aAAa,IAAI,aAAW;AAAA,MAChD;AAAA,MACA,IAAI,aAAa;AACf,mBAAO,kDAA0B,KAAK,MAAM;AAAA,MAC9C;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,OAAO,CAAC;AAAA,EACtB;AACF;;;AClHA,IAAAC,yBAAiD;;;ACDjD,IAAAC,mBAA2B;AAI3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AANhC,IAAAE;AAmBO,IAAM,yBAAN,cAAqC,4BAAW;AAAA,EAkBrD,YAAY;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,MAAAC;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAMG;AACD,UAAM,EAAE,MAAAJ,OAAM,SAAS,MAAM,CAAC;AA9BhC,SAAkBG,OAAU;AAgC1B,SAAK,OAAOC;AACZ,SAAK,WAAW;AAChB,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,OAAO,WAAW,OAAiD;AACjE,WAAO,4BAAW,UAAU,OAAOH,OAAM;AAAA,EAC3C;AACF;AAxCoBE,MAAAD;;;ACpBpB,IAAAG,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,gBAAN,cAA4B,4BAAW;AAAA,EAO5C,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,SAAS,OACf,sBAAsB,GAAG,KAAK,UAAU,IAAI,UAAU,KACtD,sBAAsB,GAAG,KAAK,KAAK;AAAA,EACzC,GAMG;AACD,UAAM,EAAE,MAAAH,OAAM,SAAS,MAAM,CAAC;AArBhC,SAAkBG,OAAU;AAuB1B,SAAK,MAAM;AACX,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,OAAO,WAAW,OAAwC;AACxD,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA/BoBE,MAAAD;;;ACLpB,eAAsB,SAAS;AAAA,EAC7B;AAAA,EACA,sBAAsB;AACxB,GAMG;AAXH,MAAAE;AAYE,QAAM,UAAU,IAAI,SAAS;AAC7B,MAAI;AACF,UAAM,WAAW,MAAM,oBAAoB,OAAO;AAElD,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,cAAc;AAAA,QACtB,KAAK;AAAA,QACL,YAAY,SAAS;AAAA,QACrB,YAAY,SAAS;AAAA,MACvB,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,MAAM,IAAI,WAAW,MAAM,SAAS,YAAY,CAAC;AAAA,MACjD,WAAUA,OAAA,SAAS,QAAQ,IAAI,cAAc,MAAnC,OAAAA,OAAwC;AAAA,IACpD;AAAA,EACF,SAAS,OAAO;AACd,QAAI,cAAc,WAAW,KAAK,GAAG;AACnC,YAAM;AAAA,IACR;AAEA,UAAM,IAAI,cAAc,EAAE,KAAK,SAAS,OAAO,MAAM,CAAC;AAAA,EACxD;AACF;;;ACnCA,IAAM,qBAAqB;AAAA,EACzB,EAAE,UAAU,aAAsB,OAAO,CAAC,IAAM,IAAM,EAAI,EAAE;AAAA,EAC5D,EAAE,UAAU,aAAsB,OAAO,CAAC,KAAM,IAAM,IAAM,EAAI,EAAE;AAAA,EAClE,EAAE,UAAU,cAAuB,OAAO,CAAC,KAAM,GAAI,EAAE;AAAA,EACvD,EAAE,UAAU,cAAuB,OAAO,CAAC,IAAM,IAAM,IAAM,EAAI,EAAE;AACrE;AAEO,SAAS,oBACd,OACqE;AACrE,aAAW,EAAE,OAAO,SAAS,KAAK,oBAAoB;AACpD,QACE,MAAM,UAAU,MAAM,UACtB,MAAM,MAAM,CAAC,MAAM,UAAU,MAAM,KAAK,MAAM,IAAI,GAClD;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;;;ACpBA,IAAAC,yBAGO;;;ACHP,IAAAC,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,0BAAN,cAAsC,4BAAW;AAAA,EAKtD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA,UAAU,+FAA+F,OAAO,OAAO;AAAA,EACzH,GAIG;AACD,UAAM,EAAE,MAAAH,OAAM,SAAS,MAAM,CAAC;AAbhC,SAAkBG,OAAU;AAe1B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,OAAO,WAAW,OAAkD;AAClE,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AArBoBE,MAAAD;;;ADFpB,iBAAkB;AAUX,IAAM,oBAA4C,aAAE,MAAM;AAAA,EAC/D,aAAE,OAAO;AAAA,EACT,aAAE,WAAW,UAAU;AAAA,EACvB,aAAE,WAAW,WAAW;AAAA,EACxB,aAAE;AAAA;AAAA,IAEA,CAAC,UAAiC;AArBtC,UAAAE,MAAA;AAsBM,oBAAAA,OAAA,WAAW,WAAX,gBAAAA,KAAmB,SAAS,WAA5B,YAAsC;AAAA;AAAA,IACxC,EAAE,SAAS,mBAAmB;AAAA,EAChC;AACF,CAAC;AAQM,SAAS,iCAAiC,SAA8B;AAC7E,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,mBAAmB,aAAa;AAClC,eAAO,kDAA0B,IAAI,WAAW,OAAO,CAAC;AAAA,EAC1D;AAEA,aAAO,kDAA0B,OAAO;AAC1C;AAQO,SAAS,+BACd,SACY;AACZ,MAAI,mBAAmB,YAAY;AACjC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,YAAY,UAAU;AAC/B,QAAI;AACF,iBAAO,kDAA0B,OAAO;AAAA,IAC1C,SAAS,OAAO;AACd,YAAM,IAAI,wBAAwB;AAAA,QAChC,SACE;AAAA,QACF;AAAA,QACA,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,mBAAmB,aAAa;AAClC,WAAO,IAAI,WAAW,OAAO;AAAA,EAC/B;AAEA,QAAM,IAAI,wBAAwB,EAAE,QAAQ,CAAC;AAC/C;AAQO,SAAS,wBAAwB,YAAgC;AACtE,MAAI;AACF,WAAO,IAAI,YAAY,EAAE,OAAO,UAAU;AAAA,EAC5C,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,m
CAAmC;AAAA,EACrD;AACF;;;AE1FA,IAAAC,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,0BAAN,cAAsC,4BAAW;AAAA,EAKtD,YAAY;AAAA,IACV;AAAA,IACA,UAAU,0BAA0B,IAAI;AAAA,EAC1C,GAGG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAXzB,SAAkBG,OAAU;AAa1B,SAAK,OAAO;AAAA,EACd;AAAA,EAEA,OAAO,WAAW,OAAkD;AAClE,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AAnBoBE,MAAAD;;;ACPb,SAAS,aAAa,SAG3B;AACA,MAAI;AACF,UAAM,CAAC,QAAQ,aAAa,IAAI,QAAQ,MAAM,GAAG;AACjD,WAAO;AAAA,MACL,UAAU,OAAO,MAAM,GAAG,EAAE,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC3C;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,WAAO;AAAA,MACL,UAAU;AAAA,MACV,eAAe;AAAA,IACjB;AAAA,EACF;AACF;;;ACIA,eAAsB,6BAA6B;AAAA,EACjD;AAAA,EACA,yBAAyB;AAAA,EACzB,mBAAmB,MAAM;AAAA,EACzB,yBAAyB;AAC3B,GAKmC;AACjC,QAAM,mBAAmB,MAAM;AAAA,IAC7B,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAI,OAAO,UAAU,OACjB,CAAC,EAAE,MAAM,UAAmB,SAAS,OAAO,OAAO,CAAC,IACpD,CAAC;AAAA,IACL,GAAG,OAAO,SAAS;AAAA,MAAI,aACrB,8BAA8B,SAAS,gBAAgB;AAAA,IACzD;AAAA,EACF;AACF;AASO,SAAS,8BACd,SACA,kBAIwB;AACxB,QAAM,OAAO,QAAQ;AACrB,UAAQ,MAAM;AAAA,IACZ,KAAK,UAAU;AACb,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ;AAAA,QACjB,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AACX,UAAI,OAAO,QAAQ,YAAY,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AAAA,UACjD,kBAAkB,QAAQ;AAAA,QAC5B;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QACd,IAAI,UAAQ,+BAA+B,MAAM,gBAAgB,CAAC,EAElE,OAAO,UAAQ,KAAK,SAAS,UAAU,KAAK,SAAS,EAAE;AAAA,QAC1D,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,KAAK,aAAa;AAChB,UAAI,OAAO,QAAQ,YAAY,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AAAA,UACjD,kBAAkB,QAAQ;AAAA,QAC5B;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QACd;AAAA;AAAA,UAEC,UAAQ,KAAK,SAAS,UAAU,KAAK,SAAS;AAAA,QAChD,EACC,IAAI,UAAQ;AACX,gBAAM,EAAE,+BAA+B,GAAG,KAAK,IAAI;AACnD,iBAAO;AAAA,YACL,GAAG;AAAA,YACH,kBAAkB;AAAA,UACpB;AAAA,QACF,CAAC;AAAA,QACH,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AACX,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ,IAAI,WAAS;AAAA,UACpC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA,UACb,SAAS,KAAK;AAAA,UACd,SAAS,KAAK;AAAA,UACd,kBAAkB,KAAK;AAAA,QACzB,EAAE;AAAA,QACF,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,wBAAwB,EAAE,MAAM,iBAAiB,CAAC;AAAA,IAC9D;AAAA,EACF;AACF;AAKA,eAAe,eACb,UACA,wBACA,wBACA,kBAC6E;AAC7E,QAAM,OAAO,SACV,OAAO,aAAW,QAAQ,SAAS,MAAM,EACzC,IAAI,aAAW,QAAQ,OAAO,EAC9B;AAAA,IAAO,CAAC,YACP,MAAM,QAAQ,OAAO;AAAA,EACvB,EACC,KAAK,EACL;AAAA,IACC,CAAC,SACC,KAAK,SAAS,WAAW,KAAK,SAAS;AAAA,EAC3C,EAKC;AAAA,IACC,CAAC,SACC,EAAE,KAAK,SAAS,WAAW,2BAA2B;AAAA,EAC1D,EACC,IAAI,UAAS,KAAK,SAAS,UAAU,KAAK,QAAQ,KAAK,IAAK,EAC5D;AAAA,IAAI;AAAA;AAAA,MAEH,OAAO,SAAS,aACf,KAAK,WAAW,OAAO,KAAK,KAAK,WAAW,QAAQ,KACjD,IAAI,IAAI,IAAI,IACZ;AAAA;AAAA,EACN,EACC,OAAO,CAAC,UAAwB,iBAAiB,GAAG,EAIpD,OAAO,SAAO,CAAC,iBAAiB,GAAG,CAAC;AAGvC,QAAM,mBAAmB,MAAM,QAAQ;AAAA,IACrC,KAAK,IAAI,OAAM,SAAQ;AAAA,MACrB;AAAA,MACA,MAAM,MAAM,uBAAuB,EAAE,IAAI,CAAC;AAAA,IAC5C,EAAE;AAAA,EACJ;AAEA,SAAO,OAAO;AAAA,IACZ,iBAAiB,IAAI,CAAC,EAAE,KAAK,KAAK,MAAM,CAAC,IAAI,SAAS,GAAG,IAAI,CAAC;AAAA,EAChE;AACF;AAUA,SAAS,+BACP,MACA,kBAO0B;AArN5B,MAAAE;AAsNE,MAAI,KAAK,SAAS,QAAQ;AACxB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAM,KAAK;AAAA,MACX,kBAAkB,KAAK;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,WAA+B,KAAK;AACxC,MAAI;AACJ,MAAI;AACJ,MAAI;AAEJ,QAAM,OAAO,KAAK;AAClB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,KAAK;AACZ;AAAA,IACF,KAAK;AACH,aAAO,KAAK;AACZ;AAAA,IACF;AACE,YAAM,IAAI,MAAM,0BAA0B,IAAI,EAAE;AAAA,EACpD;AAIA,MAAI;AACF,cAAU,OAAO,
SAAS,WAAW,IAAI,IAAI,IAAI,IAAI;AAAA,EACvD,SAAS,OAAO;AACd,cAAU;AAAA,EACZ;AAKA,MAAI,mBAAmB,KAAK;AAE1B,QAAI,QAAQ,aAAa,SAAS;AAChC,YAAM,EAAE,UAAU,iBAAiB,cAAc,IAAI;AAAA,QACnD,QAAQ,SAAS;AAAA,MACnB;AAEA,UAAI,mBAAmB,QAAQ,iBAAiB,MAAM;AACpD,cAAM,IAAI,MAAM,mCAAmC,IAAI,EAAE;AAAA,MAC3D;AAEA,iBAAW;AACX,uBAAiB,+BAA+B,aAAa;AAAA,IAC/D,OAAO;AAML,YAAM,iBAAiB,iBAAiB,QAAQ,SAAS,CAAC;AAC1D,UAAI,gBAAgB;AAClB,yBAAiB,eAAe;AAChC,iDAAa,eAAe;AAAA,MAC9B,OAAO;AACL,yBAAiB;AAAA,MACnB;AAAA,IACF;AAAA,EACF,OAAO;AAGL,qBAAiB,+BAA+B,OAAO;AAAA,EACzD;AAIA,UAAQ,MAAM;AAAA,IACZ,KAAK,SAAS;AAKZ,UAAI,0BAA0B,YAAY;AACxC,oBAAWA,OAAA,oBAAoB,cAAc,MAAlC,OAAAA,OAAuC;AAAA,MACpD;AACA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,OAAO;AAAA,QACP;AAAA,QACA,kBAAkB,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AAEX,UAAI,YAAY,MAAM;AACpB,cAAM,IAAI,MAAM,oCAAoC;AAAA,MACtD;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MACE,0BAA0B,aACtB,iCAAiC,cAAc,IAC/C;AAAA,QACN;AAAA,QACA,kBAAkB,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AACF;;;ACzTO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGE;AACA,MAAI,aAAa,MAAM;AACrB,QAAI,CAAC,OAAO,UAAU,SAAS,GAAG;AAChC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,YAAY,GAAG;AACjB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,eAAe,MAAM;AACvB,QAAI,OAAO,gBAAgB,UAAU;AACnC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,mBAAmB,MAAM;AAC3B,QAAI,OAAO,oBAAoB,UAAU;AACvC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,oBAAoB,MAAM;AAC5B,QAAI,OAAO,qBAAqB,UAAU;AACxC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,QAAI,CAAC,OAAO,UAAU,IAAI,GAAG;AAC3B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,aAAa,oCAAe;AAAA,IAC5B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eACE,iBAAiB,QAAQ,cAAc,SAAS,IAC5C,gBACA;AAAA,IACN;AAAA,EACF;AACF;;;AC9GA,IAAAC,mBAAmC;AACnC,IAAAC,yBAAkC;AAClC,IAAAC,cAAkB;;;ACFlB,IAAAC,cAAkB;;;ACClB,IAAAC,cAAkB;;;ACAlB,IAAAC,cAAkB;AAEX,IAAM,kBAAwC,cAAE;AAAA,EAAK,MAC1D,cAAE,MAAM;AAAA,IACN,cAAE,KAAK;AAAA,IACP,cAAE,OAAO;AAAA,IACT,cAAE,OAAO;AAAA,IACT,cAAE,QAAQ;AAAA,IACV,cAAE,OAAO,cAAE,OAAO,GAAG,eAAe;AAAA,IACpC,cAAE,MAAM,eAAe;AAAA,EACzB,CAAC;AACH;;;ADDO,IAAM,yBAAsD,cAAE;AAAA,EACnE,cAAE,OAAO;AAAA,EACT,cAAE,OAAO,cAAE,OAAO,GAAG,eAAe;AACtC;;;AEdA,IAAAC,cAAkB;;;ACAlB,IAAAC,cAAkB;AAcX,IAAM,0BAAwD,cAAE;AAAA,EACrE,cAAE,MAAM;AAAA,IACN,cAAE,OAAO,EAAE,MAAM,cAAE,QAAQ,MAAM,GAAG,MAAM,cAAE,OAAO,EAAE,CAAC;AAAA,IACtD,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,QAAQ,OAAO;AAAA,MACvB,MAAM,cAAE,OAAO;AAAA,MACf,UAAU,cAAE,OAAO,EAAE,SAAS;AAAA,IAChC,CAAC;AAAA,EACH,CAAC;AACH;;;ADOO,IAAM,iBAAsC,cAAE,OAAO;AAAA,EAC1D,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,MAAM,cAAE,OAAO;AAAA,EACf,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AA6BM,IAAM,kBAAwC,cAAE,OAAO;AAAA,EAC5D,MAAM,cAAE,QAAQ,OAAO;AAAA,EACvB,OAAO,cAAE,MAAM,CAAC,mBAAmB,cAAE,WAAW,GAAG,CAAC,CAAC;AAAA,EACrD,UAAU,cAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AA6BM,IAAM,iBAAsC,cAAE,OAAO;AAAA,EAC1D,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,MAAM,cAAE,MAAM,CAAC,mBAAmB,cA
AE,WAAW,GAAG,CAAC,CAAC;AAAA,EACpD,UAAU,cAAE,OAAO;AAAA,EACnB,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AA+BM,IAAM,qBAA8C,cAAE,OAAO;AAAA,EAClE,MAAM,cAAE,QAAQ,WAAW;AAAA,EAC3B,YAAY,cAAE,OAAO;AAAA,EACrB,UAAU,cAAE,OAAO;AAAA,EACnB,MAAM,cAAE,QAAQ;AAClB,CAAC;AAyCM,IAAM,uBAAkD,cAAE,OAAO;AAAA,EACtE,MAAM,cAAE,QAAQ,aAAa;AAAA,EAC7B,YAAY,cAAE,OAAO;AAAA,EACrB,UAAU,cAAE,OAAO;AAAA,EACnB,QAAQ,cAAE,QAAQ;AAAA,EAClB,SAAS,wBAAwB,SAAS;AAAA,EAC1C,SAAS,cAAE,QAAQ,EAAE,SAAS;AAAA,EAC9B,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;;;AHxJM,IAAM,0BAAwD,cAAE,OAAO;AAAA,EAC5E,MAAM,cAAE,QAAQ,QAAQ;AAAA,EACxB,SAAS,cAAE,OAAO;AAAA,EAClB,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAiBM,IAAM,wBAAoD,cAAE,OAAO;AAAA,EACxE,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,SAAS,cAAE,MAAM;AAAA,IACf,cAAE,OAAO;AAAA,IACT,cAAE,MAAM,cAAE,MAAM,CAAC,gBAAgB,iBAAiB,cAAc,CAAC,CAAC;AAAA,EACpE,CAAC;AAAA,EACD,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAsBM,IAAM,6BACX,cAAE,OAAO;AAAA,EACP,MAAM,cAAE,QAAQ,WAAW;AAAA,EAC3B,SAAS,cAAE,MAAM;AAAA,IACf,cAAE,OAAO;AAAA,IACT,cAAE,MAAM,cAAE,MAAM,CAAC,gBAAgB,kBAAkB,CAAC,CAAC;AAAA,EACvD,CAAC;AAAA,EACD,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAsBI,IAAM,wBAAoD,cAAE,OAAO;AAAA,EACxE,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,SAAS,cAAE,MAAM,oBAAoB;AAAA,EACrC,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAiBM,IAAM,oBAA4C,cAAE,MAAM;AAAA,EAC/D;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;;;AK7IM,SAAS,iBACd,QACsC;AACtC,MAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AAC1B,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,IAAI,kCAAkC;AAErE,MAAI,gBAAgB,KAAK,OAAK,MAAM,uBAAuB,GAAG;AAC5D,WAAO;AAAA,EACT,WACE,gBAAgB;AAAA,IACd,OAAK,MAAM,6BAA6B,MAAM;AAAA,EAChD,GACA;AACA,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,mCACP,SAC2E;AAC3E,MACE,OAAO,YAAY,YACnB,YAAY,SACX,QAAQ,SAAS;AAAA,EAChB,QAAQ,SAAS;AAAA,EACjB,qBAAqB;AAAA,EACrB,8BAA8B,UAChC;AACA,WAAO;AAAA,EACT,WACE,OAAO,YAAY,YACnB,YAAY,QACZ,aAAa,YACZ,MAAM,QAAQ,QAAQ,OAAO;AAAA,EAC5B,mCAAmC,UACrC;AACA,WAAO;AAAA,EACT,WACE,OAAO,YAAY,YACnB,YAAY,QACZ,UAAU,WACV,aAAa,WACb,OAAO,QAAQ,YAAY,YAC3B,CAAC,UAAU,QAAQ,aAAa,MAAM,EAAE,SAAS,QAAQ,IAAI,GAC7D;AACA,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;;;AC5CO,SAAS,mBAAmB,aAA0C;AAd7E,MAAAC,MAAA;AAeE,QAAM,QAAuB,CAAC;AAE9B,aAAW,cAAc,aAAa;AACpC,QAAI;AAEJ,QAAI;AACF,YAAM,IAAI,IAAI,WAAW,GAAG;AAAA,IAC9B,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,gBAAgB,WAAW,GAAG,EAAE;AAAA,IAClD;AAEA,YAAQ,IAAI,UAAU;AAAA,MACpB,KAAK;AAAA,MACL,KAAK,UAAU;AACb,aAAIA,OAAA,WAAW,gBAAX,gBAAAA,KAAwB,WAAW,WAAW;AAChD,gBAAM,KAAK,EAAE,MAAM,SAAS,OAAO,IAAI,CAAC;AAAA,QAC1C,OAAO;AACL,cAAI,CAAC,WAAW,aAAa;AAC3B,kBAAM,IAAI;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,UAAU,WAAW;AAAA,UACvB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,KAAK,SAAS;AACZ,YAAI;AACJ,YAAI;AACJ,YAAI;AAEJ,YAAI;AACF,WAAC,QAAQ,aAAa,IAAI,WAAW,IAAI,MAAM,GAAG;AAClD,qBAAW,OAAO,MAAM,GAAG,EAAE,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,QAC9C,SAAS,OAAO;AACd,gBAAM,IAAI,MAAM,8BAA8B,WAAW,GAAG,EAAE;AAAA,QAChE;AAEA,YAAI,YAAY,QAAQ,iBAAiB,MAAM;AAC7C,gBAAM,IAAI,MAAM,4BAA4B,WAAW,GAAG,EAAE;AAAA,QAC9D;AAEA,aAAI,gBAAW,gBAAX,mBAAwB,WAAW,WAAW;AAChD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,OAAO,+BAA+B,aAAa;AAAA,UACrD,CAAC;AAAA,QACH,YAAW,gBAAW,gBAAX,mBAAwB,WAAW,UAAU;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,cACJ,+BAA+B,aAAa;AAAA,YAC9C;AAAA,UACF,CAAC;AAAA,QACH,OAAO;AACL,cAAI,CAAC,WAAW,aAAa;AAC3B,kBAAM,IAAI;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,UAAU,WAAW;AAAA,UACvB,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,IAAI,MAAM,6BAA6B,IAAI,QAAQ,EAAE;AAAA,MAC7D;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACnGA,IAAAC,mBAA2B;AAG3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAA
O,IAAID,OAAM;AALhC,IAAAE;AAOO,IAAM,yBAAN,cAAqC,4BAAW;AAAA,EAKrD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,EACF,GAGG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAXzB,SAAkBG,OAAU;AAa1B,SAAK,kBAAkB;AAAA,EACzB;AAAA,EAEA,OAAO,WAAW,OAAiD;AACjE,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AAnBoBE,MAAAD;;;ACEb,SAAS,sBAEd,UAA4B,SAA6B;AAZ3D,MAAAE;AAaE,QAAM,SAAQA,OAAA,mCAAS,UAAT,OAAAA,OAAmB,CAAC;AAClC,QAAM,eAA8B,CAAC;AAErC,aAAW,WAAW,UAAU;AAC9B,UAAM,EAAE,MAAM,SAAS,iBAAiB,yBAAyB,IAC/D;AAEF,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,SAAS,2BACL;AAAA,YACE,EAAE,MAAM,QAAQ,MAAM,QAAQ;AAAA,YAC9B,GAAG,mBAAmB,wBAAwB;AAAA,UAChD,IACA;AAAA,QACN,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,mBAAmB,MAAM;AAC3B,uBAAa,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAChD;AAAA,QACF;AAGA,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,SAAS;AAAA,YACP,EAAE,MAAM,QAAQ,MAAM,QAAQ;AAAA,YAC9B,GAAG,gBAAgB;AAAA,cACjB,CAAC,EAAE,YAAY,UAAU,KAAK,OAAqB;AAAA,gBACjD,MAAM;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAGD,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,SAAS,gBAAgB,IAAI,CAAC,mBAAmC;AAC/D,gBAAI,EAAE,YAAY,iBAAiB;AACjC,oBAAM,IAAI,uBAAuB;AAAA,gBAC/B,iBAAiB;AAAA,gBACjB,SACE,wCACA,KAAK,UAAU,cAAc;AAAA,cACjC,CAAC;AAAA,YACH;AAEA,kBAAM,EAAE,YAAY,UAAU,OAAO,IAAI;AAEzC,kBAAMC,QAAO,MAAM,QAAQ;AAC3B,oBAAOA,SAAA,gBAAAA,MAAM,qCAAoC,OAC7C;AAAA,cACE,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,QAAQA,MAAK,iCAAiC,MAAM;AAAA,cACpD,sBACEA,MAAK,iCAAiC,MAAM;AAAA,YAChD,IACA;AAAA,cACE,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACN,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AAEX;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,uBAAuB;AAAA,UAC/B,iBAAiB;AAAA,UACjB,SAAS,qBAAqB,gBAAgB;AAAA,QAChD,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ATzFO,SAAS,kBAA0D;AAAA,EACxE;AAAA,EACA;AACF,GAGuB;AACrB,MAAI,OAAO,UAAU,QAAQ,OAAO,YAAY,MAAM;AACpD,UAAM,IAAI,oCAAmB;AAAA,MAC3B;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,UAAU,QAAQ,OAAO,YAAY,MAAM;AACpD,UAAM,IAAI,oCAAmB;AAAA,MAC3B;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,MAAI,OAAO,UAAU,QAAQ,OAAO,OAAO,WAAW,UAAU;AAC9D,UAAM,IAAI,oCAAmB;AAAA,MAC3B;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,MAAI,OAAO,UAAU,MAAM;AAEzB,QAAI,OAAO,OAAO,WAAW,UAAU;AACrC,YAAM,IAAI,oCAAmB;AAAA,QAC3B;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ,OAAO;AAAA,MACf,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,SAAS,OAAO;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,YAAY,MAAM;AAC3B,UAAM,aAAa,iBAAiB,OAAO,QAAQ;AAEnD,QAAI,eAAe,SAAS;AAC1B,YAAM,IAAI,oCAAmB;AAAA,QAC3B;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WACJ,eAAe,gBACX,sBAAsB,OAAO,UAAyB;AAAA,MACpD;AAAA,IACF,CAAC,IACA,OAAO;AAEd,UAAM,uBAAmB,0CAAkB;AAAA,MACzC,OAAO;AAAA,MACP,QAAQ,cAAE,MAAM,iBAAiB;AAAA,IACnC,CAAC;AAED,QAAI,CAAC,iBAAiB,SAAS;AAC7B,YAAM,IAAI,oCAAmB;AAAA,QAC3B;AAAA,QACA,SAAS;AAAA,QACT,OAAO,iBAAiB;AAAA,MAC1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,aAAa;AAC/B;;;AUxFO,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA;AACF,GAGuB;AACrB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa,eAAe;AAAA,EAC9B;AACF;AAEO,SAAS,sBACd,QACA,QACoB;AACpB,SAAO;AAAA,IACL,cAAc,OAAO,eAAe,OAAO;AAAA,IAC3C,kBAAkB,OAAO,mBAAmB,OAAO;AAAA,IACnD,aAAa,OAAO,cAAc,OAAO;AAAA,EAC3C;AACF;;;ACnDA,IAAM,wBAAwB;AAC9B,IAAM,wBACJ;AACF,IAAM,yBAAyB;AAExB,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA,eAAe,UAAU,OAAO,wBAAwB;AAAA,EACxD,eAAe,UAAU,OACrB,wBACA;AACN,GAKW;AACT,SAAO;AAAA,IACL,UAAU,QAAQ,OAAO,SAAS,IAAI,SAAS;AAAA,IAC/C,UAAU,QA
AQ,OAAO,SAAS,IAAI,KAAK;AAAA;AAAA,IAC3C;AAAA,IACA,UAAU,OAAO,KAAK,UAAU,MAAM,IAAI;AAAA,IAC1C;AAAA,EACF,EACG,OAAO,UAAQ,QAAQ,IAAI,EAC3B,KAAK,IAAI;AACd;;;AC7BA,IAAAC,oBAQO;AACP,IAAAC,yBAAoD;AACpD,IAAAC,mBAA8C;;;ACRvC,SAAS,0BACd,QACwB;AACxB,QAAM,SAAS,OAAO,YAAY,IAAI,gBAAsB,CAAC;AAE7D,EAAC,OAAkC,OAAO,aAAa,IAAI,MAAM;AAC/D,UAAM,SAAS,OAAO,UAAU;AAChC,WAAO;AAAA,MACL,MAAM,OAAmC;AACvC,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,eAAO,OAAO,EAAE,MAAM,MAAM,OAAO,OAAU,IAAI,EAAE,MAAM,OAAO,MAAM;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ADkCA,IAAM,yBAAsE;AAAA,EAC1E,MAAM;AAAA,EACN,YAAY;AAAA,EAEZ,sBAAsB,EAAE,OAAO,UAAU,GAAG;AAC1C,WAAO,EAAE,SAAS,MAAM,OAAO,EAAE,SAAS,OAAO,UAAU,EAAE;AAAA,EAC/D;AAAA,EAEA,oBACE,OACA,SAK6B;AAC7B,WAAO,UAAU,SACb;AAAA,MACE,SAAS;AAAA,MACT,OAAO,IAAI,uBAAuB;AAAA,QAChC,SAAS;AAAA,QACT,MAAM,QAAQ;AAAA,QACd,UAAU,QAAQ;AAAA,QAClB,OAAO,QAAQ;AAAA,MACjB,CAAC;AAAA,IACH,IACA,EAAE,SAAS,MAAM,MAAM;AAAA,EAC7B;AAAA,EAEA,sBAAsB;AACpB,UAAM,IAAI,gDAA8B;AAAA,MACtC,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,uBAAuB,CAC3B,YACwD;AAAA,EACxD,MAAM;AAAA,EACN,YAAY,OAAO;AAAA,EAEnB,sBAAsB,EAAE,OAAO,UAAU,GAAG;AAC1C,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA;AAAA,QAEL,SAAS;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,oBAAoB,OAAwD;AAC1E,eAAO,0CAAkB,EAAE,OAAO,OAAO,CAAC;AAAA,EAC5C;AAAA,EAEA,sBAAsB;AACpB,UAAM,IAAI,gDAA8B;AAAA,MACtC,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,sBAAsB,CAC1B,WACuE;AAEvE,QAAM,EAAE,SAAS,GAAG,WAAW,IAAI,OAAO;AAE1C,SAAO;AAAA,IACL,MAAM;AAAA;AAAA;AAAA;AAAA,IAKN,YAAY;AAAA,MACV,SAAS;AAAA,MACT,MAAM;AAAA,MACN,YAAY;AAAA,QACV,UAAU,EAAE,MAAM,SAAS,OAAO,WAAW;AAAA,MAC/C;AAAA,MACA,UAAU,CAAC,UAAU;AAAA,MACrB,sBAAsB;AAAA,IACxB;AAAA,IAEA,sBAAsB,EAAE,OAAO,cAAc,cAAc,aAAa,GAAG;AA1I/E,UAAAC;AA4IM,UAAI,KAAC,gCAAa,KAAK,KAAK,KAAC,+BAAY,MAAM,QAAQ,GAAG;AACxD,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,IAAI,sCAAoB;AAAA,YAC7B;AAAA,YACA,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,aAAa,MAAM;AACzB,YAAM,cAA8B,CAAC;AAErC,eAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,cAAM,UAAU,WAAW,CAAC;AAC5B,cAAM,aAAS,0CAAkB,EAAE,OAAO,SAAS,OAAO,CAAC;AAM3D,YAAI,MAAM,WAAW,SAAS,KAAK,CAAC,cAAc;AAChD;AAAA,QACF;AAEA,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO;AAAA,QACT;AAEA,oBAAY,KAAK,OAAO,KAAK;AAAA,MAC/B;AAGA,YAAM,yBAAwBA,OAAA,6CAAc,WAAd,OAAAA,OAAwB;AAEtD,UAAI,YAAY;AAEhB,UAAI,cAAc;AAChB,qBAAa;AAAA,MACf;AAEA,UAAI,wBAAwB,GAAG;AAC7B,qBAAa;AAAA,MACf;AAEA,mBAAa,YACV,MAAM,qBAAqB,EAC3B,IAAI,aAAW,KAAK,UAAU,OAAO,CAAC,EACtC,KAAK,GAAG;AAEX,UAAI,cAAc;AAChB,qBAAa;AAAA,MACf;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,oBACE,OACkC;AAElC,UAAI,KAAC,gCAAa,KAAK,KAAK,KAAC,+BAAY,MAAM,QAAQ,GAAG;AACxD,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,IAAI,sCAAoB;AAAA,YAC7B;AAAA,YACA,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,aAAa,MAAM;AAGzB,iBAAW,WAAW,YAAY;AAChC,cAAM,aAAS,0CAAkB,EAAE,OAAO,SAAS,OAAO,CAAC;AAC3D,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO,EAAE,SAAS,MAAM,OAAO,WAA6B;AAAA,IAC9D;AAAA,IAEA,oBACE,gBACA;AACA,UAAI,oBAAoB;AAExB,aAAO;AAAA,QACL,eAAe;AAAA,UACb,IAAI,gBAAsD;AAAA,YACxD,UAAU,OAAO,YAAY;AAC3B,sBAAQ,MAAM,MAAM;AAAA,gBAClB,KAAK,UAAU;AACb,wBAAM,QAAQ,MAAM;AAGpB,yBAEE,oBAAoB,MAAM,QAC1B,qBACA;AACA,+BAAW,QAAQ,MAAM,iBAAiB,CAAC;AAAA,kBAC7C;AAEA;AAAA,gBACF;AAAA,gBAEA,KAAK;AAAA,gBACL,KAAK;AACH;AAAA,gBAEF,KAAK;AACH,6BAAW,MAAM,MAAM,KAAK;AAC5B;AAAA,gBAEF,SAAS;AACP,wBAAM,mBAA0B;AAChC,wBAAM,IAAI;AAAA,oBACR,2BAA2B,gBAAgB;AAAA,kBAC7C;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,qBAAqB,CACzB,eACsC;AACtC,SAAO;AAAA,IACL,MAAM;AAAA;AAAA;AAAA;AAAA,IAKN,YAAY;AAAA,MACV,SAAS;AAAA,MA
CT,MAAM;AAAA,MACN,YAAY;AAAA,QACV,QAAQ,EAAE,MAAM,UAAU,MAAM,WAAW;AAAA,MAC7C;AAAA,MACA,UAAU,CAAC,QAAQ;AAAA,MACnB,sBAAsB;AAAA,IACxB;AAAA,IAEA,oBAAoB,OAAsD;AAExE,UAAI,KAAC,gCAAa,KAAK,KAAK,OAAO,MAAM,WAAW,UAAU;AAC5D,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,IAAI,sCAAoB;AAAA,YAC7B;AAAA,YACA,OACE;AAAA,UACJ,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,SAAS,MAAM;AAErB,aAAO,WAAW,SAAS,MAAc,IACrC,EAAE,SAAS,MAAM,OAAO,OAAe,IACvC;AAAA,QACE,SAAS;AAAA,QACT,OAAO,IAAI,sCAAoB;AAAA,UAC7B;AAAA,UACA,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACN;AAAA,IAEA,wBAAwB;AAEtB,YAAM,IAAI,gDAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,IAEA,sBAAsB;AAEpB,YAAM,IAAI,gDAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEO,SAAS,kBAA0B;AAAA,EACxC;AAAA,EACA;AAAA,EACA;AACF,GAIkC;AAChC,UAAQ,QAAQ;AAAA,IACd,KAAK;AACH,aAAO,yBAAqB,2BAAS,MAAO,CAAC;AAAA,IAC/C,KAAK;AACH,aAAO,wBAAoB,2BAAS,MAAO,CAAC;AAAA,IAC9C,KAAK;AACH,aAAO,mBAAmB,UAA4B;AAAA,IACxD,KAAK;AACH,aAAO;AAAA,IACT,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,MAAM,uBAAuB,gBAAgB,EAAE;AAAA,IAC3D;AAAA,EACF;AACF;;;AEtWO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAOG;AACD,MACE,UAAU,QACV,WAAW,YACX,WAAW,WACX,WAAW,UACX,WAAW,aACX;AACA,UAAM,IAAI,qBAAqB;AAAA,MAC7B,WAAW;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,MAAI,WAAW,aAAa;AAC1B,QAAI,SAAS,UAAU,SAAS,QAAQ;AACtC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,qBAAqB,MAAM;AAC7B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,WAAW,UAAU;AACvB,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,WAAW,SAAS;AACtB,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,WAAW,QAAQ;AACrB,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,qBAAqB,MAAM;AAC7B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,eAAW,SAAS,YAAY;AAC9B,UAAI,OAAO,UAAU,UAAU;AAC7B,cAAM,IAAI,qBAAqB;AAAA,UAC7B,WAAW;AAAA,UACX;AAAA,UACA,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;AzBzHA,IAAM,yBAAqB,0CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AA4P1E,eAAsB,eAA+B;AAAA,EACnD;AAAA,EACA,MAAM;AAAA;AAAA,EACN,QAAQ;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA,SAAS;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B,WAAW;AAAA,IACT,YAAAC,cAAa;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,EAC/B,IAAI,CAAC;AAAA,EACL,GAAG;AACL,GA6B4C;AAC1C,gCAA8B;AAAA,IAC5B;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,c
AAc,CAAC;AAE1E,QAAM,iBAAiB,kBAAkB;AAAA,IACvC;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF,CAAC;AAGD,MAAI,eAAe,SAAS,eAAe,SAAS,QAAW;AAC7D,WAAO;AAAA,EACT;AAEA,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,GAAG,UAAU,WAAW;AAAA,EACtC,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB;AAAA,UACvB,aAAa;AAAA,UACb;AAAA,QACF,CAAC;AAAA,QACD,GAAG;AAAA;AAAA,QAEH,aAAa;AAAA,UACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,QAC1D;AAAA,QACA,aACE,eAAe,cAAc,OACzB,EAAE,OAAO,MAAM,KAAK,UAAU,eAAe,UAAU,EAAE,IACzD;AAAA,QACN,kBAAkB;AAAA,QAClB,yBAAyB;AAAA,QACzB,sBAAsB,eAAe;AAAA,QACrC,oBAAoB;AAAA,MACtB;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAxYtB,UAAAC,MAAA;AA0YM,UAAI,SAAS,UAAU,QAAQ,MAAM;AACnC,eAAO,MAAM;AAAA,MACf;AAEA,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AAEJ,cAAQ,MAAM;AAAA,QACZ,KAAK,QAAQ;AACX,gBAAM,qBAAqB,kBAAkB;AAAA,YAC3C,QAAQ;AAAA,cACN,QACE,eAAe,cAAc,OACzB,sBAAsB,EAAE,QAAQ,OAAO,CAAC,IACxC,MAAM,4BACN,SACA,sBAAsB;AAAA,gBACpB,QAAQ;AAAA,gBACR,QAAQ,eAAe;AAAA,cACzB,CAAC;AAAA,cACP;AAAA,cACA;AAAA,YACF;AAAA,YACA,OAAO;AAAA,UACT,CAAC;AAED,gBAAM,iBAAiB,MAAM,6BAA6B;AAAA,YACxD,QAAQ;AAAA,YACR,wBAAwB,MAAM;AAAA,YAC9B,kBAAkB,MAAM;AAAA,UAC1B,CAAC;AAED,gBAAM,iBAAiB,MAAM;AAAA,YAAM,MACjC,WAAW;AAAA,cACT,MAAM;AAAA,cACN,YAAY,0BAA0B;AAAA,gBACpC;AAAA,gBACA,YAAY;AAAA,kBACV,GAAG,sBAAsB;AAAA,oBACvB,aAAa;AAAA,oBACb;AAAA,kBACF,CAAC;AAAA,kBACD,GAAG;AAAA,kBACH,oBAAoB;AAAA,oBAClB,OAAO,MAAM,mBAAmB;AAAA,kBAClC;AAAA,kBACA,sBAAsB;AAAA,oBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,kBAC5C;AAAA,kBACA,oBAAoB;AAAA;AAAA,kBAGpB,iBAAiB,MAAM;AAAA,kBACvB,wBAAwB,MAAM;AAAA,kBAC9B,oCAAoC,SAAS;AAAA,kBAC7C,6BAA6B,SAAS;AAAA,kBACtC,mCAAmC,SAAS;AAAA,kBAC5C,8BAA8B,SAAS;AAAA,kBACvC,wBAAwB,SAAS;AAAA,kBACjC,wBAAwB,SAAS;AAAA,gBACnC;AAAA,cACF,CAAC;AAAA,cACD;AAAA,cACA,IAAI,OAAMC,UAAQ;AAhdhC,oBAAAD,MAAAE,KAAA;AAidgB,sBAAMC,UAAS,MAAM,MAAM,WAAW;AAAA,kBACpC,MAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,QAAQ,eAAe;AAAA,oBACvB,MAAM;AAAA,oBACN,aAAa;AAAA,kBACf;AAAA,kBACA,GAAG,oBAAoB,QAAQ;AAAA,kBAC/B,aAAa,mBAAmB;AAAA,kBAChC,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAED,sBAAM,eAAe;AAAA,kBACnB,KAAID,OAAAF,OAAAG,QAAO,aAAP,gBAAAH,KAAiB,OAAjB,OAAAE,MAAuBH,YAAW;AAAA,kBACtC,YAAW,WAAAI,QAAO,aAAP,mBAAiB,cAAjB,YAA8B,YAAY;AAAA,kBACrD,UAAS,WAAAA,QAAO,aAAP,mBAAiB,YAAjB,YAA4B,MAAM;AAAA,gBAC7C;AAEA,oBAAIA,QAAO,SAAS,QAAW;AAC7B,wBAAM,IAAI,uBAAuB;AAAA,oBAC/B,SACE;AAAA,oBACF,UAAU;AAAA,oBACV,OAAO,4BAA4BA,QAAO,KAAK;AAAA,kBACjD,CAAC;AAAA,gBACH;AAGA,gBAAAF,MAAK;AAAA,kBACH,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,4BAA4BE,QAAO;AAAA,sBACnC,sBAAsB,EAAE,QAAQ,MAAMA,QAAO,KAAK;AAAA,sBAClD,kBAAkB,aAAa;AAAA,sBAC/B,qBAAqB,aAAa;AAAA,sBAClC,yBACE,aAAa,UAAU,YAAY;AAAA,sBAErC,yBAAyBA,QAAO,MAAM;AAAA,sBACtC,6BACEA,QAAO,MAAM;AAAA;AAAA,sBAGf,kCAAkC,CAACA,QAAO,YAAY;AAAA,sBACtD,sBAAsB,aAAa;AAAA,sBACnC,yBAAyB,aAAa;AAAA,sBACtC,8BAA8BA,QAAO,MAAM;AAAA,sBAC3C,kCACEA,QAAO,MAAM;AAAA,oBACjB;AAAA,kBACF,CAAC;AAAA,gBACH;AAEA,uBAAO,EAAE,GAAGA,SAAQ,YAAYA,QAAO,MAAM,aAAa;AAAA,cAC5D;AAAA,YACF,CAAC;AAAA,UACH;AAEA,mBAAS,eAAe;AACxB,yBAAe,eAAe;AAC9B,kBAAQ,eAAe;AACvB,qBAAW,eAAe;AAC1B,wBAAc,eAAe;AAC7B,qBAAW,eAAe;AAC1B,mCAAyB,eAAe;AACxC,qBAAUH,OAAA,eAAe,YAAf,OAAAA,OAA0B,CAAC;AACrC,qBAAW,eAAe;AAE1B;AAAA,QACF;AAAA,QAEA,KAAK,QAAQ;AACX,gBAAM,qBAAqB,kBAAkB;AAAA,YAC3C,QAAQ,EAAE,QAAQ,QAAQ,SAAS;AAAA,YACnC,OAAO;AAAA,UACT,CAAC;AAED,gBAAM,iBAAiB,MAAM,6BAA6B;AAAA,YACxD,QAAQ;AAAA,YACR,wBAAwB,MAAM;AAAA,YAC9B,kBAAkB,MAAM;AAAA,UAC1B,CAAC;AACD,gBAAM,cAAc,mBAAmB;AAEvC,gBAAM,iBAAiB,MAAM;AAAA,YAAM,MACjC,WAAW;AAAA,cACT,MAAM;
AAAA,cACN,YAAY,0BAA0B;AAAA,gBACpC;AAAA,gBACA,YAAY;AAAA,kBACV,GAAG,sBAAsB;AAAA,oBACvB,aAAa;AAAA,oBACb;AAAA,kBACF,CAAC;AAAA,kBACD,GAAG;AAAA,kBACH,oBAAoB;AAAA,oBAClB,OAAO,MAAM;AAAA,kBACf;AAAA,kBACA,sBAAsB;AAAA,oBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,kBAC5C;AAAA,kBACA,oBAAoB;AAAA;AAAA,kBAGpB,iBAAiB,MAAM;AAAA,kBACvB,wBAAwB,MAAM;AAAA,kBAC9B,oCAAoC,SAAS;AAAA,kBAC7C,6BAA6B,SAAS;AAAA,kBACtC,mCAAmC,SAAS;AAAA,kBAC5C,8BAA8B,SAAS;AAAA,kBACvC,wBAAwB,SAAS;AAAA,kBACjC,wBAAwB,SAAS;AAAA,gBACnC;AAAA,cACF,CAAC;AAAA,cACD;AAAA,cACA,IAAI,OAAMC,UAAQ;AAxkBhC,oBAAAD,MAAAE,KAAA;AAykBgB,sBAAMC,UAAS,MAAM,MAAM,WAAW;AAAA,kBACpC,MAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,MAAM;AAAA,sBACJ,MAAM;AAAA,sBACN,MAAM,kCAAc;AAAA,sBACpB,aACE,gDAAqB;AAAA,sBACvB,YAAY,eAAe;AAAA,oBAC7B;AAAA,kBACF;AAAA,kBACA,GAAG,oBAAoB,QAAQ;AAAA,kBAC/B;AAAA,kBACA,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAED,sBAAM,cAAaD,OAAAF,OAAAG,QAAO,cAAP,gBAAAH,KAAmB,OAAnB,gBAAAE,IAAuB;AAE1C,sBAAM,eAAe;AAAA,kBACnB,KAAI,WAAAC,QAAO,aAAP,mBAAiB,OAAjB,YAAuBJ,YAAW;AAAA,kBACtC,YAAW,WAAAI,QAAO,aAAP,mBAAiB,cAAjB,YAA8B,YAAY;AAAA,kBACrD,UAAS,WAAAA,QAAO,aAAP,mBAAiB,YAAjB,YAA4B,MAAM;AAAA,gBAC7C;AAEA,oBAAI,eAAe,QAAW;AAC5B,wBAAM,IAAI,uBAAuB;AAAA,oBAC/B,SAAS;AAAA,oBACT,UAAU;AAAA,oBACV,OAAO,4BAA4BA,QAAO,KAAK;AAAA,kBACjD,CAAC;AAAA,gBACH;AAGA,gBAAAF,MAAK;AAAA,kBACH,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,4BAA4BE,QAAO;AAAA,sBACnC,sBAAsB,EAAE,QAAQ,MAAM,WAAW;AAAA,sBACjD,kBAAkB,aAAa;AAAA,sBAC/B,qBAAqB,aAAa;AAAA,sBAClC,yBACE,aAAa,UAAU,YAAY;AAAA,sBAErC,yBAAyBA,QAAO,MAAM;AAAA,sBACtC,6BACEA,QAAO,MAAM;AAAA;AAAA,sBAGf,kCAAkC,CAACA,QAAO,YAAY;AAAA,sBACtD,sBAAsB,aAAa;AAAA,sBACnC,yBAAyB,aAAa;AAAA,sBACtC,6BAA6BA,QAAO,MAAM;AAAA,sBAC1C,8BACEA,QAAO,MAAM;AAAA,oBACjB;AAAA,kBACF,CAAC;AAAA,gBACH;AAEA,uBAAO,EAAE,GAAGA,SAAQ,YAAY,aAAa;AAAA,cAC/C;AAAA,YACF,CAAC;AAAA,UACH;AAEA,mBAAS,eAAe;AACxB,yBAAe,eAAe;AAC9B,kBAAQ,eAAe;AACvB,qBAAW,eAAe;AAC1B,wBAAc,eAAe;AAC7B,qBAAW,eAAe;AAC1B,mCAAyB,eAAe;AACxC,qBAAU,oBAAe,YAAf,YAA0B,CAAC;AACrC,qBAAW,eAAe;AAE1B;AAAA,QACF;AAAA,QAEA,KAAK,QAAW;AACd,gBAAM,IAAI;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QAEA,SAAS;AACP,gBAAM,mBAA0B;AAChC,gBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,QACzD;AAAA,MACF;AAEA,YAAM,kBAAc,sCAAc,EAAE,MAAM,OAAO,CAAC;AAElD,UAAI,CAAC,YAAY,SAAS;AACxB,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,YAAY;AAAA,UACnB,MAAM;AAAA,UACN;AAAA,UACA,OAAO,4BAA4B,KAAK;AAAA,QAC1C,CAAC;AAAA,MACH;AAEA,YAAM,mBAAmB,eAAe;AAAA,QACtC,YAAY;AAAA,QACZ;AAAA,UACE,MAAM;AAAA,UACN;AAAA,UACA,OAAO,4BAA4B,KAAK;AAAA,QAC1C;AAAA,MACF;AAEA,UAAI,CAAC,iBAAiB,SAAS;AAC7B,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,iBAAiB;AAAA,UACxB,MAAM;AAAA,UACN;AAAA,UACA,OAAO,4BAA4B,KAAK;AAAA,QAC1C,CAAC;AAAA,MACH;AAGA,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,4BAA4B;AAAA,YAC5B,sBAAsB;AAAA,cACpB,QAAQ,MAAM,KAAK,UAAU,iBAAiB,KAAK;AAAA,YACrD;AAAA,YAEA,yBAAyB,MAAM;AAAA,YAC/B,6BAA6B,MAAM;AAAA,UACrC;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,4BAA4B;AAAA,QACrC,QAAQ,iBAAiB;AAAA,QACzB;AAAA,QACA,OAAO,4BAA4B,KAAK;AAAA,QACxC;AAAA,QACA;AAAA,QACA,UAAU;AAAA,UACR,GAAG;AAAA,UACH,SAAS,2CAAa;AAAA,QACxB;AAAA,QACA;AAAA,QACA,kBAAkB;AAAA,MACpB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAEA,IAAM,8BAAN,MAAwE;AAAA,EAUtE,YAAY,SAST;AACD,SAAK,SAAS,QAAQ;AACtB,SAAK,eAAe,QAAQ;AAC5B,SAAK,QAAQ,QAAQ;AACrB,SAAK,WAAW,QAAQ;AACxB,SAAK,gCAAgC,QAAQ;AAC7C,SAAK,WAAW,QAAQ;AACxB,SAAK,UAAU,QAAQ;AACvB,SAAK,WAAW,QAAQ;AAAA,EAC1B;AAAA,EAEA,eAAe,MAA+B;AAnwBhD,QAAAH;AAowBI,WAAO,IAAI,SAAS,KAAK,UAAU,KAAK,MAAM,GAAG;AAAA,MAC/C,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,MACxB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,QAC7C,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;;;A0BrwBA,IAAAI,yBAA
kC;AAClC,IAAAC,mBAKO;;;ACPA,IAAM,iBAAN,MAAwB;AAAA,EAAxB;AACL,SAAQ,SAGmC,EAAE,MAAM,UAAU;AAE7D,SAAQ,WAA6C;AACrD,SAAQ,UAAkD;AAAA;AAAA,EAE1D,IAAI,QAAoB;AACtB,QAAI,KAAK,SAAS;AAChB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,UAAU,IAAI,QAAW,CAAC,SAAS,WAAW;AACjD,UAAI,KAAK,OAAO,SAAS,YAAY;AACnC,gBAAQ,KAAK,OAAO,KAAK;AAAA,MAC3B,WAAW,KAAK,OAAO,SAAS,YAAY;AAC1C,eAAO,KAAK,OAAO,KAAK;AAAA,MAC1B;AAEA,WAAK,WAAW;AAChB,WAAK,UAAU;AAAA,IACjB,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,QAAQ,OAAgB;AAjC1B,QAAAC;AAkCI,SAAK,SAAS,EAAE,MAAM,YAAY,MAAM;AAExC,QAAI,KAAK,SAAS;AAChB,OAAAA,OAAA,KAAK,aAAL,gBAAAA,KAAA,WAAgB;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,OAAO,OAAsB;AAzC/B,QAAAA;AA0CI,SAAK,SAAS,EAAE,MAAM,YAAY,MAAM;AAExC,QAAI,KAAK,SAAS;AAChB,OAAAA,OAAA,KAAK,YAAL,gBAAAA,KAAA,WAAe;AAAA,IACjB;AAAA,EACF;AACF;;;ACvCO,SAAS,0BAId;AACA,MAAI;AACJ,MAAI;AAEJ,QAAM,UAAU,IAAI,QAAW,CAAC,KAAK,QAAQ;AAC3C,cAAU;AACV,aAAS;AAAA,EACX,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACnBO,SAAS,yBAId;AACA,MAAI,qBAAuD,CAAC;AAC5D,MAAI,aAAwD;AAC5D,MAAI,WAAW;AACf,MAAI,mBAAmB,wBAA8B;AAErD,QAAM,cAAc,YAAY;AAE9B,QAAI,YAAY,mBAAmB,WAAW,GAAG;AAC/C,+CAAY;AACZ;AAAA,IACF;AAIA,QAAI,mBAAmB,WAAW,GAAG;AACnC,yBAAmB,wBAA8B;AACjD,YAAM,iBAAiB;AACvB,aAAO,YAAY;AAAA,IACrB;AAEA,QAAI;AACF,YAAM,EAAE,OAAO,KAAK,IAAI,MAAM,mBAAmB,CAAC,EAAE,KAAK;AAEzD,UAAI,MAAM;AAER,2BAAmB,MAAM;AAGzB,YAAI,mBAAmB,SAAS,GAAG;AACjC,gBAAM,YAAY;AAAA,QACpB,WAAW,UAAU;AACnB,mDAAY;AAAA,QACd;AAAA,MACF,OAAO;AAEL,iDAAY,QAAQ;AAAA,MACtB;AAAA,IACF,SAAS,OAAO;AAEd,+CAAY,MAAM;AAClB,yBAAmB,MAAM;AAEzB,UAAI,YAAY,mBAAmB,WAAW,GAAG;AAC/C,iDAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,IAAI,eAAkB;AAAA,MAC5B,MAAM,iBAAiB;AACrB,qBAAa;AAAA,MACf;AAAA,MACA,MAAM;AAAA,MACN,MAAM,SAAS;AACb,mBAAW,UAAU,oBAAoB;AACvC,gBAAM,OAAO,OAAO;AAAA,QACtB;AACA,6BAAqB,CAAC;AACtB,mBAAW;AAAA,MACb;AAAA,IACF,CAAC;AAAA,IACD,WAAW,CAAC,gBAAmC;AAC7C,UAAI,UAAU;AACZ,cAAM,IAAI,MAAM,iDAAiD;AAAA,MACnE;AAEA,yBAAmB,KAAK,YAAY,UAAU,CAAC;AAC/C,uBAAiB,QAAQ;AAAA,IAC3B;AAAA,IACA,OAAO,MAAM;AACX,iBAAW;AACX,uBAAiB,QAAQ;AAEzB,UAAI,mBAAmB,WAAW,GAAG;AACnC,iDAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACF;;;AC3FO,SAAS,MAAc;AAD9B,MAAAC,MAAA;AAEE,UAAO,MAAAA,OAAA,yCAAY,gBAAZ,gBAAAA,KAAyB,UAAzB,YAAkC,KAAK,IAAI;AACpD;;;AJqDA,IAAMC,0BAAqB,0CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AAwPnE,SAAS,aAAsD;AAAA,EACpE;AAAA,EACA,QAAQ;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA,SAAS;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B;AAAA,EACA,WAAW;AAAA,IACT,YAAAC,cAAaD;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,IAC7B,KAAAE,OAAM;AAAA,EACR,IAAI,CAAC;AAAA,EACL,GAAG;AACL,GA0B0D;AACxD,gCAA8B;AAAA,IAC5B;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB,kBAAkB,EAAE,QAAQ,QAAQ,YAAY,CAAC;AAGxE,MAAI,eAAe,SAAS,eAAe,SAAS,QAAW;AAC7D,WAAO;AAAA,EACT;AAEA,SAAO,IAAI,0BAA0B;AAAA,IACnC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,uBAAuB;AAAA,IACvB;AAAA,IACA;AAAA,IACA,YAAAD;AAAA,IACA;AAAA,IACA,KAAAC;AAAA,EACF,CAAC;AACH;AAEA,IAAM,4BAAN,MAEA;AAAA,EAuBE,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAAD;AAAA,IACA;AAAA,IACA,KAAAC;AAAA,EACF,GAmBG;AA5DH,SAAiB,gBAAgB,IAAI,eAAuB;AAC5D,SAAiB,eAAe,IAAI,eAAmC;AACvE,SAAiB,0BAA0B,IAAI,eAE7C;AACF,SAAiB,kBAAkB,IAAI,eAErC;AACF,SAAiB,iBACf,IAAI,eAA6C;AACnD,SAAiB,kBACf,IAAI,eAA8C;AAEpD,SAAiB,mBACf,uBAAkD;AA+ClD,UAAM,EAAE,YAAY,MAAM,IAAI,eAAe;AAAA,MAC3C,YAAY;AAAA,IACd,CAAC;AAED,UAAM,0BAA0B,2BAA2B;AAAA,MACzD;AAAA,MACA;AAAA,MACA;AAAA
,MACA,UAAU,EAAE,GAAG,UAAU,WAAW;AAAA,IACtC,CAAC;AAED,UAAM,SAAS,UAAU,SAAS;AAClC,UAAM,OAAO;AAEb,eAAW;AAAA,MACT,MAAM;AAAA,MACN,YAAY,0BAA0B;AAAA,QACpC;AAAA,QACA,YAAY;AAAA,UACV,GAAG,sBAAsB;AAAA,YACvB,aAAa;AAAA,YACb;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA;AAAA,UAEH,aAAa;AAAA,YACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,UAC1D;AAAA,UACA,aACE,eAAe,cAAc,OACzB,EAAE,OAAO,MAAM,KAAK,UAAU,eAAe,UAAU,EAAE,IACzD;AAAA,UACN,kBAAkB;AAAA,UAClB,yBAAyB;AAAA,UACzB,sBAAsB,eAAe;AAAA,UACrC,oBAAoB;AAAA,QACtB;AAAA,MACF,CAAC;AAAA,MACD;AAAA,MACA,aAAa;AAAA,MACb,IAAI,OAAM,aAAY;AAEpB,YAAI,SAAS,UAAU,QAAQ,MAAM;AACnC,iBAAO,MAAM;AAAA,QACf;AAEA,YAAI;AACJ,YAAI;AAKJ,gBAAQ,MAAM;AAAA,UACZ,KAAK,QAAQ;AACX,kBAAM,qBAAqB,kBAAkB;AAAA,cAC3C,QAAQ;AAAA,gBACN,QACE,eAAe,cAAc,OACzB,sBAAsB,EAAE,QAAQ,OAAO,CAAC,IACxC,MAAM,4BACN,SACA,sBAAsB;AAAA,kBACpB,QAAQ;AAAA,kBACR,QAAQ,eAAe;AAAA,gBACzB,CAAC;AAAA,gBACP;AAAA,gBACA;AAAA,cACF;AAAA,cACA,OAAO;AAAA,YACT,CAAC;AAED,0BAAc;AAAA,cACZ,MAAM;AAAA,gBACJ,MAAM;AAAA,gBACN,QAAQ,eAAe;AAAA,gBACvB,MAAM;AAAA,gBACN,aAAa;AAAA,cACf;AAAA,cACA,GAAG,oBAAoB,QAAQ;AAAA,cAC/B,aAAa,mBAAmB;AAAA,cAChC,QAAQ,MAAM,6BAA6B;AAAA,gBACzC,QAAQ;AAAA,gBACR,wBAAwB,MAAM;AAAA,gBAC9B,kBAAkB,MAAM;AAAA,cAC1B,CAAC;AAAA,cACD,kBAAkB;AAAA,cAClB;AAAA,cACA;AAAA,YACF;AAEA,0BAAc;AAAA,cACZ,WAAW,CAAC,OAAO,eAAe;AAChC,wBAAQ,MAAM,MAAM;AAAA,kBAClB,KAAK;AACH,+BAAW,QAAQ,MAAM,SAAS;AAClC;AAAA,kBACF,KAAK;AAAA,kBACL,KAAK;AAAA,kBACL,KAAK;AACH,+BAAW,QAAQ,KAAK;AACxB;AAAA,gBACJ;AAAA,cACF;AAAA,YACF;AAEA;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,kBAAM,qBAAqB,kBAAkB;AAAA,cAC3C,QAAQ,EAAE,QAAQ,QAAQ,SAAS;AAAA,cACnC,OAAO;AAAA,YACT,CAAC;AAED,0BAAc;AAAA,cACZ,MAAM;AAAA,gBACJ,MAAM;AAAA,gBACN,MAAM;AAAA,kBACJ,MAAM;AAAA,kBACN,MAAM,kCAAc;AAAA,kBACpB,aACE,gDAAqB;AAAA,kBACvB,YAAY,eAAe;AAAA,gBAC7B;AAAA,cACF;AAAA,cACA,GAAG,oBAAoB,QAAQ;AAAA,cAC/B,aAAa,mBAAmB;AAAA,cAChC,QAAQ,MAAM,6BAA6B;AAAA,gBACzC,QAAQ;AAAA,gBACR,wBAAwB,MAAM;AAAA,gBAC9B,kBAAkB,MAAM;AAAA,cAC1B,CAAC;AAAA,cACD,kBAAkB;AAAA,cAClB;AAAA,cACA;AAAA,YACF;AAEA,0BAAc;AAAA,cACZ,UAAU,OAAO,YAAY;AAC3B,wBAAQ,MAAM,MAAM;AAAA,kBAClB,KAAK;AACH,+BAAW,QAAQ,MAAM,aAAa;AACtC;AAAA,kBACF,KAAK;AAAA,kBACL,KAAK;AAAA,kBACL,KAAK;AACH,+BAAW,QAAQ,KAAK;AACxB;AAAA,gBACJ;AAAA,cACF;AAAA,YACF;AAEA;AAAA,UACF;AAAA,UAEA,KAAK,QAAW;AACd,kBAAM,IAAI;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAAA,UAEA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,UACzD;AAAA,QACF;AAEA,cAAM;AAAA,UACJ,QAAQ,EAAE,QAAQ,UAAU,aAAa,QAAQ;AAAA,UACjD;AAAA,UACA;AAAA,QACF,IAAI,MAAM;AAAA,UAAM,MACd,WAAW;AAAA,YACT,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA,gBACH,oBAAoB;AAAA,kBAClB,OAAO,MAAM,YAAY;AAAA,gBAC3B;AAAA,gBACA,sBAAsB;AAAA,kBACpB,OAAO,MAAM,KAAK,UAAU,YAAY,MAAM;AAAA,gBAChD;AAAA,gBACA,oBAAoB;AAAA;AAAA,gBAGpB,iBAAiB,MAAM;AAAA,gBACvB,wBAAwB,MAAM;AAAA,gBAC9B,oCAAoC,SAAS;AAAA,gBAC7C,6BAA6B,SAAS;AAAA,gBACtC,mCAAmC,SAAS;AAAA,gBAC5C,8BAA8B,SAAS;AAAA,gBACvC,wBAAwB,SAAS;AAAA,gBACjC,wBAAwB,SAAS;AAAA,cACnC;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,aAAa;AAAA,YACb,IAAI,OAAMC,mBAAiB;AAAA,cACzB,kBAAkBD,KAAI;AAAA,cACtB,cAAAC;AAAA,cACA,QAAQ,MAAM,MAAM,SAAS,WAAW;AAAA,YAC1C;AAAA,UACF,CAAC;AAAA,QACH;AAEA,aAAK,eAAe,QAAQ,4BAAW,CAAC,CAAC;AAGzC,YAAI;AACJ,YAAI;AACJ,YAAI;AACJ,YAAIC;AACJ,YAAI;AAGJ,YAAI,kBAAkB;AACtB,YAAI,YAAY;AAChB,YAAI,WAIA;AAAA,UACF,IAAIH,YAAW;AAAA,UACf,WAAW,YAAY;AAAA,UACvB,SAAS,MAAM;AAAA,QACjB;AAIA,YAAI,mBAA0C;AAC9C,YAAI,eAAoC;AACxC,YAAI,eAAe;AACnB,YAAI,eAAe;AAEnB,cAAM,oBAAoB,OACvB,YAAY,IAAI,gBAAgB,WAAW,CAAC,EAC5C;AAAA,UACC,IAAI,gBAGF;AAAA,YACA,MAAM,UAAU,OAAO,YAA2B;AA7rBhE,kBAAAI,MAAA;AA+rBgB,kBAAI,cAAc;AAChB,sBA
AM,iBAAiBH,KAAI,IAAI;AAE/B,+BAAe;AAEf,6BAAa,SAAS,wBAAwB;AAAA,kBAC5C,4BAA4B;AAAA,gBAC9B,CAAC;AAED,6BAAa,cAAc;AAAA,kBACzB,4BAA4B;AAAA,gBAC9B,CAAC;AAAA,cACH;AAGA,kBAAI,OAAO,UAAU,UAAU;AAC7B,mCAAmB;AACnB,6BAAa;AAEb,sBAAM,EAAE,OAAO,mBAAmB,OAAO,WAAW,QAClD,mCAAiB,eAAe;AAElC,oBACE,sBAAsB,UACtB,KAAC,kCAAgB,kBAAkB,iBAAiB,GACpD;AACA,wBAAM,mBACJ,eAAe,sBAAsB;AAAA,oBACnC,OAAO;AAAA,oBACP;AAAA,oBACA;AAAA,oBACA;AAAA,oBACA,cAAc,eAAe;AAAA,kBAC/B,CAAC;AAEH,sBACE,iBAAiB,WACjB,KAAC;AAAA,oBACC;AAAA,oBACA,iBAAiB,MAAM;AAAA,kBACzB,GACA;AAEA,uCAAmB;AACnB,mCAAe,iBAAiB,MAAM;AAEtC,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,QAAQ;AAAA,oBACV,CAAC;AAED,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,WAAW,iBAAiB,MAAM;AAAA,oBACpC,CAAC;AAED,gCAAY;AACZ,mCAAe;AAAA,kBACjB;AAAA,gBACF;AAEA;AAAA,cACF;AAEA,sBAAQ,MAAM,MAAM;AAAA,gBAClB,KAAK,qBAAqB;AACxB,6BAAW;AAAA,oBACT,KAAIG,OAAA,MAAM,OAAN,OAAAA,OAAY,SAAS;AAAA,oBACzB,YAAW,WAAM,cAAN,YAAmB,SAAS;AAAA,oBACvC,UAAS,WAAM,YAAN,YAAiB,SAAS;AAAA,kBACrC;AACA;AAAA,gBACF;AAAA,gBAEA,KAAK,UAAU;AAEb,sBAAI,cAAc,IAAI;AACpB,+BAAW,QAAQ,EAAE,MAAM,cAAc,UAAU,CAAC;AAAA,kBACtD;AAGA,iCAAe,MAAM;AAGrB,0BAAQ,4BAA4B,MAAM,KAAK;AAC/C,qCAAmB,MAAM;AAEzB,6BAAW,QAAQ,EAAE,GAAG,OAAO,OAAO,SAAS,CAAC;AAGhD,uBAAK,aAAa,QAAQ,KAAK;AAC/B,uBAAK,wBAAwB,QAAQ,gBAAgB;AACrD,uBAAK,gBAAgB,QAAQ;AAAA,oBAC3B,GAAG;AAAA,oBACH,SAAS,2CAAa;AAAA,kBACxB,CAAC;AAGD,wBAAM,mBAAmB,eAAe;AAAA,oBACtC;AAAA,oBACA;AAAA,sBACE,MAAM;AAAA,sBACN;AAAA,sBACA;AAAA,oBACF;AAAA,kBACF;AAEA,sBAAI,iBAAiB,SAAS;AAC5B,oBAAAD,UAAS,iBAAiB;AAC1B,yBAAK,cAAc,QAAQA,OAAM;AAAA,kBACnC,OAAO;AACL,4BAAQ,IAAI,uBAAuB;AAAA,sBACjC,SACE;AAAA,sBACF,OAAO,iBAAiB;AAAA,sBACxB,MAAM;AAAA,sBACN;AAAA,sBACA;AAAA,oBACF,CAAC;AACD,yBAAK,cAAc,OAAO,KAAK;AAAA,kBACjC;AAEA;AAAA,gBACF;AAAA,gBAEA,SAAS;AACP,6BAAW,QAAQ,KAAK;AACxB;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA;AAAA,YAGA,MAAM,MAAM,YAAY;AACtB,kBAAI;AACF,sBAAM,aAAa,wBAAS;AAAA,kBAC1B,cAAc;AAAA,kBACd,kBAAkB;AAAA,kBAClB,aAAa;AAAA,gBACf;AAEA,6BAAa;AAAA,kBACX,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,4BAA4B;AAAA,sBAC5B,sBAAsB;AAAA,wBACpB,QAAQ,MAAM,KAAK,UAAUA,OAAM;AAAA,sBACrC;AAAA,sBACA,kBAAkB,SAAS;AAAA,sBAC3B,qBAAqB,SAAS;AAAA,sBAC9B,yBACE,SAAS,UAAU,YAAY;AAAA,sBAEjC,yBAAyB,WAAW;AAAA,sBACpC,6BACE,WAAW;AAAA;AAAA,sBAGb,kCAAkC,CAAC,YAAY;AAAA,sBAC/C,sBAAsB,SAAS;AAAA,sBAC/B,yBAAyB,SAAS;AAAA,sBAClC,6BAA6B,WAAW;AAAA,sBACxC,8BACE,WAAW;AAAA,oBACf;AAAA,kBACF,CAAC;AAAA,gBACH;AAGA,6BAAa,IAAI;AAGjB,yBAAS;AAAA,kBACP,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,yBAAyB,WAAW;AAAA,sBACpC,6BACE,WAAW;AAAA,sBACb,sBAAsB;AAAA,wBACpB,QAAQ,MAAM,KAAK,UAAUA,OAAM;AAAA,sBACrC;AAAA,oBACF;AAAA,kBACF,CAAC;AAAA,gBACH;AAGA,uBAAM,qCAAW;AAAA,kBACf,OAAO;AAAA,kBACP,QAAAA;AAAA,kBACA;AAAA,kBACA,UAAU;AAAA,oBACR,GAAG;AAAA,oBACH,SAAS,2CAAa;AAAA,kBACxB;AAAA,kBACA;AAAA,kBACA,+BAA+B;AAAA,gBACjC;AAAA,cACF,SAASE,QAAO;AACd,2BAAW,MAAMA,MAAK;AAAA,cACxB,UAAE;AACA,yBAAS,IAAI;AAAA,cACf;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH;AAEF,aAAK,iBAAiB,UAAU,iBAAiB;AAAA,MACnD;AAAA,IACF,CAAC,EACE,MAAM,WAAS;AAEd,WAAK,iBAAiB;AAAA,QACpB,IAAI,eAAe;AAAA,UACjB,MAAM,YAAY;AAChB,uBAAW,MAAM,KAAK;AAAA,UACxB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC,EACA,QAAQ,MAAM;AACb,WAAK,iBAAiB,MAAM;AAAA,IAC9B,CAAC;AAEH,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEA,IAAI,SAAS;AACX,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEA,IAAI,gCAAgC;AAClC,WAAO,KAAK,wBAAwB;AAAA,EACtC;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,sBAAoD;AACtD,WAAO;AAAA,MACL,KAAK,iBAAiB,OAAO;AAAA,QAC3B,IAAI,gBAAoD;AAAA,UACtD,UAAU,OAAO,YAAY;AAC3B,oBAAQ,MAAM,
MAAM;AAAA,cAClB,KAAK;AACH,2BAAW,QAAQ,MAAM,MAAM;AAC/B;AAAA,cAEF,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cAEF,KAAK;AACH,2BAAW,MAAM,MAAM,KAAK;AAC5B;AAAA,cAEF,SAAS;AACP,sBAAM,mBAA0B;AAChC,sBAAM,IAAI,MAAM,2BAA2B,gBAAgB,EAAE;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,gBAAgC;AAClC,WAAO,KAAK,eAAe;AAAA,MACzB,KAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,IAAI,aAA0C;AAC5C,WAAO;AAAA,MACL,KAAK,iBAAiB,OAAO;AAAA,QAC3B,IAAI,gBAAmD;AAAA,UACrD,UAAU,OAAO,YAAY;AAC3B,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AACH,2BAAW,QAAQ,MAAM,SAAS;AAClC;AAAA,cAEF,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cAEF,KAAK;AACH,2BAAW,MAAM,MAAM,KAAK;AAC5B;AAAA,cAEF,SAAS;AACP,sBAAM,mBAA0B;AAChC,sBAAM,IAAI,MAAM,2BAA2B,gBAAgB,EAAE;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,aAA6D;AAC/D,WAAO,0BAA0B,KAAK,iBAAiB,MAAM;AAAA,EAC/D;AAAA,EAEA,yBAAyB,UAA0B,MAAqB;AACtE,0BAAsB;AAAA,MACpB;AAAA,MACA,QAAQ,6BAAM;AAAA,MACd,YAAY,6BAAM;AAAA,MAClB,SAAS,2BAA2B,6BAAM,SAAS;AAAA,QACjD,aAAa;AAAA,MACf,CAAC;AAAA,MACD,QAAQ,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC;AAAA,IAC7D,CAAC;AAAA,EACH;AAAA,EAEA,qBAAqB,MAA+B;AA3gCtD,QAAAD;AA4gCI,WAAO,IAAI,SAAS,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC,GAAG;AAAA,MACxE,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,MACxB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,QAC7C,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;;;AKnhCA,IAAAE,yBAAkC;;;ACAlC,IAAAC,oBAYO;;;ACZP,IAAAC,oBAA4C;AAE5C,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,4BAAN,cAAwC,6BAAW;AAAA,EAMxD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,8BAA8B,QAAQ,SAAK;AAAA,MACnD;AAAA,IACF,CAAC;AAAA,EACH,GAKG;AACD,UAAM,EAAE,MAAAH,OAAM,SAAS,MAAM,CAAC;AAlBhC,SAAkBG,OAAU;AAoB1B,SAAK,WAAW;AAChB,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,OAAO,WAAW,OAAoD;AACpE,WAAO,6BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA3BoBE,MAAAD;;;ACPpB,IAAAE,oBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,kBAAN,cAA8B,6BAAW;AAAA,EAM9C,YAAY;AAAA,IACV;AAAA,IACA,iBAAiB;AAAA,IACjB,UAAU,yCAAyC,QAAQ,MACzD,mBAAmB,SACf,4BACA,oBAAoB,eAAe,KAAK,IAAI,CAAC,GACnD;AAAA,EACF,GAIG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAlBzB,SAAkBG,OAAU;AAoB1B,SAAK,WAAW;AAChB,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEA,OAAO,WAAW,OAA0C;AAC1D,WAAO,6BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA3BoBE,MAAAD;;;ACPpB,IAAAE,oBAA4C;AAI5C,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AANhC,IAAAE;AAQO,IAAM,sBAAN,cAAkC,6BAAW;AAAA,EAKlD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA,UAAU,kCAA8B,mCAAgB,KAAK,CAAC;AAAA,EAChE,GAIG;AACD,UAAM,EAAE,MAAAH,QAAM,SAAS,MAAM,CAAC;AAbhC,SAAkBG,QAAU;AAc1B,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,OAAO,WAAW,OAA8C;AAC9D,WAAO,6BAAW,UAAU,OAAOF,QAAM;AAAA,EAC3C;AACF;AApBoBE,OAAAD;;;ACTpB,IAAAE,oBAAuD;AAEvD,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AAJhC,IAAAE;AAMO,IAAM,qBAAN,cAAiC,6BAAW;AAAA,EAMjD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,wBAAwB,QAAQ,SAAK,mCAAgB,KAAK,CAAC;AAAA,EACvE,GAKG;AACD,UAAM,EAAE,MAAAH,QAAM,SAAS,MAAM,CAAC;AAhBhC,SAAkBG,QAAU;AAkB1B,SAAK,WAAW;AAChB,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,OAAO,WAAW,OAA6C;AAC7D,WAAO,6BAAW,UAAU,OAAOF,QAAM;AAAA,EAC3C;AACF;AAzBoBE,OAAAD;;;ACFpB,IAAAE,mBAAyB;;;ACLlB,SAAS,iBACdC,SACmC;AACnC,SAAOA,WAAU,QAAQ,OAAO,KAAKA,OAAM,EAAE,SAAS;AACxD;;;ADMO,SAAS,0BAEd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GASE;AACA,MAAI,CAAC,iBAAiB,KAAK,GAAG;AAC5B,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY;AAAA,IACd;AAAA,EACF;AAGA,QAAM,gBACJ,eAAe,OACX,OAAO,QAAQ,KAAK,EAAE;AAAA,IAAO,CAAC,CAACC,MAAI,MACjC,YAAY,SAASA,MAAmB;AAAA,EAC1C,IACA,OAAO,QAAQ,KAAK;AAE1B,SAAO;AAAA,IACL,OAAO,cAAc,IAAI,CAAC,
CAACA,QAAMC,KAAI,MAAM;AACzC,YAAM,WAAWA,MAAK;AACtB,cAAQ,UAAU;AAAA,QAChB,KAAK;AAAA,QACL,KAAK;AACH,iBAAO;AAAA,YACL,MAAM;AAAA,YACN,MAAAD;AAAA,YACA,aAAaC,MAAK;AAAA,YAClB,gBAAY,2BAASA,MAAK,UAAU,EAAE;AAAA,UACxC;AAAA,QACF,KAAK;AACH,iBAAO;AAAA,YACL,MAAM;AAAA,YACN,MAAAD;AAAA,YACA,IAAIC,MAAK;AAAA,YACT,MAAMA,MAAK;AAAA,UACb;AAAA,QACF,SAAS;AACP,gBAAM,kBAAyB;AAC/B,gBAAM,IAAI,MAAM,0BAA0B,eAAe,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF,CAAC;AAAA,IACD,YACE,cAAc,OACV,EAAE,MAAM,OAAO,IACf,OAAO,eAAe,WACtB,EAAE,MAAM,WAAW,IACnB,EAAE,MAAM,QAAiB,UAAU,WAAW,SAAmB;AAAA,EACzE;AACF;;;AEzEA,IAAM,uBAAuB;AAWtB,SAAS,sBAAsBC,OAMxB;AACZ,QAAM,QAAQA,MAAK,MAAM,oBAAoB;AAC7C,SAAO,QACH,EAAE,QAAQ,MAAM,CAAC,GAAG,YAAY,MAAM,CAAC,GAAG,QAAQ,MAAM,CAAC,EAAE,IAC3D;AACN;;;ACpBO,SAAS,8BAA8BC,OAAsB;AAClE,QAAM,QAAQ,sBAAsBA,KAAI;AACxC,SAAO,QAAQ,MAAM,SAAS,MAAM,aAAaA;AACnD;;;ACJA,IAAAC,yBAAiD;AACjD,IAAAC,mBAAiC;AAUjC,eAAsB,cAAsD;AAAA,EAC1E;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAMkC;AAChC,MAAI,SAAS,MAAM;AACjB,UAAM,IAAI,gBAAgB,EAAE,UAAU,SAAS,SAAS,CAAC;AAAA,EAC3D;AAEA,MAAI;AACF,WAAO,MAAM,gBAAgB,EAAE,UAAU,MAAM,CAAC;AAAA,EAClD,SAAS,OAAO;AACd,QACE,kBAAkB,QAClB,EACE,gBAAgB,WAAW,KAAK,KAChC,0BAA0B,WAAW,KAAK,IAE5C;AACA,YAAM;AAAA,IACR;AAEA,QAAI,mBAA2D;AAE/D,QAAI;AACF,yBAAmB,MAAM,eAAe;AAAA,QACtC;AAAA,QACA;AAAA,QACA,iBAAiB,CAAC,EAAE,SAAS,UAC3B,2BAAS,MAAM,QAAQ,EAAE,UAAU,EAAE;AAAA,QACvC;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,aAAa;AACpB,YAAM,IAAI,oBAAoB;AAAA,QAC5B,OAAO;AAAA,QACP,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAGA,QAAI,oBAAoB,MAAM;AAC5B,YAAM;AAAA,IACR;AAEA,WAAO,MAAM,gBAAgB,EAAE,UAAU,kBAAkB,MAAM,CAAC;AAAA,EACpE;AACF;AAEA,eAAe,gBAAwD;AAAA,EACrE;AAAA,EACA;AACF,GAGkC;AAChC,QAAM,WAAW,SAAS;AAE1B,QAAMC,QAAO,MAAM,QAAQ;AAE3B,MAAIA,SAAQ,MAAM;AAChB,UAAM,IAAI,gBAAgB;AAAA,MACxB,UAAU,SAAS;AAAA,MACnB,gBAAgB,OAAO,KAAK,KAAK;AAAA,IACnC,CAAC;AAAA,EACH;AAEA,QAAM,aAAS,2BAASA,MAAK,UAAU;AAMvC,QAAM,cACJ,SAAS,KAAK,KAAK,MAAM,SACrB,0CAAkB,EAAE,OAAO,CAAC,GAAG,OAAO,CAAC,QACvC,sCAAc,EAAE,MAAM,SAAS,MAAM,OAAO,CAAC;AAEnD,MAAI,YAAY,YAAY,OAAO;AACjC,UAAM,IAAI,0BAA0B;AAAA,MAClC;AAAA,MACA,UAAU,SAAS;AAAA,MACnB,OAAO,YAAY;AAAA,IACrB,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,YAAY,SAAS;AAAA,IACrB;AAAA,IACA,MAAM,YAAY;AAAA,EACpB;AACF;;;ACrGO,SAAS,mBAA2D;AAAA,EACzE,MAAAC,QAAO;AAAA,EACP;AAAA,EACA;AAAA,EACA;AACF,GAKkD;AAChD,QAAM,mBAAkE,CAAC;AAEzE,mBAAiB,KAAK;AAAA,IACpB,MAAM;AAAA,IACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAAA,MAAK,GAAG,GAAG,SAAS;AAAA,EAChD,CAAC;AAED,MAAI,YAAY,SAAS,GAAG;AAC1B,qBAAiB,KAAK;AAAA,MACpB,MAAM;AAAA,MACN,SAAS,YAAY,IAAI,CAAC,eAA+B;AACvD,cAAMC,QAAO,MAAM,WAAW,QAAQ;AACtC,gBAAOA,SAAA,gBAAAA,MAAM,qCAAoC,OAC7C;AAAA,UACE,MAAM;AAAA,UACN,YAAY,WAAW;AAAA,UACvB,UAAU,WAAW;AAAA,UACrB,QAAQA,MAAK,iCAAiC,WAAW,MAAM;AAAA,UAC/D,sBAAsBA,MAAK;AAAA,YACzB,WAAW;AAAA,UACb;AAAA,QACF,IACA;AAAA,UACE,MAAM;AAAA,UACN,YAAY,WAAW;AAAA,UACvB,UAAU,WAAW;AAAA,UACrB,QAAQ,WAAW;AAAA,QACrB;AAAA,MACN,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,SAAO;AACT;;;AXtBA,IAAMC,0BAAqB,0CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AAgD1E,eAAsB,aAGpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,qBAAqB;AAAA,EACrB,4BAA4B,gBAAgB;AAAA,EAC5C,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B,0BAA0B;AAAA,EAC1B,6BAA6B;AAAA,EAC7B,WAAW;AAAA,IACT,YAAAC,cAAaD;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,EAC/B,IAAI,CAAC;AAAA,EACL;AAAA,EACA,GAAG;AACL,GAsEiD;AAlLjD,MAAAE;AAmLE,MAAI,WAAW,GAAG;AAChB,UAAM,IAAI,qBAAqB;AAAA,MAC7B,WAAW;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE1E,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,GAAG
,UAAU,WAAW;AAAA,EACtC,CAAC;AAED,QAAM,gBAAgB,kBAAkB;AAAA,IACtC,QAAQ;AAAA,MACN,SAAQA,OAAA,iCAAQ,uBAAuB,EAAE,QAAQ,MAAM,OAA/C,OAAAA,OAAqD;AAAA,MAC7D;AAAA,MACA;AAAA,IACF;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB;AAAA,UACvB,aAAa;AAAA,UACb;AAAA,QACF,CAAC;AAAA,QACD,GAAG;AAAA;AAAA,QAEH,aAAa;AAAA,UACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,QAC1D;AAAA,QACA,wBAAwB;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAjOtB,UAAAA,MAAA;AAkOM,YAAM,OAAO;AAAA,QACX,MAAM;AAAA,QACN,GAAG,0BAA0B,EAAE,OAAO,YAAY,YAAY,CAAC;AAAA,MACjE;AAEA,YAAM,eAAe,oBAAoB,QAAQ;AAEjD,UAAI;AAGJ,UAAI,mBAAyC,CAAC;AAC9C,UAAI,qBAA6C,CAAC;AAClD,UAAI,YAAY;AAChB,YAAM,mBACJ,CAAC;AACH,UAAIC,QAAO;AACX,YAAM,QAAoD,CAAC;AAC3D,UAAI,QAA4B;AAAA,QAC9B,kBAAkB;AAAA,QAClB,cAAc;AAAA,QACd,aAAa;AAAA,MACf;AAEA,UAAI,WAA4D;AAEhE,SAAG;AAED,cAAM,eAAe,cAAc,IAAI,cAAc,OAAO;AAE5D,cAAM,oBAAoB;AAAA,UACxB,GAAG,cAAc;AAAA,UACjB,GAAG;AAAA,QACL;AAEA,cAAM,iBAAiB,MAAM,6BAA6B;AAAA,UACxD,QAAQ;AAAA,YACN,MAAM;AAAA,YACN,QAAQ,cAAc;AAAA,YACtB,UAAU;AAAA,UACZ;AAAA,UACA,wBAAwB,MAAM;AAAA,UAC9B,kBAAkB,MAAM;AAAA,QAC1B,CAAC;AAED,+BAAuB,MAAM;AAAA,UAAM,MACjC,WAAW;AAAA,YACT,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA,gBACH,oBAAoB,EAAE,OAAO,MAAM,aAAa;AAAA,gBAChD,sBAAsB;AAAA,kBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,gBAC5C;AAAA,gBACA,mBAAmB;AAAA;AAAA,kBAEjB,OAAO,MAAG;AA/R5B,wBAAAD;AA+R+B,4BAAAA,OAAA,KAAK,UAAL,gBAAAA,KAAY,IAAI,CAAAE,UAAQ,KAAK,UAAUA,KAAI;AAAA;AAAA,gBAC1D;AAAA,gBACA,wBAAwB;AAAA,kBACtB,OAAO,MACL,KAAK,cAAc,OACf,KAAK,UAAU,KAAK,UAAU,IAC9B;AAAA,gBACR;AAAA;AAAA,gBAGA,iBAAiB,MAAM;AAAA,gBACvB,wBAAwB,MAAM;AAAA,gBAC9B,oCAAoC,SAAS;AAAA,gBAC7C,6BAA6B,SAAS;AAAA,gBACtC,mCAAmC,SAAS;AAAA,gBAC5C,iCAAiC,SAAS;AAAA,gBAC1C,8BAA8B,SAAS;AAAA,gBACvC,wBAAwB,SAAS;AAAA,gBACjC,wBAAwB,SAAS;AAAA,cACnC;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAMC,UAAQ;AArT9B,kBAAAH,MAAAI,KAAAC,KAAAC,KAAAC,KAAAC;AAsTc,oBAAM,SAAS,MAAM,MAAM,WAAW;AAAA,gBACpC;AAAA,gBACA,GAAG;AAAA,gBACH,aAAa;AAAA,gBACb,gBAAgB,iCAAQ,eAAe,EAAE,MAAM;AAAA,gBAC/C,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,CAAC;AAGD,oBAAM,eAAe;AAAA,gBACnB,KAAIJ,OAAAJ,OAAA,OAAO,aAAP,gBAAAA,KAAiB,OAAjB,OAAAI,MAAuBL,YAAW;AAAA,gBACtC,YAAWO,OAAAD,MAAA,OAAO,aAAP,gBAAAA,IAAiB,cAAjB,OAAAC,MAA8B,YAAY;AAAA,gBACrD,UAASE,OAAAD,MAAA,OAAO,aAAP,gBAAAA,IAAiB,YAAjB,OAAAC,MAA4B,MAAM;AAAA,cAC7C;AAGA,cAAAL,MAAK;AAAA,gBACH,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,4BAA4B,OAAO;AAAA,oBACnC,oBAAoB;AAAA,sBAClB,QAAQ,MAAM,OAAO;AAAA,oBACvB;AAAA,oBACA,yBAAyB;AAAA,sBACvB,QAAQ,MAAM,KAAK,UAAU,OAAO,SAAS;AAAA,oBAC/C;AAAA,oBACA,kBAAkB,aAAa;AAAA,oBAC/B,qBAAqB,aAAa;AAAA,oBAClC,yBACE,aAAa,UAAU,YAAY;AAAA,oBAErC,yBAAyB,OAAO,MAAM;AAAA,oBACtC,6BAA6B,OAAO,MAAM;AAAA;AAAA,oBAG1C,kCAAkC,CAAC,OAAO,YAAY;AAAA,oBACtD,sBAAsB,aAAa;AAAA,oBACnC,yBAAyB,aAAa;AAAA,oBACtC,6BAA6B,OAAO,MAAM;AAAA,oBAC1C,8BAA8B,OAAO,MAAM;AAAA,kBAC7C;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO,EAAE,GAAG,QAAQ,UAAU,aAAa;AAAA,YAC7C;AAAA,UACF,CAAC;AAAA,QACH;AAGA,2BAAmB,MAAM,QAAQ;AAAA,YAC9BH,OAAA,qBAAqB,cAArB,OAAAA,OAAkC,CAAC,GAAG;AAAA,YAAI,cACzC,cAAc;AAAA,cACZ;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA,UAAU;AAAA,YACZ,CAAC;AAAA,UACH;AAAA,QACF;AAGA,6BACE,SAAS,OACL,CAAC,IACD,MAAM,aAAa;AAAA,UACjB,WAAW;AAAA,UACX;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAU;AAAA,UACV;AAAA,QACF,CAAC;AAGP,cAAM,eAAe;AAAA,UACnB,qBAAqB;AAAA,QACvB;AACA,gBAAQ,sBAAsB,OAAO,YAAY;AAGjD,YAAI,eAAoD;AACxD,YAAI,EAAE,YAAY,UAAU;AAC1B,cACE,iBACA,qBAAqB,iBAAiB;AAAA,UAEtC,iBAAiB,WAAW,GAC5
B;AACA,2BAAe;AAAA,UACjB;AAAA;AAAA,YAEE,iBAAiB,SAAS;AAAA,YAE1B,mBAAmB,WAAW,iBAAiB;AAAA,YAC/C;AACA,2BAAe;AAAA,UACjB;AAAA,QACF;AAGA,cAAM,gBAAe,0BAAqB,SAArB,YAA6B;AAClD,cAAM,mCACJ,aAAa;AAAA,QACbC,MAAK,QAAQ,MAAMA,QACf,aAAa,UAAU,IACvB;AACN,cAAM,WACJ,iBAAiB,aACb,8BAA8B,gCAAgC,IAC9D;AAEN,QAAAA,QACE,iBAAiB,cAAc,aAAa,aACxCA,QAAO,WACP;AAGN,YAAI,aAAa,YAAY;AAI3B,gBAAM,cAAc,iBAClB,iBAAiB,SAAS,CAC5B;AAEA,cAAI,OAAO,YAAY,YAAY,UAAU;AAC3C,wBAAY,WAAW;AAAA,UACzB,OAAO;AACL,wBAAY,QAAQ,KAAK;AAAA,cACvB,MAAM;AAAA,cACN,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF,OAAO;AACL,2BAAiB;AAAA,YACf,GAAG,mBAAmB;AAAA,cACpB,MAAAA;AAAA,cACA,OAAO,wBAAU,CAAC;AAAA,cAClB,WAAW;AAAA,cACX,aAAa;AAAA,YACf,CAAC;AAAA,UACH;AAAA,QACF;AAGA,cAAM,oBAAuC;AAAA,UAC3C;AAAA,UACA,MAAM;AAAA,UACN,WAAW;AAAA,UACX,aAAa;AAAA,UACb,cAAc,qBAAqB;AAAA,UACnC,OAAO;AAAA,UACP,UAAU,qBAAqB;AAAA,UAC/B,UAAU,qBAAqB;AAAA,UAC/B,UAAS,0BAAqB,YAArB,YAAgC,CAAC;AAAA,UAC1C,UAAU;AAAA,YACR,GAAG,qBAAqB;AAAA,YACxB,UAAS,0BAAqB,gBAArB,mBAAkC;AAAA;AAAA,YAG3C,UAAU,KAAK,MAAM,KAAK,UAAU,gBAAgB,CAAC;AAAA,UACvD;AAAA,UACA,+BAA+B,qBAAqB;AAAA,UACpD,aAAa,iBAAiB;AAAA,QAChC;AACA,cAAM,KAAK,iBAAiB;AAC5B,eAAM,6CAAe;AAErB,mBAAW;AAAA,MACb,SAAS,aAAa;AAGtB,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,4BAA4B,qBAAqB;AAAA,YACjD,oBAAoB;AAAA,cAClB,QAAQ,MAAM,qBAAqB;AAAA,YACrC;AAAA,YACA,yBAAyB;AAAA,cACvB,QAAQ,MAAM,KAAK,UAAU,qBAAqB,SAAS;AAAA,YAC7D;AAAA,YAEA,yBAAyB,qBAAqB,MAAM;AAAA,YACpD,6BACE,qBAAqB,MAAM;AAAA,UAC/B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,0BAA0B;AAAA,QACnC,MAAAA;AAAA,QACA,QACE,UAAU,OACL,SACD,OAAO;AAAA,UACL,EAAE,MAAAA,MAAK;AAAA,UACP;AAAA,YACE,UAAU,qBAAqB;AAAA,YAC/B;AAAA,UACF;AAAA,QACF;AAAA,QACN,WAAW;AAAA,QACX,aAAa;AAAA,QACb,cAAc,qBAAqB;AAAA,QACnC;AAAA,QACA,UAAU,qBAAqB;AAAA,QAC/B,UAAS,0BAAqB,YAArB,YAAgC,CAAC;AAAA,QAC1C,UAAU;AAAA,UACR,GAAG,qBAAqB;AAAA,UACxB,UAAS,0BAAqB,gBAArB,mBAAkC;AAAA,UAC3C,UAAU;AAAA,QACZ;AAAA,QACA,UAAU,qBAAqB;AAAA,QAC/B;AAAA,QACA,kBAAkB,qBAAqB;AAAA,MACzC,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAEA,eAAe,aAAqD;AAAA,EAClE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAOoC;AAClC,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,UAAU,IAAI,OAAO,EAAE,YAAY,UAAU,KAAK,MAAM;AACtD,YAAMC,QAAO,MAAM,QAAQ;AAE3B,WAAIA,SAAA,gBAAAA,MAAM,YAAW,MAAM;AACzB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,WAAW;AAAA,QAC9B,MAAM;AAAA,QACN,YAAY,0BAA0B;AAAA,UACpC;AAAA,UACA,YAAY;AAAA,YACV,GAAG,sBAAsB;AAAA,cACvB,aAAa;AAAA,cACb;AAAA,YACF,CAAC;AAAA,YACD,oBAAoB;AAAA,YACpB,kBAAkB;AAAA,YAClB,oBAAoB;AAAA,cAClB,QAAQ,MAAM,KAAK,UAAU,IAAI;AAAA,YACnC;AAAA,UACF;AAAA,QACF,CAAC;AAAA,QACD;AAAA,QACA,IAAI,OAAM,SAAQ;AAChB,cAAI;AACF,kBAAMO,UAAS,MAAMP,MAAK,QAAS,MAAM;AAAA,cACvC;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAED,gBAAI;AACF,mBAAK;AAAA,gBACH,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,sBAAsB;AAAA,sBACpB,QAAQ,MAAM,KAAK,UAAUO,OAAM;AAAA,oBACrC;AAAA,kBACF;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF,SAAS,SAAS;AAAA,YAKlB;AAEA,mBAAOA;AAAA,UACT,SAAS,OAAO;AACd,kBAAM,IAAI,mBAAmB;AAAA,cAC3B;AAAA,cACA,UAAU;AAAA,cACV,OAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,YAAY;AAAA,IACjB,CAAC,WAAiD,UAAU;AAAA,EAC9D;AACF;AAEA,IAAM,4BAAN,MAEA;AAAA,EAoBE,YAAY,SAgBT;AACD,SAAK,OAAO,QAAQ;AACpB,SAAK,YAAY,QAAQ;AACzB,SAAK,cAAc,QAAQ;AAC3B,SAAK,eAAe,QAAQ;AAC5B,SAAK,QAAQ,QAAQ;AACrB,SAAK,WAAW,QAAQ;AACxB,SAAK,UAAU,QAAQ;AACvB,SAAK,WAAW,QAAQ;AACxB,SAAK,QAAQ,QAAQ;AACrB,SAAK,gCAAgC,QAAQ;AAC7C,SAAK,WAAW,QAAQ;AACxB,SAAK,sBAAsB,QAAQ;AAAA,EACrC;AACF;;;AY1qBA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAC,0BAAiD;AACjD,IAAAC,mBAAiC;AA6B1B,IAAM,OAAO,OAAuB;AAAA,EACzC,MAAM;AAAA,EACN,gBAAgB,OAAO,EAAE,
MAAM,OAAO;AAAA,EACtC,uBAAuB,EAAE,OAAO,GAAmC;AACjE,WAAO;AAAA,EACT;AAAA,EACA,YAAY,EAAE,MAAAC,MAAK,GAAqB;AACtC,WAAOA;AAAA,EACT;AACF;AAEO,IAAM,SAAS,CAAS;AAAA,EAC7B,QAAQ;AACV,MAEsB;AACpB,QAAM,aAAS,2BAAS,WAAW;AAEnC,SAAO;AAAA,IACL,MAAM;AAAA,IACN,gBAAgB,CAAC,EAAE,MAAM,OAAO;AAAA,MAC9B,MAAM;AAAA,MACN,QAAQ,MAAM,4BAA4B,OAAO,aAAa;AAAA,IAChE;AAAA,IACA,uBAAuB,EAAE,QAAQ,MAAM,GAAG;AAGxC,aAAO,MAAM,4BACT,SACA,sBAAsB;AAAA,QACpB,QAAQ;AAAA,QACR,QAAQ,OAAO;AAAA,MACjB,CAAC;AAAA,IACP;AAAA,IACA,YACE,EAAE,MAAAA,MAAK,GACP,SAIA;AACA,YAAM,kBAAc,uCAAc,EAAE,MAAAA,MAAK,CAAC;AAE1C,UAAI,CAAC,YAAY,SAAS;AACxB,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,YAAY;AAAA,UACnB,MAAAA;AAAA,UACA,UAAU,QAAQ;AAAA,UAClB,OAAO,QAAQ;AAAA,QACjB,CAAC;AAAA,MACH;AAEA,YAAM,uBAAmB,2CAAkB;AAAA,QACzC,OAAO,YAAY;AAAA,QACnB;AAAA,MACF,CAAC;AAED,UAAI,CAAC,iBAAiB,SAAS;AAC7B,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,iBAAiB;AAAA,UACxB,MAAAA;AAAA,UACA,UAAU,QAAQ;AAAA,UAClB,OAAO,QAAQ;AAAA,QACjB,CAAC;AAAA,MACH;AAEA,aAAO,iBAAiB;AAAA,IAC1B;AAAA,EACF;AACF;;;ACrGA,IAAAC,0BAAkC;AAClC,IAAAC,mBAAuD;;;ACYhD,SAAS,aACd,SACA,SACiC;AACjC,QAAM,UAAU,QAAQ,UAAU;AAClC,QAAM,UAAU,QAAQ,UAAU;AAElC,MAAI,YACF;AACF,MAAI,YACF;AAEF,MAAI,cAAc;AAClB,MAAI,cAAc;AAGlB,iBAAe,YACb,YACA;AACA,QAAI;AACF,UAAI,aAAa,MAAM;AACrB,oBAAY,QAAQ,KAAK;AAAA,MAC3B;AAEA,YAAM,SAAS,MAAM;AACrB,kBAAY;AAEZ,UAAI,CAAC,OAAO,MAAM;AAChB,mBAAW,QAAQ,OAAO,KAAK;AAAA,MACjC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,SAAS,OAAO;AACd,iBAAW,MAAM,KAAK;AAAA,IACxB;AAAA,EACF;AAGA,iBAAe,YACb,YACA;AACA,QAAI;AACF,UAAI,aAAa,MAAM;AACrB,oBAAY,QAAQ,KAAK;AAAA,MAC3B;AAEA,YAAM,SAAS,MAAM;AACrB,kBAAY;AAEZ,UAAI,CAAC,OAAO,MAAM;AAChB,mBAAW,QAAQ,OAAO,KAAK;AAAA,MACjC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,SAAS,OAAO;AACd,iBAAW,MAAM,KAAK;AAAA,IACxB;AAAA,EACF;AAEA,SAAO,IAAI,eAAgC;AAAA,IACzC,MAAM,KAAK,YAAY;AACrB,UAAI;AAEF,YAAI,aAAa;AACf,gBAAM,YAAY,UAAU;AAC5B;AAAA,QACF;AAGA,YAAI,aAAa;AACf,gBAAM,YAAY,UAAU;AAC5B;AAAA,QACF;AAGA,YAAI,aAAa,MAAM;AACrB,sBAAY,QAAQ,KAAK;AAAA,QAC3B;AACA,YAAI,aAAa,MAAM;AACrB,sBAAY,QAAQ,KAAK;AAAA,QAC3B;AAKA,cAAM,EAAE,QAAQ,OAAO,IAAI,MAAM,QAAQ,KAAK;AAAA,UAC5C,UAAU,KAAK,CAAAC,aAAW,EAAE,QAAAA,SAAQ,QAAQ,QAAQ,EAAE;AAAA,UACtD,UAAU,KAAK,CAAAA,aAAW,EAAE,QAAAA,SAAQ,QAAQ,QAAQ,EAAE;AAAA,QACxD,CAAC;AAED,YAAI,CAAC,OAAO,MAAM;AAChB,qBAAW,QAAQ,OAAO,KAAK;AAAA,QACjC;AAEA,YAAI,WAAW,SAAS;AACtB,sBAAY;AACZ,cAAI,OAAO,MAAM;AAEf,kBAAM,YAAY,UAAU;AAC5B,0BAAc;AAAA,UAChB;AAAA,QACF,OAAO;AACL,sBAAY;AAEZ,cAAI,OAAO,MAAM;AACf,0BAAc;AACd,kBAAM,YAAY,UAAU;AAAA,UAC9B;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,IACA,SAAS;AACP,cAAQ,OAAO;AACf,cAAQ,OAAO;AAAA,IACjB;AAAA,EACF,CAAC;AACH;;;AClIA,IAAAC,mBAA2B;AA+DpB,SAAS,uBAA+D;AAAA,EAC7E;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAUuD;AAErD,MAAI,8BAEO;AACX,QAAM,oBAAoB,IAAI,eAE5B;AAAA,IACA,MAAM,YAAY;AAChB,oCAA8B;AAAA,IAChC;AAAA,EACF,CAAC;AAGD,QAAM,kBAA2C,CAAC;AAGlD,QAAM,yBAAyB,oBAAI,IAAY;AAE/C,MAAI,WAAW;AACf,MAAI,cAEY;AAEhB,WAAS,eAAe;AAEtB,QAAI,YAAY,uBAAuB,SAAS,GAAG;AAIjD,UAAI,eAAe,MAAM;AACvB,oCAA6B,QAAQ,WAAW;AAAA,MAClD;AAEA,kCAA6B,MAAM;AAAA,IACrC;AAAA,EACF;AAGA,QAAM,gBAAgB,IAAI,gBAGxB;AAAA,IACA,MAAM,UACJ,OACA,YAGA;AACA,YAAM,YAAY,MAAM;AAExB,cAAQ,WAAW;AAAA,QAEjB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,SAAS;AACZ,qBAAW,QAAQ,KAAK;AACxB;AAAA,QACF;AAAA,QAGA,KAAK,mBAAmB;AACtB,cAAI,mBAAmB;AACrB,gBAAI,CAAC,gBAAgB,MAAM,UAAU,GAAG;AACtC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM;AAAA,cAClB,CAAC;AAED,8BAAgB,MAAM,UAAU,IAAI;AAAA,YACtC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,YAAY,MAAM;AAAA,cAClB,UAAU,MAAM;AAAA,cAChB,eAAe,MAAM;AAAA,YACvB,CAAC
;AAAA,UACH;AACA;AAAA,QACF;AAAA,QAGA,KAAK,aAAa;AAChB,cAAI;AACF,kBAAM,WAAW,MAAM,cAAc;AAAA,cACnC,UAAU;AAAA,cACV;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAED,uBAAW,QAAQ,QAAQ;AAE3B,kBAAMC,QAAO,MAAO,SAAS,QAAQ;AAErC,gBAAIA,MAAK,WAAW,MAAM;AACxB,oBAAM,sBAAkB,6BAAW;AACnC,qCAAuB,IAAI,eAAe;AAK1C,yBAAW;AAAA,gBACT,MAAM;AAAA,gBACN,YAAY,0BAA0B;AAAA,kBACpC;AAAA,kBACA,YAAY;AAAA,oBACV,GAAG,sBAAsB;AAAA,sBACvB,aAAa;AAAA,sBACb;AAAA,oBACF,CAAC;AAAA,oBACD,oBAAoB,SAAS;AAAA,oBAC7B,kBAAkB,SAAS;AAAA,oBAC3B,oBAAoB;AAAA,sBAClB,QAAQ,MAAM,KAAK,UAAU,SAAS,IAAI;AAAA,oBAC5C;AAAA,kBACF;AAAA,gBACF,CAAC;AAAA,gBACD;AAAA,gBACA,IAAI,OAAM,SACRA,MAAK,QAAS,SAAS,MAAM;AAAA,kBAC3B,YAAY,SAAS;AAAA,kBACrB;AAAA,kBACA;AAAA,gBACF,CAAC,EAAE;AAAA,kBACD,CAAC,WAAgB;AACf,gDAA6B,QAAQ;AAAA,sBACnC,GAAG;AAAA,sBACH,MAAM;AAAA,sBACN;AAAA,oBACF,CAAQ;AAER,2CAAuB,OAAO,eAAe;AAE7C,iCAAa;AAGb,wBAAI;AACF,2BAAK;AAAA,wBACH,0BAA0B;AAAA,0BACxB;AAAA,0BACA,YAAY;AAAA,4BACV,sBAAsB;AAAA,8BACpB,QAAQ,MAAM,KAAK,UAAU,MAAM;AAAA,4BACrC;AAAA,0BACF;AAAA,wBACF,CAAC;AAAA,sBACH;AAAA,oBACF,SAAS,SAAS;AAAA,oBAKlB;AAAA,kBACF;AAAA,kBACA,CAAC,UAAe;AACd,gDAA6B,QAAQ;AAAA,sBACnC,MAAM;AAAA,sBACN,OAAO,IAAI,mBAAmB;AAAA,wBAC5B,UAAU,SAAS;AAAA,wBACnB,UAAU,SAAS;AAAA,wBACnB,OAAO;AAAA,sBACT,CAAC;AAAA,oBACH,CAAC;AAED,2CAAuB,OAAO,eAAe;AAC7C,iCAAa;AAAA,kBACf;AAAA,gBACF;AAAA,cACJ,CAAC;AAAA,YACH;AAAA,UACF,SAAS,OAAO;AACd,wCAA6B,QAAQ;AAAA,cACnC,MAAM;AAAA,cACN;AAAA,YACF,CAAC;AAAA,UACH;AAEA;AAAA,QACF;AAAA,QAEA,KAAK,UAAU;AACb,wBAAc;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,MAAM;AAAA,YACpB,UAAU,MAAM;AAAA,YAChB,OAAO,4BAA4B,MAAM,KAAK;AAAA,YAC9C,+BAA+B,MAAM;AAAA,UACvC;AACA;AAAA,QACF;AAAA,QAEA,SAAS;AACP,gBAAM,mBAA0B;AAChC,gBAAM,IAAI,MAAM,yBAAyB,gBAAgB,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA,IAEA,QAAQ;AACN,iBAAW;AACX,mBAAa;AAAA,IACf;AAAA,EACF,CAAC;AAGD,SAAO,IAAI,eAAmD;AAAA,IAC5D,MAAM,MAAM,YAAY;AAGtB,aAAO,QAAQ,IAAI;AAAA,QACjB,gBAAgB,YAAY,aAAa,EAAE;AAAA,UACzC,IAAI,eAAe;AAAA,YACjB,MAAM,OAAO;AACX,yBAAW,QAAQ,KAAK;AAAA,YAC1B;AAAA,YACA,QAAQ;AAAA,YAER;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,kBAAkB;AAAA,UAChB,IAAI,eAAe;AAAA,YACjB,MAAM,OAAO;AACX,yBAAW,QAAQ,KAAK;AAAA,YAC1B;AAAA,YACA,QAAQ;AACN,yBAAW,MAAM;AAAA,YACnB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;;;AF1QA,IAAMC,0BAAqB,2CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AAiDnE,SAAS,WAAmD;AAAA,EACjE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,4BAA4B,gBAAgB;AAAA,EAC5C,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B,gCAAgC,oBAAoB;AAAA,EACpD,0BAA0B;AAAA,EAC1B,6BAA6B;AAAA,EAC7B,wBAAwB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW;AAAA,IACT,KAAAC,OAAM;AAAA,IACN,YAAAC,cAAaF;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,EAC/B,IAAI,CAAC;AAAA,EACL,GAAG;AACL,GAiH8B;AAC5B,SAAO,IAAI,wBAAwB;AAAA,IACjC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,uCAAY,EAAE,MAAsB;AAAA,IAC/C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,KAAAC;AAAA,IACA;AAAA,IACA,YAAAC;AAAA,EACF,CAAC;AACH;AAEA,IAAM,0BAAN,MAEA;AAAA,EAwCE,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,KAAAD;AAAA,IACA;AAAA,IACA,YAAAC;AAAA,EACF,GAiDG;AAjHH,SAAiB,kBAAkB,IAAI,eAErC;AACF,SAAiB,eAAe,IAAI,eAElC;AACF,SAAiB,sBAAsB,IAAI,eAEzC;AACF,SAAiB,0BAA0B,IAAI,eAE7C;AACF,SAAiB,cAAc,IAAI,eAEjC;AACF,SAAiB,mBAAmB,IAAI,eAEtC;AACF,SAAiB,qBAAqB,IAAI,eAExC;AACF,SAAiB,iBAAiB,IAAI,eAEpC;AACF,SAAiB,kBAAkB,IAAI,eAErC;AACF
,SAAiB,eAAe,IAAI,eAElC;AAqFA,QAAI,WAAW,GAAG;AAChB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAKA,QAAI,mBAAmB;AACvB,QAAI,2BAA2B;AAC/B,QAAI,mBAAmB;AACvB,QAAI,kBAA4D;AAChE,UAAM,mBAEF;AAAA,MACF,IAAIA,YAAW;AAAA,MACf,WAAW,YAAY;AAAA,MACvB,SAAS,MAAM;AAAA,MACf,UAAU,CAAC;AAAA,IACb;AACA,QAAI,oBAA4C,CAAC;AACjD,QAAI,sBAAgD,CAAC;AACrD,QAAI,uBAAiD;AACrD,QAAI,gBAAgD;AACpD,QAAI,2BAAyD;AAC7D,QAAI,WAAmD;AACvD,UAAM,gBAAqC,CAAC;AAC5C,QAAI;AAEJ,UAAM,iBAAiB,IAAI,gBAGzB;AAAA,MACA,MAAM,UAAU,OAAO,YAAY;AACjC,mBAAW,QAAQ,KAAK;AAExB,YACE,MAAM,SAAS,gBACf,MAAM,SAAS,eACf,MAAM,SAAS,iBACf,MAAM,SAAS,+BACf,MAAM,SAAS,mBACf;AACA,iBAAM,mCAAU,EAAE,MAAM;AAAA,QAC1B;AAEA,YAAI,MAAM,SAAS,cAAc;AAC/B,8BAAoB,MAAM;AAC1B,sCAA4B,MAAM;AAClC,8BAAoB,MAAM;AAAA,QAC5B;AAEA,YAAI,MAAM,SAAS,aAAa;AAC9B,4BAAkB,KAAK,KAAK;AAAA,QAC9B;AAEA,YAAI,MAAM,SAAS,eAAe;AAChC,8BAAoB,KAAK,KAAK;AAAA,QAChC;AAEA,YAAI,MAAM,SAAS,eAAe;AAChC,gBAAM,eAAe,mBAAmB;AAAA,YACtC,MAAM;AAAA,YACN,OAAO,wBAAU,CAAC;AAAA,YAClB,WAAW;AAAA,YACX,aAAa;AAAA,UACf,CAAC;AAGD,gBAAM,cAAc,cAAc;AAClC,cAAI,eAAoD;AACxD,cAAI,cAAc,IAAI,UAAU;AAC9B,gBACE,iBACA,MAAM,iBAAiB;AAAA,YAEvB,kBAAkB,WAAW,GAC7B;AACA,6BAAe;AAAA,YACjB;AAAA;AAAA,cAEE,kBAAkB,SAAS;AAAA,cAE3B,oBAAoB,WAAW,kBAAkB;AAAA,cACjD;AACA,6BAAe;AAAA,YACjB;AAAA,UACF;AAGA,gBAAM,oBAAuC;AAAA,YAC3C;AAAA,YACA,MAAM;AAAA,YACN,WAAW;AAAA,YACX,aAAa;AAAA,YACb,cAAc,MAAM;AAAA,YACpB,OAAO,MAAM;AAAA,YACb,UAAU,MAAM;AAAA,YAChB,UAAU,MAAM;AAAA,YAChB,SAAS,MAAM;AAAA,YACf,UAAU;AAAA,cACR,GAAG,MAAM;AAAA,cACT,UAAU,CAAC,GAAG,iBAAiB,UAAU,GAAG,YAAY;AAAA,YAC1D;AAAA,YACA,+BAA+B,MAAM;AAAA,YACrC,aAAa,MAAM;AAAA,UACrB;AAEA,iBAAM,6CAAe;AAErB,wBAAc,KAAK,iBAAiB;AAEpC,8BAAoB,CAAC;AACrB,gCAAsB,CAAC;AACvB,6BAAmB;AACnB,4BAAkB,MAAM;AAExB,cAAI,iBAAiB,QAAQ;AAC3B,uBAAW;AAAA,UACb;AAEA,cAAI,iBAAiB,YAAY;AAC/B,6BAAiB,SAAS,KAAK,GAAG,YAAY;AAC9C,uCAA2B;AAAA,UAC7B;AAAA,QACF;AAEA,YAAI,MAAM,SAAS,UAAU;AAC3B,2BAAiB,KAAK,MAAM,SAAS;AACrC,2BAAiB,YAAY,MAAM,SAAS;AAC5C,2BAAiB,UAAU,MAAM,SAAS;AAC1C,2BAAiB,UAAU,MAAM,SAAS;AAC1C,0BAAgB,MAAM;AACtB,iCAAuB,MAAM;AAC7B,qCAA2B,MAAM;AAAA,QACnC;AAAA,MACF;AAAA,MAEA,MAAM,MAAM,YAAY;AAnhB9B,YAAAC;AAohBQ,YAAI;AAEF,gBAAM,WAAW,cAAc,cAAc,SAAS,CAAC;AACvD,cAAI,UAAU;AACZ,iBAAK,gBAAgB,QAAQ,SAAS,QAAQ;AAC9C,iBAAK,eAAe,QAAQ,SAAS,OAAO;AAC5C,iBAAK,gBAAgB,QAAQ,SAAS,QAAQ;AAC9C,iBAAK,iBAAiB,QAAQ,SAAS,SAAS;AAChD,iBAAK,mBAAmB,QAAQ,SAAS,WAAW;AACpD,iBAAK,wBAAwB;AAAA,cAC3B,SAAS;AAAA,YACX;AAAA,UACF;AAGA,gBAAM,eAAe,sDAAwB;AAC7C,gBAAM,QAAQ,wCAAiB;AAAA,YAC7B,kBAAkB;AAAA,YAClB,cAAc;AAAA,YACd,aAAa;AAAA,UACf;AAGA,eAAK,oBAAoB,QAAQ,YAAY;AAC7C,eAAK,aAAa,QAAQ,KAAK;AAG/B,eAAK,YAAY,QAAQ,gBAAgB;AACzC,eAAK,aAAa,QAAQ,aAAa;AAGvC,iBAAM,qCAAW;AAAA,YACf;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA,MAAM;AAAA,YACN,WAAW,SAAS;AAAA,YACpB,aAAa,SAAS;AAAA,YACtB,UAASA,OAAA,SAAS,YAAT,OAAAA,OAAoB,CAAC;AAAA,YAC9B,UAAU,SAAS;AAAA,YACnB,UAAU,SAAS;AAAA,YACnB,+BACE,SAAS;AAAA,YACX,OAAO;AAAA,UACT;AAGA,mBAAS;AAAA,YACP,0BAA0B;AAAA,cACxB;AAAA,cACA,YAAY;AAAA,gBACV,4BAA4B;AAAA,gBAC5B,oBAAoB,EAAE,QAAQ,MAAM,iBAAiB;AAAA,gBACrD,yBAAyB;AAAA,kBACvB,QAAQ,MAAG;AA1kB7B,wBAAAA;AA2kBoB,6BAAAA,OAAA,SAAS,cAAT,gBAAAA,KAAoB,UAChB,KAAK,UAAU,SAAS,SAAS,IACjC;AAAA;AAAA,gBACR;AAAA,gBAEA,yBAAyB,MAAM;AAAA,gBAC/B,6BAA6B,MAAM;AAAA,cACrC;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AACd,qBAAW,MAAM,KAAK;AAAA,QACxB,UAAE;AACA,mBAAS,IAAI;AAAA,QACf;AAAA,MACF;AAAA,IACF,CAAC;AAGD,UAAM,mBAAmB,uBAA8C;AACvE,SAAK,YAAY,iBAAiB;AAClC,SAAK,cAAc,iBAAiB;AACpC,SAAK,cACH,YACI,iBAAiB,OAAO,YAAY,SAAS,IAC7C,iBAAiB,QACrB,YAAY,cAAc;AAE5B,UAAM,EAAE,YAAY,MAAM,IAAI,eAAe;AAAA,MAC3C,YAAY;AAAA,IACd,CAAC;AAED,UAAM,SAAS,UAAU,SAAS;AAElC,UAAM,0BAA0B,2BAA2B;AAAA,MACzD;AAAA
,MACA;AAAA,MACA;AAAA,MACA,UAAU,EAAE,GAAG,UAAU,WAAW;AAAA,IACtC,CAAC;AAED,UAAM,gBAAgB,kBAAkB;AAAA,MACtC,QAAQ,EAAE,QAAQ,QAAQ,SAAS;AAAA,MACnC;AAAA,IACF,CAAC;AAED,UAAM,OAAO;AAEb,eAAW;AAAA,MACT,MAAM;AAAA,MACN,YAAY,0BAA0B;AAAA,QACpC;AAAA,QACA,YAAY;AAAA,UACV,GAAG,sBAAsB,EAAE,aAAa,iBAAiB,UAAU,CAAC;AAAA,UACpE,GAAG;AAAA;AAAA,UAEH,aAAa;AAAA,YACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,UAC1D;AAAA,UACA,wBAAwB;AAAA,QAC1B;AAAA,MACF,CAAC;AAAA,MACD;AAAA,MACA,aAAa;AAAA,MACb,IAAI,OAAM,gBAAe;AACvB,mBAAW;AAEX,uBAAe,WAAW;AAAA,UACxB;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAAC;AAAA,UACA;AAAA,UACA;AAAA,QACF,GAOG;AAED,gBAAM,eACJ,iBAAiB,WAAW,IAAI,cAAc,OAAO;AAEvD,gBAAM,oBAAoB;AAAA,YACxB,GAAG,cAAc;AAAA,YACjB,GAAG;AAAA,UACL;AAEA,gBAAM,iBAAiB,MAAM,6BAA6B;AAAA,YACxD,QAAQ;AAAA,cACN,MAAM;AAAA,cACN,QAAQ,cAAc;AAAA,cACtB,UAAU;AAAA,YACZ;AAAA,YACA,wBAAwB,MAAM;AAAA,YAC9B,kBAAkB,MAAM;AAAA,UAC1B,CAAC;AAED,gBAAM,OAAO;AAAA,YACX,MAAM;AAAA,YACN,GAAG,0BAA0B,EAAE,OAAO,YAAY,YAAY,CAAC;AAAA,UACjE;AAEA,gBAAM;AAAA,YACJ,QAAQ,EAAE,QAAQ,UAAU,aAAa,QAAQ;AAAA,YACjD;AAAA,YACA;AAAA,UACF,IAAI,MAAM;AAAA,YAAM,MACd,WAAW;AAAA,cACT,MAAM;AAAA,cACN,YAAY,0BAA0B;AAAA,gBACpC;AAAA,gBACA,YAAY;AAAA,kBACV,GAAG,sBAAsB;AAAA,oBACvB,aAAa;AAAA,oBACb;AAAA,kBACF,CAAC;AAAA,kBACD,GAAG;AAAA,kBACH,oBAAoB;AAAA,oBAClB,OAAO,MAAM;AAAA,kBACf;AAAA,kBACA,sBAAsB;AAAA,oBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,kBAC5C;AAAA,kBACA,mBAAmB;AAAA;AAAA,oBAEjB,OAAO,MAAG;AA5sB9B,0BAAAD;AA4sBiC,8BAAAA,OAAA,KAAK,UAAL,gBAAAA,KAAY,IAAI,CAAAE,UAAQ,KAAK,UAAUA,KAAI;AAAA;AAAA,kBAC1D;AAAA,kBACA,wBAAwB;AAAA,oBACtB,OAAO,MACL,KAAK,cAAc,OACf,KAAK,UAAU,KAAK,UAAU,IAC9B;AAAA,kBACR;AAAA;AAAA,kBAGA,iBAAiB,MAAM;AAAA,kBACvB,wBAAwB,MAAM;AAAA,kBAC9B,oCAAoC,SAAS;AAAA,kBAC7C,6BAA6B,SAAS;AAAA,kBACtC,mCAAmC,SAAS;AAAA,kBAC5C,iCAAiC,SAAS;AAAA,kBAC1C,8BAA8B,SAAS;AAAA,kBACvC,wBAAwB,SAAS;AAAA,kBACjC,wBAAwB,SAAS;AAAA,gBACnC;AAAA,cACF,CAAC;AAAA,cACD;AAAA,cACA,aAAa;AAAA,cACb,IAAI,OAAMC,mBAAiB;AAAA,gBACzB,kBAAkBL,KAAI;AAAA;AAAA,gBACtB,cAAAK;AAAA,gBACA,QAAQ,MAAM,MAAM,SAAS;AAAA,kBAC3B;AAAA,kBACA,GAAG,oBAAoB,QAAQ;AAAA,kBAC/B,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF,CAAC;AAAA,UACH;AAEA,gBAAM,oBAAoB,uBAAuB;AAAA,YAC/C;AAAA,YACA,iBAAiB;AAAA,YACjB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA;AAAA,UACF,CAAC;AAED,gBAAM,cAAc,4BAAW,CAAC;AAChC,gBAAM,gBAAwC,CAAC;AAC/C,gBAAM,kBAA4C,CAAC;AACnD,cAAI,mBAAiC;AACrC,cAAI,YAAgC;AAAA,YAClC,cAAc;AAAA,YACd,kBAAkB;AAAA,YAClB,aAAa;AAAA,UACf;AACA,cAAI;AACJ,cAAI,iBAAiB;AACrB,cAAI,WAAW;AACf,cAAI,eAAeF,cAAa,aAAa,mBAAmB;AAChE,cAAI;AACJ,cAAI,eAAiE;AAAA,YACnE,IAAIF,YAAW;AAAA,YACf,WAAW,YAAY;AAAA,YACvB,SAAS,MAAM;AAAA,UACjB;AAGA,cAAI,cAAc;AAClB,cAAI,qBAAqB;AACzB,cAAI,qBAAqB;AACzB,cAAI,sBAAsB;AAE1B,yBAAe,iBAAiB;AAAA,YAC9B;AAAA,YACA;AAAA,UACF,GAGG;AACD,uBAAW,QAAQ,KAAK;AAExB,wBAAY,MAAM;AAClB,4BAAgB,MAAM;AACtB,iCAAqB;AACrB,kCAAsB,MAAM,UAAU,QAAQ,MAAM,MAAM;AAAA,UAC5D;AAEA,eAAK;AAAA,YACH,kBAAkB;AAAA,cAChB,IAAI,gBAGF;AAAA,gBACA,MAAM,UAAU,OAAO,YAA2B;AA9yBlE,sBAAAC,MAAA;AAgzBkB,sBAAI,gBAAgB;AAClB,0BAAM,iBAAiBF,KAAI,IAAI;AAE/B,qCAAiB;AAEjB,iCAAa,SAAS,wBAAwB;AAAA,sBAC5C,8BAA8B;AAAA,oBAChC,CAAC;AAED,iCAAa,cAAc;AAAA,sBACzB,8BAA8B;AAAA,oBAChC,CAAC;AAAA,kBACH;AAGA,sBACE,MAAM,SAAS,gBACf,MAAM,UAAU,WAAW,GAC3B;AACA;AAAA,kBACF;AAEA,wBAAM,YAAY,MAAM;AACxB,0BAAQ,WAAW;AAAA,oBACjB,KAAK,cAAc;AACjB,0BAAI,eAAe;AAGjB,8BAAM,mBACJ,sBAAsB,uBAClB,MAAM,UAAU,UAAU,IAC1B,MAAM;AAEZ,4BAAI,iBAAiB,WAAW,GAAG;AACjC;AAAA,wBACF;AAEA,6CAAqB;AACrB,uCAAe;AAEf,8BAAM,QAAQ,sBAAsB,WAAW;AAG/C,4BAAI,SAAS,MAAM;AACjB,wCAAc,MAAM;AAEpB,gCAAM,iBAAiB;AAAA,4BACrB;AAAA,4BACA,OAAO;AAAA,8BACL,MAAM;AAAA,8BACN,WAAW,MA
AM,SAAS,MAAM;AAAA,4BAClC;AAAA,0BACF,CAAC;AAAA,wBACH;AAAA,sBACF,OAAO;AACL,8BAAM,iBAAiB,EAAE,YAAY,MAAM,CAAC;AAAA,sBAC9C;AAEA;AAAA,oBACF;AAAA,oBAEA,KAAK,aAAa;AAChB,iCAAW,QAAQ,KAAK;AAExB,oCAAc,KAAK,KAAK;AACxB;AAAA,oBACF;AAAA,oBAEA,KAAK,eAAe;AAClB,iCAAW,QAAQ,KAAK;AAExB,sCAAgB,KAAK,KAAK;AAC1B;AAAA,oBACF;AAAA,oBAEA,KAAK,qBAAqB;AACxB,qCAAe;AAAA,wBACb,KAAIE,OAAA,MAAM,OAAN,OAAAA,OAAY,aAAa;AAAA,wBAC7B,YAAW,WAAM,cAAN,YAAmB,aAAa;AAAA,wBAC3C,UAAS,WAAM,YAAN,YAAiB,aAAa;AAAA,sBACzC;AACA;AAAA,oBACF;AAAA,oBAEA,KAAK,UAAU;AAGb,kCAAY,MAAM;AAClB,yCAAmB,MAAM;AACzB,6CACE,MAAM;AACR,qCAAe,MAAM;AAIrB,4BAAM,aAAaF,KAAI,IAAI;AAC3B,mCAAa,SAAS,kBAAkB;AACxC,mCAAa,cAAc;AAAA,wBACzB,0BAA0B;AAAA,wBAC1B,4CACG,MAAO,UAAU,mBAAoB;AAAA,sBAC1C,CAAC;AAED;AAAA,oBACF;AAAA,oBAEA,KAAK;AAAA,oBACL,KAAK,mBAAmB;AACtB,iCAAW,QAAQ,KAAK;AACxB;AAAA,oBACF;AAAA,oBAEA,KAAK,SAAS;AACZ,iCAAW,QAAQ,KAAK;AACxB,yCAAmB;AACnB;AAAA,oBACF;AAAA,oBAEA,SAAS;AACP,4BAAM,kBAAyB;AAC/B,4BAAM,IAAI,MAAM,uBAAuB,eAAe,EAAE;AAAA,oBAC1D;AAAA,kBACF;AAAA,gBACF;AAAA;AAAA,gBAGA,MAAM,MAAM,YAAY;AACtB,wBAAM,oBACJ,cAAc,SAAS,IACnB,KAAK,UAAU,aAAa,IAC5B;AAGN,sBAAI,eACF;AACF,sBAAI,cAAc,IAAI,UAAU;AAC9B,wBACE,iBACA,qBAAqB;AAAA,oBAErB,cAAc,WAAW,GACzB;AACA,qCAAe;AAAA,oBACjB;AAAA;AAAA,sBAEE,cAAc,SAAS;AAAA,sBAEvB,gBAAgB,WAAW,cAAc;AAAA,sBACzC;AACA,qCAAe;AAAA,oBACjB;AAAA,kBACF;AAIA,sBACE,iBACA,YAAY,SAAS,MACpB,iBAAiB;AAAA,kBACfG,cAAa,cAAc,CAAC,qBAC/B;AACA,0BAAM,iBAAiB;AAAA,sBACrB;AAAA,sBACA,OAAO;AAAA,wBACL,MAAM;AAAA,wBACN,WAAW;AAAA,sBACb;AAAA,oBACF,CAAC;AACD,kCAAc;AAAA,kBAChB;AAGA,sBAAI;AACF,iCAAa;AAAA,sBACX,0BAA0B;AAAA,wBACxB;AAAA,wBACA,YAAY;AAAA,0BACV,4BAA4B;AAAA,0BAC5B,oBAAoB,EAAE,QAAQ,MAAM,SAAS;AAAA,0BAC7C,yBAAyB;AAAA,4BACvB,QAAQ,MAAM;AAAA,0BAChB;AAAA,0BACA,kBAAkB,aAAa;AAAA,0BAC/B,qBAAqB,aAAa;AAAA,0BAClC,yBACE,aAAa,UAAU,YAAY;AAAA,0BAErC,yBAAyB,UAAU;AAAA,0BACnC,6BACE,UAAU;AAAA;AAAA,0BAGZ,kCAAkC,CAAC,gBAAgB;AAAA,0BACnD,sBAAsB,aAAa;AAAA,0BACnC,yBAAyB,aAAa;AAAA,0BACtC,6BAA6B,UAAU;AAAA,0BACvC,8BACE,UAAU;AAAA,wBACd;AAAA,sBACF,CAAC;AAAA,oBACH;AAAA,kBACF,SAAS,OAAO;AAAA,kBAEhB,UAAE;AAEA,iCAAa,IAAI;AAAA,kBACnB;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,OAAO;AAAA,oBACP,+BAA+B;AAAA,oBAC/B,UAAU;AAAA,oBACV,SAAS;AAAA,oBACT,UAAU;AAAA,sBACR,GAAG;AAAA,sBACH,SAAS,2CAAa;AAAA,oBACxB;AAAA,oBACA;AAAA,oBACA,aAAa,iBAAiB;AAAA,kBAChC,CAAC;AAED,wBAAM,gBAAgB,sBAAsB,OAAO,SAAS;AAE5D,sBAAI,iBAAiB,QAAQ;AAC3B,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,cAAc;AAAA,sBACd,OAAO;AAAA,sBACP,+BAA+B;AAAA,sBAC/B,UAAU;AAAA,sBACV,UAAU;AAAA,wBACR,GAAG;AAAA,wBACH,SAAS,2CAAa;AAAA,sBACxB;AAAA,oBACF,CAAC;AAED,yBAAK,YAAY;AAAA,kBACnB,OAAO;AAEL,wBAAIA,cAAa,YAAY;AAI3B,4BAAM,cAAc,iBAClB,iBAAiB,SAAS,CAC5B;AAEA,0BAAI,OAAO,YAAY,YAAY,UAAU;AAC3C,oCAAY,WAAW;AAAA,sBACzB,OAAO;AACL,oCAAY,QAAQ,KAAK;AAAA,0BACvB,MAAM;AAAA,0BACN,MAAM;AAAA,wBACR,CAAC;AAAA,sBACH;AAAA,oBACF,OAAO;AACL,uCAAiB;AAAA,wBACf,GAAG,mBAAmB;AAAA,0BACpB,MAAM;AAAA,0BACN,OAAO,wBAAU,CAAC;AAAA,0BAClB,WAAW;AAAA,0BACX,aAAa;AAAA,wBACf,CAAC;AAAA,sBACH;AAAA,oBACF;AAEA,0BAAM,WAAW;AAAA,sBACf,aAAa,cAAc;AAAA,sBAC3B;AAAA,sBACA,OAAO;AAAA,sBACP,UAAU;AAAA,sBACV,kBAAkB;AAAA,sBAClB,sBAAsB;AAAA,oBACxB,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAGA,cAAM,WAAW;AAAA,UACf,aAAa;AAAA,UACb,kBAAkB,CAAC;AAAA,UACnB,OAAO;AAAA,YACL,cAAc;AAAA,YACd,kBAAkB;AAAA,YAClB,aAAa;AAAA,UACf;AAAA,UACA,kBAAkB;AAAA,UAClB,UAAU;AAAA,UACV,sBAAsB;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF,CAAC,EAAE,MAAM,WAAS;AAEhB,WAAK;AAAA,QACH,IAAI,eAAe;AAAA,UACjB,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,SAAS,MAAM,CAAC;AAC3C,uBAAW,MAAM;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AACA,WAAK,YAAY;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,W
AAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEA,IAAI,eAAe;AACjB,WAAO,KAAK,oBAAoB;AAAA,EAClC;AAAA,EAEA,IAAI,gCAAgC;AAClC,WAAO,KAAK,wBAAwB;AAAA,EACtC;AAAA,EAEA,IAAI,OAAO;AACT,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,IAAI,YAAY;AACd,WAAO,KAAK,iBAAiB;AAAA,EAC/B;AAAA,EAEA,IAAI,cAAc;AAChB,WAAO,KAAK,mBAAmB;AAAA,EACjC;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,YAAY;AAClB,UAAM,CAAC,SAAS,OAAO,IAAI,KAAK,WAAW,IAAI;AAC/C,SAAK,aAAa;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,aAA0C;AAC5C,WAAO;AAAA,MACL,KAAK,UAAU,EAAE;AAAA,QACf,IAAI,gBAA+C;AAAA,UACjD,UAAU,OAAO,YAAY;AAC3B,gBAAI,MAAM,SAAS,cAAc;AAC/B,yBAAW,QAAQ,MAAM,SAAS;AAAA,YACpC,WAAW,MAAM,SAAS,SAAS;AACjC,yBAAW,MAAM,MAAM,KAAK;AAAA,YAC9B;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,aAAyD;AAC3D,WAAO,0BAA0B,KAAK,UAAU,CAAC;AAAA,EACnD;AAAA,EAEQ,qBAAqB;AAAA,IAC3B,iBAAAG,mBAAkB,MAAM;AAAA;AAAA,IACxB,YAAY;AAAA,EACd,IAGI,CAAC,GAAqC;AACxC,QAAI,qBAAqB;AAEzB,UAAM,sBAAsB,IAAI,gBAG9B;AAAA,MACA,MAAM,UAAU,OAAO,YAA2B;AAChD,mBAAW,QAAQ,KAAK;AAExB,YAAI,MAAM,SAAS,cAAc;AAC/B,gCAAsB,MAAM;AAAA,QAC9B;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,yBAAyB,IAAI,gBAGjC;AAAA,MACA,WAAW,OAAO,OAAO,eAAe;AACtC,cAAM,YAAY,MAAM;AACxB,gBAAQ,WAAW;AAAA,UACjB,KAAK,cAAc;AACjB,uBAAW,YAAQ,uCAAqB,QAAQ,MAAM,SAAS,CAAC;AAChE;AAAA,UACF;AAAA,UAEA,KAAK,6BAA6B;AAChC,uBAAW;AAAA,kBACT,uCAAqB,6BAA6B;AAAA,gBAChD,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM;AAAA,cAClB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,mBAAmB;AACtB,uBAAW;AAAA,kBACT,uCAAqB,mBAAmB;AAAA,gBACtC,YAAY,MAAM;AAAA,gBAClB,eAAe,MAAM;AAAA,cACvB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,aAAa;AAChB,uBAAW;AAAA,kBACT,uCAAqB,aAAa;AAAA,gBAChC,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM;AAAA,gBAChB,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,eAAe;AAClB,uBAAW;AAAA,kBACT,uCAAqB,eAAe;AAAA,gBAClC,YAAY,MAAM;AAAA,gBAClB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,SAAS;AACZ,uBAAW;AAAA,kBACT,uCAAqB,SAASA,iBAAgB,MAAM,KAAK,CAAC;AAAA,YAC5D;AACA;AAAA,UACF;AAAA,UAEA,KAAK,eAAe;AAClB,uBAAW;AAAA,kBACT,uCAAqB,eAAe;AAAA,gBAClC,cAAc,MAAM;AAAA,gBACpB,OAAO,YACH;AAAA,kBACE,cAAc,MAAM,MAAM;AAAA,kBAC1B,kBAAkB,MAAM,MAAM;AAAA,gBAChC,IACA;AAAA,gBACJ,aAAa,MAAM;AAAA,cACrB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,UAAU;AACb,uBAAW;AAAA,kBACT,uCAAqB,kBAAkB;AAAA,gBACrC,cAAc,MAAM;AAAA,gBACpB,OAAO,YACH;AAAA,kBACE,cAAc,MAAM,MAAM;AAAA,kBAC1B,kBAAkB,MAAM,MAAM;AAAA,gBAChC,IACA;AAAA,cACN,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,SAAS;AACP,kBAAM,kBAAyB;AAC/B,kBAAM,IAAI,MAAM,uBAAuB,eAAe,EAAE;AAAA,UAC1D;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK,WACT,YAAY,mBAAmB,EAC/B,YAAY,sBAAsB;AAAA,EACvC;AAAA,EAEA,yBACE,UACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBAAAA;AAAA,IACA;AAAA,EACF,IAII,CAAC,GACL;AACA,0BAAsB;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS,2BAA2B,SAAS;AAAA,QAC3C,aAAa;AAAA,QACb,mBAAmB;AAAA,MACrB,CAAC;AAAA,MACD,QAAQ,KAAK,aAAa,EAAE,MAAM,iBAAAA,kBAAiB,UAAU,CAAC;AAAA,IAChE,CAAC;AAAA,EACH;AAAA,EAEA,yBAAyB,UAA0B,MAAqB;AACtE,0BAAsB;AAAA,MACpB;AAAA,MACA,QAAQ,6BAAM;AAAA,MACd,YAAY,6BAAM;AAAA,MAClB,SAAS,2BAA2B,6BAAM,SAAS;AAAA,QACjD,aAAa;AAAA,MACf,CAAC;AAAA,MACD,QAAQ,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC;AAAA,IAC7D,CAAC;AAAA,EACH;AAAA;AAAA,EAGA,aAAa,SAIV;AACD,UAAM,SAAS,KAAK,qBAAqB;AAAA,MACvC,iBAAiB,mCAAS;AAAA,MAC1B,WAAW,mCAAS;AAAA,IACtB,CAAC,EAAE,YAAY,IAAI,kBAAkB,CAAC;AAEtC,YAAO,mCAAS,QAAO,aAAa,mCAAS,KAAK,QAAQ,MAAM,IAAI;AAAA,EACtE;AAAA,EAEA,oBAAoB,QAA0B;AAC5C,WAAO;
AAAA,MACL,KAAK,qBAAqB;AAAA,QACxB,iBAAiB,OAAO;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,qBAAqB;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBAAAA;AAAA,IACA;AAAA,EACF,IAII,CAAC,GAAa;AAChB,WAAO,IAAI;AAAA,MACT,KAAK,aAAa,EAAE,MAAM,iBAAAA,kBAAiB,UAAU,CAAC;AAAA,MACtD;AAAA,QACE;AAAA,QACA;AAAA,QACA,SAAS,uBAAuB,SAAS;AAAA,UACvC,aAAa;AAAA,UACb,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,qBAAqB,MAA+B;AAr4CtD,QAAAJ;AAs4CI,WAAO,IAAI,SAAS,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC,GAAG;AAAA,MACxE,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,MACxB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,QAC7C,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;;;AGn4CO,SAAS,aAAqD;AAAA,EACnE,YAAY;AAAA,EACZ,WAAW,EAAE,OAAAK,SAAQ,MAAc,IAAI,CAAC;AAC1C,IASI,CAAC,GAE+D;AAClE,MAAI,SAAS;AAEb,SAAO,MACL,IAAI,gBAA8D;AAAA,IAChE,MAAM,UAAU,OAAO,YAAY;AACjC,UAAI,MAAM,SAAS,eAAe;AAChC,YAAI,OAAO,SAAS,GAAG;AACrB,qBAAW,QAAQ,EAAE,MAAM,cAAc,WAAW,OAAO,CAAC;AAC5D,mBAAS;AAAA,QACX;AAEA,mBAAW,QAAQ,KAAK;AACxB;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,cAAc;AAC/B,mBAAW,QAAQ,KAAK;AACxB;AAAA,MACF;AAEA,gBAAU,MAAM;AAGhB,aAAO,OAAO,MAAM,IAAI,GAAG;AACzB,cAAM,kBAAkB,OAAO,OAAO,IAAI;AAC1C,cAAM,OAAO,OAAO,MAAM,GAAG,kBAAkB,CAAC;AAChD,mBAAW,QAAQ,EAAE,MAAM,cAAc,WAAW,KAAK,CAAC;AAC1D,iBAAS,OAAO,MAAM,kBAAkB,CAAC;AAEzC,YAAI,YAAY,GAAG;AACjB,gBAAMA,OAAM,SAAS;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACL;;;AC7CO,IAAM,iCAAiC,CAAC;AAAA,EAC7C;AAAA,EACA,YAAY,EAAE,iBAAiB,cAAc,WAAW;AAAA,EACxD;AAAA,EACA;AACF,MAKuB;AACrB,iBAAe,YAAY;AAAA,IACzB;AAAA,IACA;AAAA,EACF,GAGG;AACD,WAAO,kBAAkB,MAAM,gBAAgB,EAAE,QAAQ,KAAK,CAAC,IAAI;AAAA,EACrE;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IAEtB,UAAU,kCAAc,MAAM;AAAA,IAC9B,SAAS,4BAAW,MAAM;AAAA,IAE1B,6BAA6B,MAAM;AAAA,IACnC,mBAAmB,MAAM;AAAA,IACzB,aAAa,MAAM;AAAA,IACnB,2BAA2B,MAAM;AAAA,IAEjC,MAAM,WACJ,QAC6D;AAC7D,YAAM,oBAAoB,MAAM,YAAY,EAAE,QAAQ,MAAM,WAAW,CAAC;AACxE,YAAM,aAAa,YAAY,MAAM,WAAW,iBAAiB;AACjE,aAAO,eACH,aAAa,EAAE,YAAY,QAAQ,mBAAmB,MAAM,CAAC,IAC7D,WAAW;AAAA,IACjB;AAAA,IAEA,MAAM,SACJ,QAC2D;AAC3D,YAAM,oBAAoB,MAAM,YAAY,EAAE,QAAQ,MAAM,SAAS,CAAC;AACtE,YAAM,WAAW,YAAY,MAAM,SAAS,iBAAiB;AAC7D,aAAO,aACH,WAAW,EAAE,UAAU,QAAQ,mBAAmB,MAAM,CAAC,IACzD,SAAS;AAAA,IACf;AAAA,EACF;AACF;;;ACjEA,IAAAC,oBAAiC;AAa1B,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF,GAIa;AACX,SAAO;AAAA,IACL,cAAc,SAAkC;AAC9C,UAAI,kBAAkB,QAAQ,WAAW,gBAAgB;AACvD,eAAO,eAAe,OAAO;AAAA,MAC/B;AAEA,UAAI,kBAAkB;AACpB,eAAO,iBAAiB,cAAc,OAAO;AAAA,MAC/C;AAEA,YAAM,IAAI,mCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,IACpE;AAAA,IAEA,mBAAmB,SAA2C;AAC5D,UAAI,uBAAuB,QAAQ,WAAW,qBAAqB;AACjE,eAAO,oBAAoB,OAAO;AAAA,MACpC;AAEA,UAAI,kBAAkB;AACpB,eAAO,iBAAiB,mBAAmB,OAAO;AAAA,MACpD;AAEA,YAAM,IAAI,mCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,IACzE;AAAA,EACF;AACF;;;ACjDA,IAAAC,oBAA6C;AAE7C,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AAJhC,IAAAE;AAMO,IAAM,sBAAN,cAAkC,mCAAiB;AAAA,EAMxD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,qBAAqB,UAAU,0BAA0B,mBAAmB,KAAK,CAAC;AAAA,EAC9F,GAMG;AACD,UAAM,EAAE,WAAWH,QAAM,SAAS,WAAW,QAAQ,CAAC;AAlBxD,SAAkBG,QAAU;AAoB1B,SAAK,aAAa;AAClB,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEA,OAAO,WAAW,OAA8C;AAC9D,WAAO,6BAAW,UAAU,OAAOF,QAAM;AAAA,EAC3C;AACF;AA3BoBE,OAAAD;;;ACPpB,IAAAE,oBAAiC;AAO1B,SAAS,oCACd,WACU;AACV,QAAM,WAAW,IAAI,wBAAwB;AAE7C,aAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,SAAS,GAAG;AACtD,aAAS,iBAAiB,EAAE,IAAI,SAAS,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;AAEA,IAAM,0BAAN,MAAkD;AAAA,EAAlD;AACE,SAAQ,YAAsC,CAAC;AAAA;AAAA,EAE/C,iBAAiB,EAAE,IAAI,SAAS,GAA6C;AAC3E,SAAK,UAAU,EAAE,IAAI;AAAA,EACvB;AAAA,EAEQ,YAAY,IAAsB;AACxC,UAAM,WAAW,KAAK,UAAU,EAAE;AAElC,QAAI,YAAY,MAAM;AACpB,YAAM,IAAI,oBAAoB;AAAA,QAC5B,SAAS;AAAA,QACT,WAAW;
AAAA,QACX,YAAY;AAAA,QACZ,oBAAoB,OAAO,KAAK,KAAK,SAAS;AAAA,MAChD,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,QACN,IACA,WACkB;AAClB,UAAM,QAAQ,GAAG,QAAQ,GAAG;AAE5B,QAAI,UAAU,IAAI;AAChB,YAAM,IAAI,mCAAiB;AAAA,QACzB,SAAS;AAAA,QACT;AAAA,QACA,SACE,WAAW,SAAS,qBAAqB,EAAE;AAAA,MAE/C,CAAC;AAAA,IACH;AAEA,WAAO,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,GAAG,MAAM,QAAQ,CAAC,CAAC;AAAA,EACjD;AAAA,EAEA,cAAc,IAA2B;AA5D3C,QAAAC,MAAA;AA6DI,UAAM,CAAC,YAAY,OAAO,IAAI,KAAK,QAAQ,IAAI,eAAe;AAC9D,UAAM,SAAQ,MAAAA,OAAA,KAAK,YAAY,UAAU,GAAE,kBAA7B,wBAAAA,MAA6C;AAE3D,QAAI,SAAS,MAAM;AACjB,YAAM,IAAI,mCAAiB,EAAE,SAAS,IAAI,WAAW,gBAAgB,CAAC;AAAA,IACxE;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,mBAAmB,IAAoC;AAvEzD,QAAAA;AAwEI,UAAM,CAAC,YAAY,OAAO,IAAI,KAAK,QAAQ,IAAI,oBAAoB;AACnE,UAAM,WAAW,KAAK,YAAY,UAAU;AAE5C,UAAM,SAAQA,OAAA,SAAS,uBAAT,gBAAAA,KAAA,eAA8B;AAE5C,QAAI,SAAS,MAAM;AACjB,YAAM,IAAI,mCAAiB;AAAA,QACzB,SAAS;AAAA,QACT,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,IAAoC;AAChD,WAAO,KAAK,mBAAmB,EAAE;AAAA,EACnC;AACF;;;ACuBO,SAAS,KACdC,OAC8B;AAC9B,SAAOA;AACT;;;AC9GO,SAAS,iBAAiB,SAAmB,SAAmB;AACrE,MAAI,QAAQ,WAAW,QAAQ,QAAQ;AACrC,UAAM,IAAI;AAAA,MACR,+CAA+C,QAAQ,MAAM,uBAAuB,QAAQ,MAAM;AAAA,IACpG;AAAA,EACF;AAEA,SACE,WAAW,SAAS,OAAO,KAAK,UAAU,OAAO,IAAI,UAAU,OAAO;AAE1E;AAQA,SAAS,WAAW,SAAmB,SAAmB;AACxD,SAAO,QAAQ;AAAA,IACb,CAAC,aAAqB,OAAe,UACnC,cAAc,QAAQ,QAAQ,KAAK;AAAA,IACrC;AAAA,EACF;AACF;AAOA,SAAS,UAAU,QAAkB;AACnC,SAAO,KAAK,KAAK,WAAW,QAAQ,MAAM,CAAC;AAC7C;;;AC3CA,IAAAC,oBAIO;AA0CA,SAAS,kBACd,EAAE,UAAU,UAAU,GACtBC,UACU;AACV,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,MAAM,YAAY;AAnD5B,UAAAC;AAoDM,YAAM,cAAc,IAAI,YAAY;AAEpC,YAAM,cAAc,CAAC,YAA8B;AACjD,mBAAW;AAAA,UACT,YAAY;AAAA,gBACV,6CAA0B,qBAAqB,OAAO;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,kBAAkB,CAAC,YAAyB;AAChD,mBAAW;AAAA,UACT,YAAY;AAAA,gBACV,6CAA0B,gBAAgB,OAAO;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,YAAY,CAAC,iBAAyB;AAC1C,mBAAW;AAAA,UACT,YAAY,WAAO,6CAA0B,SAAS,YAAY,CAAC;AAAA,QACrE;AAAA,MACF;AAEA,YAAM,gBAAgB,OAAOC,YAAgB;AA5EnD,YAAAD,MAAA;AA6EQ,YAAI,SAA0B;AAE9B,yBAAiB,SAASC,SAAQ;AAChC,kBAAQ,MAAM,OAAO;AAAA,YACnB,KAAK,0BAA0B;AAC7B,yBAAW;AAAA,gBACT,YAAY;AAAA,sBACV,6CAA0B,qBAAqB;AAAA,oBAC7C,IAAI,MAAM,KAAK;AAAA,oBACf,MAAM;AAAA,oBACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,EAAE,OAAO,GAAG,EAAE,CAAC;AAAA,kBACjD,CAAC;AAAA,gBACH;AAAA,cACF;AACA;AAAA,YACF;AAAA,YAEA,KAAK,wBAAwB;AAC3B,oBAAM,WAAUD,OAAA,MAAM,KAAK,MAAM,YAAjB,gBAAAA,KAA2B;AAE3C,mBAAI,mCAAS,UAAS,YAAU,aAAQ,SAAR,mBAAc,UAAS,MAAM;AAC3D,2BAAW;AAAA,kBACT,YAAY;AAAA,wBACV,6CAA0B,QAAQ,QAAQ,KAAK,KAAK;AAAA,kBACtD;AAAA,gBACF;AAAA,cACF;AAEA;AAAA,YACF;AAAA,YAEA,KAAK;AAAA,YACL,KAAK,8BAA8B;AACjC,uBAAS,MAAM;AACf;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAGA,iBAAW;AAAA,QACT,YAAY;AAAA,cACV,6CAA0B,0BAA0B;AAAA,YAClD;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI;AACF,cAAMD,SAAQ;AAAA,UACZ;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAO;AACd,mBAAWC,OAAA,MAAc,YAAd,OAAAA,OAAyB,GAAG,KAAK,EAAE;AAAA,MAChD,UAAE;AACA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,IACA,KAAK,YAAY;AAAA,IAAC;AAAA,IAClB,SAAS;AAAA,IAAC;AAAA,EACZ,CAAC;AAED,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,IAClB;AAAA,EACF,CAAC;AACH;;;ACvJA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAE,oBAAqC;;;AC8C9B,SAAS,2BACd,YAAyC,CAAC,GACL;AACrC,QAAM,cAAc,IAAI,YAAY;AACpC,MAAI,qBAAqB;AAEzB,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,QAAuB;AAC3B,UAAI,UAAU;AAAS,cAAM,UAAU,QAAQ;AAAA,IACjD;AAAA,IAEA,MAAM,UAAU,SAAS,YAA2B;AAClD,iBAAW,QAAQ,YAAY,OAAO,OAAO,CAAC;AAE9C,4BAAsB;AAEtB,UAAI,UAAU;AAAS,cAAM,UAAU,QAAQ,OAAO;AACtD,UAAI,UAAU,UAAU,OAAO,YAAY,UAAU;AACnD,cAAM,UAAU,OAAO,OAAO;AAAA,MAChC;AAAA,IACF;AAAA,IAEA,MAAM,QAAuB;A
AC3B,UAAI,UAAU,cAAc;AAC1B,cAAM,UAAU,aAAa,kBAAkB;AAAA,MACjD;AACA,UAAI,UAAU,SAAS;AACrB,cAAM,UAAU,QAAQ,kBAAkB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AD5BA,SAAS,qBACP,QAIA,WACA;AACA,SAAO,OACJ;AAAA,IACC,IAAI,gBAEF;AAAA,MACA,WAAW,OAAO,OAAO,eAAe;AA7DhD,YAAAC;AA+DU,YAAI,OAAO,UAAU,UAAU;AAC7B,qBAAW,QAAQ,KAAK;AACxB;AAAA,QACF;AAGA,YAAI,WAAW,OAAO;AAEpB,cAAI,MAAM,UAAU,wBAAwB;AAC1C;AAAA,eACEA,OAAA,MAAM,SAAN,gBAAAA,KAAY;AAAA,cACZ;AAAA,YACF;AAAA,UACF;AACA;AAAA,QACF;AAGA,8BAAsB,OAAO,UAAU;AAAA,MACzC;AAAA,IACF,CAAC;AAAA,EACH,EACC,YAAY,2BAA2B,SAAS,CAAC,EACjD,YAAY,IAAI,kBAAkB,CAAC,EACnC;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,YAAQ,wCAAqB,QAAQ,KAAK,CAAC;AAAA,MACxD;AAAA,IACF,CAAC;AAAA,EACH;AACJ;AASO,SAAS,aACd,QAIA,WACA;AACA,SAAO,qBAAqB,QAAQ,SAAS,EAAE;AAAA,IAC7C,IAAI,kBAAkB;AAAA,EACxB;AACF;AAEO,SAAS,qBACd,QAIA,SAKA;AA7HF,MAAAA;AA8HE,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,mCAAS;AAAA,EACX,EAAE,YAAY,IAAI,kBAAkB,CAAC;AACrC,QAAM,OAAO,mCAAS;AACtB,QAAM,OAAO,mCAAS;AAEtB,QAAM,iBAAiB,OACnB,aAAa,KAAK,QAAQ,UAAU,IACpC;AAEJ,SAAO,IAAI,SAAS,gBAAgB;AAAA,IAClC,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,IACxB,YAAY,6BAAM;AAAA,IAClB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,MAC7C,aAAa;AAAA,MACb,mBAAmB;AAAA,IACrB,CAAC;AAAA,EACH,CAAC;AACH;AAEO,SAAS,oBACd,QAIA,SACA;AACA,UAAQ,WAAW,MAAM,qBAAqB,QAAQ,QAAQ,SAAS,CAAC;AAC1E;AAEA,SAAS,sBACP,OACA,YACA;AACA,MAAI,OAAO,MAAM,YAAY,UAAU;AACrC,eAAW,QAAQ,MAAM,OAAO;AAAA,EAClC,OAAO;AACL,UAAM,UAA4C,MAAM;AACxD,eAAW,QAAQ,SAAS;AAC1B,UAAI,KAAK,SAAS,QAAQ;AACxB,mBAAW,QAAQ,KAAK,IAAI;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AACF;;;AE3KA;AAAA;AAAA,6BAAAC;AAAA,EAAA,oBAAAC;AAAA,EAAA,4BAAAC;AAAA;AAAA,IAAAC,0BAAqD;AACrD,IAAAC,oBAAqC;AAcrC,SAASC,sBACP,QACA,WACA;AACA,QAAM,YAAY,kBAAkB;AAEpC,aAAO,8DAAqC,OAAO,OAAO,aAAa,EAAE,CAAC,EACvE;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,MAAM,UAAU,SAAS,YAA2B;AAClD,mBAAW,QAAQ,UAAU,QAAQ,KAAK,CAAC;AAAA,MAC7C;AAAA,IACF,CAAC;AAAA,EACH,EACC,YAAY,2BAA2B,SAAS,CAAC,EACjD,YAAY,IAAI,kBAAkB,CAAC,EACnC;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,YAAQ,wCAAqB,QAAQ,KAAK,CAAC;AAAA,MACxD;AAAA,IACF,CAAC;AAAA,EACH;AACJ;AAEO,SAASC,cACd,QACA,WACA;AACA,SAAOD,sBAAqB,QAAQ,SAAS,EAAE;AAAA,IAC7C,IAAI,kBAAkB;AAAA,EACxB;AACF;AAEO,SAASE,sBACd,QACA,UAII,CAAC,GACL;AAxDF,MAAAC;AAyDE,QAAM,EAAE,MAAM,MAAM,UAAU,IAAI;AAClC,QAAM,aAAaH,sBAAqB,QAAQ,SAAS,EAAE;AAAA,IACzD,IAAI,kBAAkB;AAAA,EACxB;AACA,QAAM,iBAAiB,OACnB,aAAa,KAAK,QAAQ,UAAU,IACpC;AAEJ,SAAO,IAAI,SAAS,gBAAgB;AAAA,IAClC,SAAQG,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,IACxB,YAAY,6BAAM;AAAA,IAClB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,MAC7C,aAAa;AAAA,MACb,mBAAmB;AAAA,IACrB,CAAC;AAAA,EACH,CAAC;AACH;AAEO,SAASC,qBACd,QACA,SAIA;AACA,UAAQ,WAAW,MAAMJ,sBAAqB,QAAQ,QAAQ,SAAS,CAAC;AAC1E;AAEA,SAAS,oBAA8C;AACrD,MAAI,gBAAgB;AAEpB,SAAO,CAACK,UAAyB;AAC/B,QAAI,eAAe;AACjB,MAAAA,QAAOA,MAAK,UAAU;AACtB,UAAIA;AAAM,wBAAgB;AAAA,IAC5B;AACA,WAAOA;AAAA,EACT;AACF;;;AC/FA,IAAAC,oBAAgD;;;ACIzC,IAAM,iCAAiC,KAAK;;;ADI5C,IAAM,aAAN,MAAiB;AAAA,EAStB,cAAc;AARd,SAAQ,UAAU,IAAI,YAAY;AAElC,SAAQ,aAA0D;AAGlE,SAAQ,WAAoB;AAC5B,SAAQ,iBAAwC;AAG9C,UAAM,OAAO;AAEb,SAAK,SAAS,IAAI,eAAe;AAAA,MAC/B,OAAO,OAAM,eAAc;AACzB,aAAK,aAAa;AAGlB,YAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,eAAK,iBAAiB,WAAW,MAAM;AACrC,oBAAQ;AAAA,cACN;AAAA,YACF;AAAA,UACF,GAAG,8BAA8B;AAAA,QACnC;AAAA,MACF;AAAA,MACA,MAAM,gBAAc;AAAA,MAEpB;AAAA,MACA,QAAQ,YAAU;AAChB,aAAK,WAAW;AAAA,MAClB;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,SAAK,WAAW,MAAM;AACtB,SAAK,WAAW;AAGhB,QAAI,KAAK,gBAAgB;AACvB,mBAAa,KAAK,cAAc;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,OAAO,OAAwB;AAC7B,QAAI,KAA
K,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,SAAK,WAAW;AAAA,MACd,KAAK,QAAQ,WAAO,wCAAqB,QAAQ,CAAC,KAAK,CAAC,CAAC;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,wBAAwB,OAAwB;AAC9C,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,SAAK,WAAW;AAAA,MACd,KAAK,QAAQ,WAAO,wCAAqB,uBAAuB,CAAC,KAAK,CAAC,CAAC;AAAA,IAC1E;AAAA,EACF;AACF;","names":["import_ui_utils","import_provider_utils","import_ui_utils","import_provider","import_provider","name","marker","symbol","_a","_a","name","import_api","name","attributes","_a","embedding","usage","embeddings","_a","usage","import_provider_utils","import_provider_utils","import_provider","name","marker","symbol","_a","text","import_provider","name","marker","symbol","_a","_a","import_provider_utils","import_provider","name","marker","symbol","_a","_a","import_provider","name","marker","symbol","_a","_a","import_provider","import_provider_utils","import_zod","import_zod","import_zod","import_zod","import_zod","import_zod","_a","import_provider","name","marker","symbol","_a","_a","tool","import_provider","import_provider_utils","import_ui_utils","_a","generateId","_a","span","_b","result","import_provider_utils","import_ui_utils","_a","_a","originalGenerateId","generateId","now","doStreamSpan","object","_a","error","import_provider_utils","import_provider","import_provider","name","marker","symbol","_a","import_provider","name","marker","symbol","_a","import_provider","name","marker","symbol","_a","import_provider","name","marker","symbol","_a","import_ui_utils","object","name","tool","text","text","import_provider_utils","import_ui_utils","tool","text","tool","originalGenerateId","generateId","_a","text","tool","span","_b","_c","_d","_e","_f","result","import_provider_utils","import_ui_utils","text","import_provider_utils","import_ui_utils","result","import_ui_utils","tool","originalGenerateId","now","generateId","_a","stepType","tool","doStreamSpan","getErrorMessage","delay","import_provider","import_provider","name","marker","symbol","_a","import_provider","_a","tool","import_ui_utils","process","_a","stream","import_ui_utils","_a","mergeIntoDataStream","toDataStream","toDataStreamResponse","import_provider_utils","import_ui_utils","toDataStreamInternal","toDataStream","toDataStreamResponse","_a","mergeIntoDataStream","text","import_ui_utils"]}
+
{"version":3,"sources":["../streams/index.ts","../core/index.ts","../core/data-stream/create-data-stream.ts","../core/util/prepare-response-headers.ts","../core/data-stream/create-data-stream-response.ts","../core/util/prepare-outgoing-http-headers.ts","../core/util/write-to-server-response.ts","../core/data-stream/pipe-data-stream-to-response.ts","../errors/invalid-argument-error.ts","../util/retry-with-exponential-backoff.ts","../util/delay.ts","../util/retry-error.ts","../core/prompt/prepare-retries.ts","../core/telemetry/assemble-operation-name.ts","../core/telemetry/get-base-telemetry-attributes.ts","../core/telemetry/get-tracer.ts","../core/telemetry/noop-tracer.ts","../core/telemetry/record-span.ts","../core/telemetry/select-telemetry-attributes.ts","../core/embed/embed.ts","../core/util/split-array.ts","../core/embed/embed-many.ts","../core/generate-image/generate-image.ts","../core/generate-object/generate-object.ts","../errors/no-object-generated-error.ts","../util/download-error.ts","../util/download.ts","../core/util/detect-image-mimetype.ts","../core/prompt/data-content.ts","../core/prompt/invalid-data-content-error.ts","../core/prompt/invalid-message-role-error.ts","../core/prompt/split-data-url.ts","../core/prompt/convert-to-language-model-prompt.ts","../core/prompt/prepare-call-settings.ts","../core/prompt/standardize-prompt.ts","../core/prompt/message.ts","../core/types/provider-metadata.ts","../core/types/json-value.ts","../core/prompt/content-part.ts","../core/prompt/tool-result-content.ts","../core/prompt/detect-prompt-type.ts","../core/prompt/attachments-to-parts.ts","../core/prompt/message-conversion-error.ts","../core/prompt/convert-to-core-messages.ts","../core/types/usage.ts","../core/generate-object/inject-json-instruction.ts","../core/generate-object/output-strategy.ts","../core/util/async-iterable-stream.ts","../core/generate-object/validate-object-generation-input.ts","../core/generate-object/stream-object.ts","../util/delayed-promise.ts","../util/create-resolvable-promise.ts","../core/util/create-stitchable-stream.ts","../core/util/now.ts","../core/generate-text/generate-text.ts","../errors/index.ts","../errors/invalid-tool-arguments-error.ts","../errors/no-such-tool-error.ts","../errors/tool-call-repair-error.ts","../errors/tool-execution-error.ts","../core/prompt/prepare-tools-and-tool-choice.ts","../core/util/is-non-empty-object.ts","../core/util/split-on-last-whitespace.ts","../core/util/remove-text-after-last-whitespace.ts","../core/generate-text/parse-tool-call.ts","../core/generate-text/to-response-messages.ts","../core/generate-text/output.ts","../core/generate-text/stream-text.ts","../core/util/merge-streams.ts","../core/generate-text/run-tools-transformation.ts","../core/generate-text/smooth-stream.ts","../core/middleware/wrap-language-model.ts","../core/registry/custom-provider.ts","../core/registry/no-such-provider-error.ts","../core/registry/provider-registry.ts","../core/tool/tool.ts","../core/util/cosine-similarity.ts","../streams/assistant-response.ts","../streams/langchain-adapter.ts","../streams/stream-callbacks.ts","../streams/llamaindex-adapter.ts","../streams/stream-data.ts","../util/constants.ts"],"sourcesContent":["// forwarding exports from ui-utils:\nexport {\n formatAssistantStreamPart,\n formatDataStreamPart,\n parseAssistantStreamPart,\n parseDataStreamPart,\n processDataStream,\n processTextStream,\n} from '@ai-sdk/ui-utils';\nexport type {\n AssistantMessage,\n AssistantStatus,\n Attachment,\n ChatRequest,\n ChatRequestOptions,\n 
CreateMessage,\n DataMessage,\n DataStreamPart,\n IdGenerator,\n JSONValue,\n Message,\n RequestOptions,\n ToolInvocation,\n UseAssistantOptions,\n} from '@ai-sdk/ui-utils';\n\nexport { generateId } from '@ai-sdk/provider-utils';\n\nexport * from '../core/index';\nexport * from '../errors/index';\n\nexport * from './assistant-response';\nexport * as LangChainAdapter from './langchain-adapter';\nexport * as LlamaIndexAdapter from './llamaindex-adapter';\nexport * from './stream-data';\n","export { jsonSchema } from '@ai-sdk/ui-utils';\nexport type { DeepPartial, Schema } from '@ai-sdk/ui-utils';\nexport * from './data-stream';\nexport * from './embed';\nexport * from './generate-image';\nexport * from './generate-object';\nexport * from './generate-text';\nexport * from './middleware';\nexport * from './prompt';\nexport * from './registry';\nexport * from './tool';\nexport * from './types';\nexport { cosineSimilarity } from './util/cosine-similarity';\n","import { DataStreamString, formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { DataStreamWriter } from './data-stream-writer';\n\nexport function createDataStream({\n execute,\n onError = () => 'An error occurred.', // mask error messages for safety by default\n}: {\n execute: (dataStream: DataStreamWriter) => Promise<void> | void;\n onError?: (error: unknown) => string;\n}): ReadableStream<DataStreamString> {\n let controller!: ReadableStreamDefaultController<string>;\n\n const ongoingStreamPromises: Promise<void>[] = [];\n\n const stream = new ReadableStream({\n start(controllerArg) {\n controller = controllerArg;\n },\n });\n\n function safeEnqueue(data: DataStreamString) {\n try {\n controller.enqueue(data);\n } catch (error) {\n // suppress errors when the stream has been closed\n }\n }\n\n try {\n const result = execute({\n writeData(data) {\n safeEnqueue(formatDataStreamPart('data', [data]));\n },\n writeMessageAnnotation(annotation) {\n safeEnqueue(formatDataStreamPart('message_annotations', [annotation]));\n },\n merge(streamArg) {\n ongoingStreamPromises.push(\n (async () => {\n const reader = streamArg.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n safeEnqueue(value);\n }\n })().catch(error => {\n safeEnqueue(formatDataStreamPart('error', onError(error)));\n }),\n );\n },\n onError,\n });\n\n if (result) {\n ongoingStreamPromises.push(\n result.catch(error => {\n safeEnqueue(formatDataStreamPart('error', onError(error)));\n }),\n );\n }\n } catch (error) {\n safeEnqueue(formatDataStreamPart('error', onError(error)));\n }\n\n // Wait until all ongoing streams are done. This approach enables merging\n // streams even after execute has returned, as long as there is still an\n // open merged stream. This is important to e.g. forward new streams and\n // from callbacks.\n const waitForStreams: Promise<void> = new Promise(async resolve => {\n while (ongoingStreamPromises.length > 0) {\n await ongoingStreamPromises.shift();\n }\n resolve();\n });\n\n waitForStreams.finally(() => {\n try {\n controller.close();\n } catch (error) {\n // suppress errors when the stream has been closed\n }\n });\n\n return stream;\n}\n","export function prepareResponseHeaders(\n headers: HeadersInit | undefined,\n {\n contentType,\n dataStreamVersion,\n }: { contentType: string; dataStreamVersion?: 'v1' | undefined },\n) {\n const responseHeaders = new Headers(headers ?? 
{});\n\n if (!responseHeaders.has('Content-Type')) {\n responseHeaders.set('Content-Type', contentType);\n }\n\n if (dataStreamVersion !== undefined) {\n responseHeaders.set('X-Vercel-AI-Data-Stream', dataStreamVersion);\n }\n\n return responseHeaders;\n}\n","import { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { createDataStream } from './create-data-stream';\nimport { DataStreamWriter } from './data-stream-writer';\n\nexport function createDataStreamResponse({\n status,\n statusText,\n headers,\n execute,\n onError,\n}: ResponseInit & {\n execute: (dataStream: DataStreamWriter) => Promise<void> | void;\n onError?: (error: unknown) => string;\n}): Response {\n return new Response(\n createDataStream({ execute, onError }).pipeThrough(new TextEncoderStream()),\n {\n status,\n statusText,\n headers: prepareResponseHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n },\n );\n}\n","export function prepareOutgoingHttpHeaders(\n headers: HeadersInit | undefined,\n {\n contentType,\n dataStreamVersion,\n }: { contentType: string; dataStreamVersion?: 'v1' | undefined },\n) {\n const outgoingHeaders: Record<string, string | number | string[]> = {};\n\n if (headers != null) {\n for (const [key, value] of Object.entries(headers)) {\n outgoingHeaders[key] = value;\n }\n }\n\n if (outgoingHeaders['Content-Type'] == null) {\n outgoingHeaders['Content-Type'] = contentType;\n }\n\n if (dataStreamVersion !== undefined) {\n outgoingHeaders['X-Vercel-AI-Data-Stream'] = dataStreamVersion;\n }\n\n return outgoingHeaders;\n}\n","import { ServerResponse } from 'node:http';\n\n/**\n * Writes the content of a stream to a server response.\n */\nexport function writeToServerResponse({\n response,\n status,\n statusText,\n headers,\n stream,\n}: {\n response: ServerResponse;\n status?: number;\n statusText?: string;\n headers?: Record<string, string | number | string[]>;\n stream: ReadableStream<Uint8Array>;\n}): void {\n response.writeHead(status ?? 
200, statusText, headers);\n\n const reader = stream.getReader();\n const read = async () => {\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n response.write(value);\n }\n } catch (error) {\n throw error;\n } finally {\n response.end();\n }\n };\n\n read();\n}\n","import { ServerResponse } from 'node:http';\nimport { prepareOutgoingHttpHeaders } from '../util/prepare-outgoing-http-headers';\nimport { writeToServerResponse } from '../util/write-to-server-response';\nimport { createDataStream } from './create-data-stream';\nimport { DataStreamWriter } from './data-stream-writer';\n\nexport function pipeDataStreamToResponse(\n response: ServerResponse,\n {\n status,\n statusText,\n headers,\n execute,\n onError,\n }: ResponseInit & {\n execute: (writer: DataStreamWriter) => Promise<void> | void;\n onError?: (error: unknown) => string;\n },\n): void {\n writeToServerResponse({\n response,\n status,\n statusText,\n headers: prepareOutgoingHttpHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n stream: createDataStream({ execute, onError }).pipeThrough(\n new TextEncoderStream(),\n ),\n });\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidArgumentError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidArgumentError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly parameter: string;\n readonly value: unknown;\n\n constructor({\n parameter,\n value,\n message,\n }: {\n parameter: string;\n value: unknown;\n message: string;\n }) {\n super({\n name,\n message: `Invalid argument for parameter ${parameter}: ${message}`,\n });\n\n this.parameter = parameter;\n this.value = value;\n }\n\n static isInstance(error: unknown): error is InvalidArgumentError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { APICallError } from '@ai-sdk/provider';\nimport { getErrorMessage, isAbortError } from '@ai-sdk/provider-utils';\nimport { delay } from './delay';\nimport { RetryError } from './retry-error';\n\nexport type RetryFunction = <OUTPUT>(\n fn: () => PromiseLike<OUTPUT>,\n) => PromiseLike<OUTPUT>;\n\n/**\nThe `retryWithExponentialBackoff` strategy retries a failed API call with an exponential backoff.\nYou can configure the maximum number of retries, the initial delay, and the backoff factor.\n */\nexport const retryWithExponentialBackoff =\n ({\n maxRetries = 2,\n initialDelayInMs = 2000,\n backoffFactor = 2,\n } = {}): RetryFunction =>\n async <OUTPUT>(f: () => PromiseLike<OUTPUT>) =>\n _retryWithExponentialBackoff(f, {\n maxRetries,\n delayInMs: initialDelayInMs,\n backoffFactor,\n });\n\nasync function _retryWithExponentialBackoff<OUTPUT>(\n f: () => PromiseLike<OUTPUT>,\n {\n maxRetries,\n delayInMs,\n backoffFactor,\n }: { maxRetries: number; delayInMs: number; backoffFactor: number },\n errors: unknown[] = [],\n): Promise<OUTPUT> {\n try {\n return await f();\n } catch (error) {\n if (isAbortError(error)) {\n throw error; // don't retry when the request was aborted\n }\n\n if (maxRetries === 0) {\n throw error; // don't wrap the error when retries are disabled\n }\n\n const errorMessage = getErrorMessage(error);\n const newErrors = [...errors, error];\n const tryNumber = newErrors.length;\n\n if (tryNumber > maxRetries) {\n throw new RetryError({\n message: `Failed after ${tryNumber} attempts. 
Last error: ${errorMessage}`,\n reason: 'maxRetriesExceeded',\n errors: newErrors,\n });\n }\n\n if (\n error instanceof Error &&\n APICallError.isInstance(error) &&\n error.isRetryable === true &&\n tryNumber <= maxRetries\n ) {\n await delay(delayInMs);\n return _retryWithExponentialBackoff(\n f,\n { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },\n newErrors,\n );\n }\n\n if (tryNumber === 1) {\n throw error; // don't wrap the error when a non-retryable error occurs on the first try\n }\n\n throw new RetryError({\n message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,\n reason: 'errorNotRetryable',\n errors: newErrors,\n });\n }\n}\n","export async function delay(delayInMs?: number): Promise<void> {\n return delayInMs === undefined\n ? Promise.resolve()\n : new Promise(resolve => setTimeout(resolve, delayInMs));\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_RetryError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport type RetryErrorReason =\n | 'maxRetriesExceeded'\n | 'errorNotRetryable'\n | 'abort';\n\nexport class RetryError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n // note: property order determines debugging output\n readonly reason: RetryErrorReason;\n readonly lastError: unknown;\n readonly errors: Array<unknown>;\n\n constructor({\n message,\n reason,\n errors,\n }: {\n message: string;\n reason: RetryErrorReason;\n errors: Array<unknown>;\n }) {\n super({ name, message });\n\n this.reason = reason;\n this.errors = errors;\n\n // separate our last error to make debugging via log easier:\n this.lastError = errors[errors.length - 1];\n }\n\n static isInstance(error: unknown): error is RetryError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport {\n RetryFunction,\n retryWithExponentialBackoff,\n} from '../../util/retry-with-exponential-backoff';\n\n/**\n * Validate and prepare retries.\n */\nexport function prepareRetries({\n maxRetries,\n}: {\n maxRetries: number | undefined;\n}): {\n maxRetries: number;\n retry: RetryFunction;\n} {\n if (maxRetries != null) {\n if (!Number.isInteger(maxRetries)) {\n throw new InvalidArgumentError({\n parameter: 'maxRetries',\n value: maxRetries,\n message: 'maxRetries must be an integer',\n });\n }\n\n if (maxRetries < 0) {\n throw new InvalidArgumentError({\n parameter: 'maxRetries',\n value: maxRetries,\n message: 'maxRetries must be >= 0',\n });\n }\n }\n\n const maxRetriesResult = maxRetries ?? 2;\n\n return {\n maxRetries: maxRetriesResult,\n retry: retryWithExponentialBackoff({ maxRetries: maxRetriesResult }),\n };\n}\n","import { TelemetrySettings } from './telemetry-settings';\n\nexport function assembleOperationName({\n operationId,\n telemetry,\n}: {\n operationId: string;\n telemetry?: TelemetrySettings;\n}) {\n return {\n // standardized operation and resource name:\n 'operation.name': `${operationId}${\n telemetry?.functionId != null ? 
` ${telemetry.functionId}` : ''\n }`,\n 'resource.name': telemetry?.functionId,\n\n // detailed, AI SDK specific data:\n 'ai.operationId': operationId,\n 'ai.telemetry.functionId': telemetry?.functionId,\n };\n}\n","import { Attributes } from '@opentelemetry/api';\nimport { CallSettings } from '../prompt/call-settings';\nimport { TelemetrySettings } from './telemetry-settings';\n\nexport function getBaseTelemetryAttributes({\n model,\n settings,\n telemetry,\n headers,\n}: {\n model: { modelId: string; provider: string };\n settings: Omit<CallSettings, 'abortSignal' | 'headers'>;\n telemetry: TelemetrySettings | undefined;\n headers: Record<string, string | undefined> | undefined;\n}): Attributes {\n return {\n 'ai.model.provider': model.provider,\n 'ai.model.id': model.modelId,\n\n // settings:\n ...Object.entries(settings).reduce((attributes, [key, value]) => {\n attributes[`ai.settings.${key}`] = value;\n return attributes;\n }, {} as Attributes),\n\n // add metadata as attributes:\n ...Object.entries(telemetry?.metadata ?? {}).reduce(\n (attributes, [key, value]) => {\n attributes[`ai.telemetry.metadata.${key}`] = value;\n return attributes;\n },\n {} as Attributes,\n ),\n\n // request headers\n ...Object.entries(headers ?? {}).reduce((attributes, [key, value]) => {\n if (value !== undefined) {\n attributes[`ai.request.headers.${key}`] = value;\n }\n return attributes;\n }, {} as Attributes),\n };\n}\n","import { Tracer, trace } from '@opentelemetry/api';\nimport { noopTracer } from './noop-tracer';\n\nexport function getTracer({\n isEnabled = false,\n tracer,\n}: {\n isEnabled?: boolean;\n tracer?: Tracer;\n} = {}): Tracer {\n if (!isEnabled) {\n return noopTracer;\n }\n\n if (tracer) {\n return tracer;\n }\n\n return trace.getTracer('ai');\n}\n","import { Span, SpanContext, Tracer } from '@opentelemetry/api';\n\n/**\n * Tracer implementation that does nothing (null object).\n */\nexport const noopTracer: Tracer = {\n startSpan(): Span {\n return noopSpan;\n },\n\n startActiveSpan<F extends (span: Span) => unknown>(\n name: unknown,\n arg1: unknown,\n arg2?: unknown,\n arg3?: F,\n ): ReturnType<any> {\n if (typeof arg1 === 'function') {\n return arg1(noopSpan);\n }\n if (typeof arg2 === 'function') {\n return arg2(noopSpan);\n }\n if (typeof arg3 === 'function') {\n return arg3(noopSpan);\n }\n },\n};\n\nconst noopSpan: Span = {\n spanContext() {\n return noopSpanContext;\n },\n setAttribute() {\n return this;\n },\n setAttributes() {\n return this;\n },\n addEvent() {\n return this;\n },\n addLink() {\n return this;\n },\n addLinks() {\n return this;\n },\n setStatus() {\n return this;\n },\n updateName() {\n return this;\n },\n end() {\n return this;\n },\n isRecording() {\n return false;\n },\n recordException() {\n return this;\n },\n};\n\nconst noopSpanContext: SpanContext = {\n traceId: '',\n spanId: '',\n traceFlags: 0,\n};\n","import { Attributes, Span, Tracer, SpanStatusCode } from '@opentelemetry/api';\n\nexport function recordSpan<T>({\n name,\n tracer,\n attributes,\n fn,\n endWhenDone = true,\n}: {\n name: string;\n tracer: Tracer;\n attributes: Attributes;\n fn: (span: Span) => Promise<T>;\n endWhenDone?: boolean;\n}) {\n return tracer.startActiveSpan(name, { attributes }, async span => {\n try {\n const result = await fn(span);\n\n if (endWhenDone) {\n span.end();\n }\n\n return result;\n } catch (error) {\n try {\n if (error instanceof Error) {\n span.recordException({\n name: error.name,\n message: error.message,\n stack: error.stack,\n });\n span.setStatus({\n code: 
SpanStatusCode.ERROR,\n message: error.message,\n });\n } else {\n span.setStatus({ code: SpanStatusCode.ERROR });\n }\n } finally {\n // always stop the span when there is an error:\n span.end();\n }\n\n throw error;\n }\n });\n}\n","import type { Attributes, AttributeValue } from '@opentelemetry/api';\nimport type { TelemetrySettings } from './telemetry-settings';\n\nexport function selectTelemetryAttributes({\n telemetry,\n attributes,\n}: {\n telemetry?: TelemetrySettings;\n attributes: {\n [attributeKey: string]:\n | AttributeValue\n | { input: () => AttributeValue | undefined }\n | { output: () => AttributeValue | undefined }\n | undefined;\n };\n}): Attributes {\n // when telemetry is disabled, return an empty object to avoid serialization overhead:\n if (telemetry?.isEnabled !== true) {\n return {};\n }\n\n return Object.entries(attributes).reduce((attributes, [key, value]) => {\n if (value === undefined) {\n return attributes;\n }\n\n // input value, check if it should be recorded:\n if (\n typeof value === 'object' &&\n 'input' in value &&\n typeof value.input === 'function'\n ) {\n // default to true:\n if (telemetry?.recordInputs === false) {\n return attributes;\n }\n\n const result = value.input();\n\n return result === undefined\n ? attributes\n : { ...attributes, [key]: result };\n }\n\n // output value, check if it should be recorded:\n if (\n typeof value === 'object' &&\n 'output' in value &&\n typeof value.output === 'function'\n ) {\n // default to true:\n if (telemetry?.recordOutputs === false) {\n return attributes;\n }\n\n const result = value.output();\n\n return result === undefined\n ? attributes\n : { ...attributes, [key]: result };\n }\n\n // value is an attribute value already:\n return { ...attributes, [key]: value };\n }, {});\n}\n","import { prepareRetries } from '../prompt/prepare-retries';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { EmbeddingModel } from '../types';\nimport { EmbedResult } from './embed-result';\n\n/**\nEmbed a value using an embedding model. The type of the value is defined by the embedding model.\n\n@param model - The embedding model to use.\n@param value - The value that should be embedded.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@returns A result object that contains the embedding, the value, and additional information.\n */\nexport async function embed<VALUE>({\n model,\n value,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n}: {\n /**\nThe embedding model to use.\n */\n model: EmbeddingModel<VALUE>;\n\n /**\nThe value that should be embedded.\n */\n value: VALUE;\n\n /**\nMaximum number of retries per embedding model call. 
Set to 0 to disable retries.\n\n@default 2\n */\n maxRetries?: number;\n\n /**\nAbort signal.\n */\n abortSignal?: AbortSignal;\n\n /**\nAdditional headers to include in the request.\nOnly applicable for HTTP-based providers.\n */\n headers?: Record<string, string>;\n\n /**\n * Optional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n}): Promise<EmbedResult<VALUE>> {\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.embed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: 'ai.embed', telemetry }),\n ...baseTelemetryAttributes,\n 'ai.value': { input: () => JSON.stringify(value) },\n },\n }),\n tracer,\n fn: async span => {\n const { embedding, usage, rawResponse } = await retry(() =>\n // nested spans to align with the embedMany telemetry data:\n recordSpan({\n name: 'ai.embed.doEmbed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.embed.doEmbed',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': { input: () => [JSON.stringify(value)] },\n },\n }),\n tracer,\n fn: async doEmbedSpan => {\n const modelResponse = await model.doEmbed({\n values: [value],\n abortSignal,\n headers,\n });\n\n const embedding = modelResponse.embeddings[0];\n const usage = modelResponse.usage ?? { tokens: NaN };\n\n doEmbedSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n modelResponse.embeddings.map(embedding =>\n JSON.stringify(embedding),\n ),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return {\n embedding,\n usage,\n rawResponse: modelResponse.rawResponse,\n };\n },\n }),\n );\n\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embedding': { output: () => JSON.stringify(embedding) },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return new DefaultEmbedResult({ value, embedding, usage, rawResponse });\n },\n });\n}\n\nclass DefaultEmbedResult<VALUE> implements EmbedResult<VALUE> {\n readonly value: EmbedResult<VALUE>['value'];\n readonly embedding: EmbedResult<VALUE>['embedding'];\n readonly usage: EmbedResult<VALUE>['usage'];\n readonly rawResponse: EmbedResult<VALUE>['rawResponse'];\n\n constructor(options: {\n value: EmbedResult<VALUE>['value'];\n embedding: EmbedResult<VALUE>['embedding'];\n usage: EmbedResult<VALUE>['usage'];\n rawResponse?: EmbedResult<VALUE>['rawResponse'];\n }) {\n this.value = options.value;\n this.embedding = options.embedding;\n this.usage = options.usage;\n this.rawResponse = options.rawResponse;\n }\n}\n","/**\n * Splits an array into chunks of a specified size.\n *\n * @template T - The type of elements in the array.\n * @param {T[]} array - The array to split.\n * @param {number} chunkSize - The size of each chunk.\n * @returns {T[][]} - A new array containing the chunks.\n */\nexport function splitArray<T>(array: T[], chunkSize: number): T[][] {\n if (chunkSize <= 0) {\n throw new Error('chunkSize must be greater than 0');\n }\n\n const result = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n result.push(array.slice(i, i + chunkSize));\n }\n\n return 
result;\n}\n","import { prepareRetries } from '../prompt/prepare-retries';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { Embedding, EmbeddingModel } from '../types';\nimport { splitArray } from '../util/split-array';\nimport { EmbedManyResult } from './embed-many-result';\n\n/**\nEmbed several values using an embedding model. The type of the value is defined\nby the embedding model.\n\n`embedMany` automatically splits large requests into smaller chunks if the model\nhas a limit on how many embeddings can be generated in a single call.\n\n@param model - The embedding model to use.\n@param values - The values that should be embedded.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@returns A result object that contains the embeddings, the value, and additional information.\n */\nexport async function embedMany<VALUE>({\n model,\n values,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n}: {\n /**\nThe embedding model to use.\n */\n model: EmbeddingModel<VALUE>;\n\n /**\nThe values that should be embedded.\n */\n values: Array<VALUE>;\n\n /**\nMaximum number of retries per embedding model call. 
Set to 0 to disable retries.\n\n@default 2\n */\n maxRetries?: number;\n\n /**\nAbort signal.\n */\n abortSignal?: AbortSignal;\n\n /**\nAdditional headers to include in the request.\nOnly applicable for HTTP-based providers.\n */\n headers?: Record<string, string>;\n\n /**\n * Optional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n}): Promise<EmbedManyResult<VALUE>> {\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.embedMany',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: 'ai.embedMany', telemetry }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': {\n input: () => values.map(value => JSON.stringify(value)),\n },\n },\n }),\n tracer,\n fn: async span => {\n const maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;\n\n // the model has not specified limits on\n // how many embeddings can be generated in a single call\n if (maxEmbeddingsPerCall == null) {\n const { embeddings, usage } = await retry(() => {\n // nested spans to align with the embedMany telemetry data:\n return recordSpan({\n name: 'ai.embedMany.doEmbed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.embedMany.doEmbed',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': {\n input: () => values.map(value => JSON.stringify(value)),\n },\n },\n }),\n tracer,\n fn: async doEmbedSpan => {\n const modelResponse = await model.doEmbed({\n values,\n abortSignal,\n headers,\n });\n\n const embeddings = modelResponse.embeddings;\n const usage = modelResponse.usage ?? 
{ tokens: NaN };\n\n doEmbedSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return { embeddings, usage };\n },\n });\n });\n\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return new DefaultEmbedManyResult({ values, embeddings, usage });\n }\n\n // split the values into chunks that are small enough for the model:\n const valueChunks = splitArray(values, maxEmbeddingsPerCall);\n\n // serially embed the chunks:\n const embeddings: Array<Embedding> = [];\n let tokens = 0;\n\n for (const chunk of valueChunks) {\n const { embeddings: responseEmbeddings, usage } = await retry(() => {\n // nested spans to align with the embedMany telemetry data:\n return recordSpan({\n name: 'ai.embedMany.doEmbed',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.embedMany.doEmbed',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.values': {\n input: () => chunk.map(value => JSON.stringify(value)),\n },\n },\n }),\n tracer,\n fn: async doEmbedSpan => {\n const modelResponse = await model.doEmbed({\n values: chunk,\n abortSignal,\n headers,\n });\n\n const embeddings = modelResponse.embeddings;\n const usage = modelResponse.usage ?? { tokens: NaN };\n\n doEmbedSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': usage.tokens,\n },\n }),\n );\n\n return { embeddings, usage };\n },\n });\n });\n\n embeddings.push(...responseEmbeddings);\n tokens += usage.tokens;\n }\n\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.embeddings': {\n output: () =>\n embeddings.map(embedding => JSON.stringify(embedding)),\n },\n 'ai.usage.tokens': tokens,\n },\n }),\n );\n\n return new DefaultEmbedManyResult({\n values,\n embeddings,\n usage: { tokens },\n });\n },\n });\n}\n\nclass DefaultEmbedManyResult<VALUE> implements EmbedManyResult<VALUE> {\n readonly values: EmbedManyResult<VALUE>['values'];\n readonly embeddings: EmbedManyResult<VALUE>['embeddings'];\n readonly usage: EmbedManyResult<VALUE>['usage'];\n\n constructor(options: {\n values: EmbedManyResult<VALUE>['values'];\n embeddings: EmbedManyResult<VALUE>['embeddings'];\n usage: EmbedManyResult<VALUE>['usage'];\n }) {\n this.values = options.values;\n this.embeddings = options.embeddings;\n this.usage = options.usage;\n }\n}\n","import { ImageModelV1, JSONValue } from '@ai-sdk/provider';\nimport { convertBase64ToUint8Array } from '@ai-sdk/provider-utils';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { GeneratedImage, GenerateImageResult } from './generate-image-result';\n\n/**\nGenerates images using an image model.\n\n@param model - The image model to use.\n@param prompt - The prompt that should be used to generate the image.\n@param n - Number of images to generate. Default: 1.\n@param size - Size of the images to generate. 
Must have the format `{width}x{height}`.\n@param providerOptions - Additional provider-specific options that are passed through to the provider\nas body parameters.\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@returns A result object that contains the generated images.\n */\nexport async function generateImage({\n model,\n prompt,\n n,\n size,\n providerOptions,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n}: {\n /**\nThe image model to use.\n */\n model: ImageModelV1;\n\n /**\nThe prompt that should be used to generate the image.\n */\n prompt: string;\n\n /**\nNumber of images to generate.\n */\n n?: number;\n\n /**\nSize of the images to generate. Must have the format `{width}x{height}`.\n */\n size?: `${number}x${number}`;\n\n /**\nAdditional provider-specific options that are passed through to the provider\nas body parameters.\n\nThe outer record is keyed by the provider name, and the inner\nrecord is keyed by the provider-specific metadata key.\n```ts\n{\n \"openai\": {\n \"style\": \"vivid\"\n }\n}\n```\n */\n providerOptions?: Record<string, Record<string, JSONValue>>;\n\n /**\nMaximum number of retries per embedding model call. Set to 0 to disable retries.\n\n@default 2\n */\n maxRetries?: number;\n\n /**\nAbort signal.\n */\n abortSignal?: AbortSignal;\n\n /**\nAdditional headers to include in the request.\nOnly applicable for HTTP-based providers.\n */\n headers?: Record<string, string>;\n}): Promise<GenerateImageResult> {\n const { retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const { images } = await retry(() =>\n model.doGenerate({\n prompt,\n n: n ?? 1,\n abortSignal,\n headers,\n size,\n providerOptions: providerOptions ?? 
{},\n }),\n );\n\n return new DefaultGenerateImageResult({ base64Images: images });\n}\n\nclass DefaultGenerateImageResult implements GenerateImageResult {\n readonly images: Array<GeneratedImage>;\n\n constructor(options: { base64Images: Array<string> }) {\n this.images = options.base64Images.map(base64 => ({\n base64,\n get uint8Array() {\n return convertBase64ToUint8Array(this.base64);\n },\n }));\n }\n\n get image() {\n return this.images[0];\n }\n}\n","import { JSONValue } from '@ai-sdk/provider';\nimport { createIdGenerator, safeParseJSON } from '@ai-sdk/provider-utils';\nimport { Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors/no-object-generated-error';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport {\n CallWarning,\n FinishReason,\n LanguageModel,\n LogProbs,\n ProviderMetadata,\n} from '../types';\nimport { LanguageModelRequestMetadata } from '../types/language-model-request-metadata';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { calculateLanguageModelUsage } from '../types/usage';\nimport { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { GenerateObjectResult } from './generate-object-result';\nimport { injectJsonInstruction } from './inject-json-instruction';\nimport { getOutputStrategy } from './output-strategy';\nimport { validateObjectGenerationInput } from './validate-object-generation-input';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aiobj', size: 24 });\n\n/**\nGenerate a structured, typed object for a given prompt and schema using a language model.\n\nThis function does not stream the output. 
If you want to stream the output, use `streamObject` instead.\n\n@returns\nA result object that contains the generated object, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject<OBJECT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output?: 'object' | undefined;\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe schema of the object that the model should generate.\n */\n schema: z.Schema<OBJECT, z.ZodTypeDef, any> | Schema<OBJECT>;\n\n /**\nOptional name of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<OBJECT>>;\n/**\nGenerate an array with structured, typed elements for a given prompt and element schema using a language model.\n\nThis function does not stream the output. 
If you want to stream the output, use `streamObject` instead.\n\n@return\nA result object that contains the generated object, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject<ELEMENT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'array';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe element schema of the array that the model should generate.\n */\n schema: z.Schema<ELEMENT, z.ZodTypeDef, any> | Schema<ELEMENT>;\n\n /**\nOptional name of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<Array<ELEMENT>>>;\n/**\nGenerate a value from an enum (limited list of string values) using a language model.\n\nThis function does not stream the output.\n\n@return\nA result object that contains the generated value, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject<ENUM extends string>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'enum';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe enum values that the model should use.\n */\n enum: Array<ENUM>;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<ENUM>>;\n/**\nGenerate JSON with any schema for a given prompt using a language model.\n\nThis function does not stream the output. If you want to stream the output, use `streamObject` instead.\n\n@returns\nA result object that contains the generated object, the finish reason, the token usage, and additional information.\n */\nexport async function generateObject(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'no-schema';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe mode to use for object generation. Must be \"json\" for no-schema output.\n */\n mode?: 'json';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n },\n): Promise<GenerateObjectResult<JSONValue>>;\nexport async function generateObject<SCHEMA, RESULT>({\n model,\n enum: enumValues, // rename bc enum is reserved by typescript\n schema: inputSchema,\n schemaName,\n schemaDescription,\n mode,\n output = 'object',\n system,\n prompt,\n messages,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n _internal: {\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n } = {},\n ...settings\n}: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n /**\n * The expected structure of the output.\n *\n * - 'object': Generate a single object that conforms to the schema.\n * - 'array': Generate an array of objects that conform to the schema.\n * - 'no-schema': Generate any JSON object. No schema is specified.\n *\n * Default is 'object' if not specified.\n */\n output?: 'object' | 'array' | 'enum' | 'no-schema';\n\n model: LanguageModel;\n enum?: Array<SCHEMA>;\n schema?: z.Schema<SCHEMA, z.ZodTypeDef, any> | Schema<SCHEMA>;\n schemaName?: string;\n schemaDescription?: string;\n mode?: 'auto' | 'json' | 'tool';\n experimental_telemetry?: TelemetrySettings;\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\n * Internal. For test use only. 
May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n }): Promise<GenerateObjectResult<RESULT>> {\n validateObjectGenerationInput({\n output,\n mode,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n enumValues,\n });\n\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const outputStrategy = getOutputStrategy({\n output,\n schema: inputSchema,\n enumValues,\n });\n\n // automatically set mode to 'json' for no-schema output\n if (outputStrategy.type === 'no-schema' && mode === undefined) {\n mode = 'json';\n }\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.generateObject',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateObject',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.schema':\n outputStrategy.jsonSchema != null\n ? { input: () => JSON.stringify(outputStrategy.jsonSchema) }\n : undefined,\n 'ai.schema.name': schemaName,\n 'ai.schema.description': schemaDescription,\n 'ai.settings.output': outputStrategy.type,\n 'ai.settings.mode': mode,\n },\n }),\n tracer,\n fn: async span => {\n // use the default provider mode when the mode is set to 'auto' or unspecified\n if (mode === 'auto' || mode == null) {\n mode = model.defaultObjectGenerationMode;\n }\n\n let result: string;\n let finishReason: FinishReason;\n let usage: Parameters<typeof calculateLanguageModelUsage>[0];\n let warnings: CallWarning[] | undefined;\n let rawResponse: { headers?: Record<string, string> } | undefined;\n let response: LanguageModelResponseMetadata;\n let request: LanguageModelRequestMetadata;\n let logprobs: LogProbs | undefined;\n let resultProviderMetadata: ProviderMetadata | undefined;\n\n switch (mode) {\n case 'json': {\n const standardizedPrompt = standardizePrompt({\n prompt: {\n system:\n outputStrategy.jsonSchema == null\n ? injectJsonInstruction({ prompt: system })\n : model.supportsStructuredOutputs\n ? 
system\n : injectJsonInstruction({\n prompt: system,\n schema: outputStrategy.jsonSchema,\n }),\n prompt,\n messages,\n },\n tools: undefined,\n });\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n\n const generateResult = await retry(() =>\n recordSpan({\n name: 'ai.generateObject.doGenerate',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateObject.doGenerate',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => standardizedPrompt.type,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.settings.mode': mode,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n fn: async span => {\n const result = await model.doGenerate({\n mode: {\n type: 'object-json',\n schema: outputStrategy.jsonSchema,\n name: schemaName,\n description: schemaDescription,\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n });\n\n const responseData = {\n id: result.response?.id ?? generateId(),\n timestamp: result.response?.timestamp ?? currentDate(),\n modelId: result.response?.modelId ?? model.modelId,\n };\n\n if (result.text === undefined) {\n throw new NoObjectGeneratedError({\n message:\n 'No object generated: the model did not return a response.',\n response: responseData,\n usage: calculateLanguageModelUsage(result.usage),\n });\n }\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': result.finishReason,\n 'ai.response.object': { output: () => result.text },\n 'ai.response.id': responseData.id,\n 'ai.response.model': responseData.modelId,\n 'ai.response.timestamp':\n responseData.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': result.usage.promptTokens,\n 'ai.usage.completionTokens':\n result.usage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [result.finishReason],\n 'gen_ai.response.id': responseData.id,\n 'gen_ai.response.model': responseData.modelId,\n 'gen_ai.usage.prompt_tokens': result.usage.promptTokens,\n 'gen_ai.usage.completion_tokens':\n result.usage.completionTokens,\n },\n }),\n );\n\n return { ...result, objectText: result.text, responseData };\n },\n }),\n );\n\n result = generateResult.objectText;\n finishReason = generateResult.finishReason;\n usage = generateResult.usage;\n warnings = generateResult.warnings;\n rawResponse = generateResult.rawResponse;\n logprobs = generateResult.logprobs;\n resultProviderMetadata = generateResult.providerMetadata;\n request = generateResult.request ?? 
{};\n response = generateResult.responseData;\n\n break;\n }\n\n case 'tool': {\n const standardizedPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools: undefined,\n });\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n const inputFormat = standardizedPrompt.type;\n\n const generateResult = await retry(() =>\n recordSpan({\n name: 'ai.generateObject.doGenerate',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateObject.doGenerate',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => inputFormat,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.settings.mode': mode,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n fn: async span => {\n const result = await model.doGenerate({\n mode: {\n type: 'object-tool',\n tool: {\n type: 'function',\n name: schemaName ?? 'json',\n description:\n schemaDescription ?? 'Respond with a JSON object.',\n parameters: outputStrategy.jsonSchema!,\n },\n },\n ...prepareCallSettings(settings),\n inputFormat,\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n });\n\n const objectText = result.toolCalls?.[0]?.args;\n\n const responseData = {\n id: result.response?.id ?? generateId(),\n timestamp: result.response?.timestamp ?? currentDate(),\n modelId: result.response?.modelId ?? model.modelId,\n };\n\n if (objectText === undefined) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: the tool was not called.',\n response: responseData,\n usage: calculateLanguageModelUsage(result.usage),\n });\n }\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': result.finishReason,\n 'ai.response.object': { output: () => objectText },\n 'ai.response.id': responseData.id,\n 'ai.response.model': responseData.modelId,\n 'ai.response.timestamp':\n responseData.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': result.usage.promptTokens,\n 'ai.usage.completionTokens':\n result.usage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [result.finishReason],\n 'gen_ai.response.id': responseData.id,\n 'gen_ai.response.model': responseData.modelId,\n 'gen_ai.usage.input_tokens': result.usage.promptTokens,\n 'gen_ai.usage.output_tokens':\n result.usage.completionTokens,\n },\n }),\n );\n\n return { ...result, objectText, responseData };\n },\n }),\n );\n\n result = generateResult.objectText;\n finishReason = generateResult.finishReason;\n usage = generateResult.usage;\n warnings = generateResult.warnings;\n rawResponse = generateResult.rawResponse;\n logprobs = generateResult.logprobs;\n resultProviderMetadata = generateResult.providerMetadata;\n request = generateResult.request ?? 
{};\n response = generateResult.responseData;\n\n break;\n }\n\n case undefined: {\n throw new Error(\n 'Model does not have a default object generation mode.',\n );\n }\n\n default: {\n const _exhaustiveCheck: never = mode;\n throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);\n }\n }\n\n const parseResult = safeParseJSON({ text: result });\n\n if (!parseResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: could not parse the response.',\n cause: parseResult.error,\n text: result,\n response,\n usage: calculateLanguageModelUsage(usage),\n });\n }\n\n const validationResult = outputStrategy.validateFinalResult(\n parseResult.value,\n {\n text: result,\n response,\n usage: calculateLanguageModelUsage(usage),\n },\n );\n\n if (!validationResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: response did not match schema.',\n cause: validationResult.error,\n text: result,\n response,\n usage: calculateLanguageModelUsage(usage),\n });\n }\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': finishReason,\n 'ai.response.object': {\n output: () => JSON.stringify(validationResult.value),\n },\n\n 'ai.usage.promptTokens': usage.promptTokens,\n 'ai.usage.completionTokens': usage.completionTokens,\n },\n }),\n );\n\n return new DefaultGenerateObjectResult({\n object: validationResult.value,\n finishReason,\n usage: calculateLanguageModelUsage(usage),\n warnings,\n request,\n response: {\n ...response,\n headers: rawResponse?.headers,\n },\n logprobs,\n providerMetadata: resultProviderMetadata,\n });\n },\n });\n}\n\nclass DefaultGenerateObjectResult<T> implements GenerateObjectResult<T> {\n readonly object: GenerateObjectResult<T>['object'];\n readonly finishReason: GenerateObjectResult<T>['finishReason'];\n readonly usage: GenerateObjectResult<T>['usage'];\n readonly warnings: GenerateObjectResult<T>['warnings'];\n readonly logprobs: GenerateObjectResult<T>['logprobs'];\n readonly experimental_providerMetadata: GenerateObjectResult<T>['experimental_providerMetadata'];\n readonly response: GenerateObjectResult<T>['response'];\n readonly request: GenerateObjectResult<T>['request'];\n\n constructor(options: {\n object: GenerateObjectResult<T>['object'];\n finishReason: GenerateObjectResult<T>['finishReason'];\n usage: GenerateObjectResult<T>['usage'];\n warnings: GenerateObjectResult<T>['warnings'];\n logprobs: GenerateObjectResult<T>['logprobs'];\n providerMetadata: GenerateObjectResult<T>['experimental_providerMetadata'];\n response: GenerateObjectResult<T>['response'];\n request: GenerateObjectResult<T>['request'];\n }) {\n this.object = options.object;\n this.finishReason = options.finishReason;\n this.usage = options.usage;\n this.warnings = options.warnings;\n this.experimental_providerMetadata = options.providerMetadata;\n this.response = options.response;\n this.request = options.request;\n this.logprobs = options.logprobs;\n }\n\n toJsonResponse(init?: ResponseInit): Response {\n return new Response(JSON.stringify(this.object), {\n status: init?.status ?? 
200,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'application/json; charset=utf-8',\n }),\n });\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\nimport { LanguageModelResponseMetadata } from '../core/types/language-model-response-metadata';\nimport { LanguageModelUsage } from '../core/types/usage';\n\nconst name = 'AI_NoObjectGeneratedError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\nThrown when no object could be generated. This can have several causes:\n\n- The model failed to generate a response.\n- The model generated a response that could not be parsed.\n- The model generated a response that could not be validated against the schema.\n\nThe error contains the following properties:\n\n- `text`: The text that was generated by the model. This can be the raw text or the tool call text, depending on the model.\n */\nexport class NoObjectGeneratedError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n /**\n The text that was generated by the model. This can be the raw text or the tool call text, depending on the model.\n */\n readonly text: string | undefined;\n\n /**\n The response metadata.\n */\n readonly response: LanguageModelResponseMetadata | undefined;\n\n /**\n The usage of the model.\n */\n readonly usage: LanguageModelUsage | undefined;\n\n constructor({\n message = 'No object generated.',\n cause,\n text,\n response,\n usage,\n }: {\n message?: string;\n cause?: Error;\n text?: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n }) {\n super({ name, message, cause });\n\n this.text = text;\n this.response = response;\n this.usage = usage;\n }\n\n static isInstance(error: unknown): error is NoObjectGeneratedError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n\nexport function verifyNoObjectGeneratedError(\n error: unknown,\n expected: {\n message: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n) {\n expect(NoObjectGeneratedError.isInstance(error)).toBeTruthy();\n const noObjectGeneratedError = error as NoObjectGeneratedError;\n expect(noObjectGeneratedError.message).toStrictEqual(expected.message);\n expect(noObjectGeneratedError.response).toStrictEqual(expected.response);\n expect(noObjectGeneratedError.usage).toStrictEqual(expected.usage);\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_DownloadError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class DownloadError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly url: string;\n readonly statusCode?: number;\n readonly statusText?: string;\n\n constructor({\n url,\n statusCode,\n statusText,\n cause,\n message = cause == null\n ? 
`Failed to download ${url}: ${statusCode} ${statusText}`\n : `Failed to download ${url}: ${cause}`,\n }: {\n url: string;\n statusCode?: number;\n statusText?: string;\n message?: string;\n cause?: unknown;\n }) {\n super({ name, message, cause });\n\n this.url = url;\n this.statusCode = statusCode;\n this.statusText = statusText;\n }\n\n static isInstance(error: unknown): error is DownloadError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { DownloadError } from './download-error';\n\nexport async function download({\n url,\n fetchImplementation = fetch,\n}: {\n url: URL;\n fetchImplementation?: typeof fetch;\n}): Promise<{\n data: Uint8Array;\n mimeType: string | undefined;\n}> {\n const urlText = url.toString();\n try {\n const response = await fetchImplementation(urlText);\n\n if (!response.ok) {\n throw new DownloadError({\n url: urlText,\n statusCode: response.status,\n statusText: response.statusText,\n });\n }\n\n return {\n data: new Uint8Array(await response.arrayBuffer()),\n mimeType: response.headers.get('content-type') ?? undefined,\n };\n } catch (error) {\n if (DownloadError.isInstance(error)) {\n throw error;\n }\n\n throw new DownloadError({ url: urlText, cause: error });\n }\n}\n","const mimeTypeSignatures = [\n { mimeType: 'image/gif' as const, bytes: [0x47, 0x49, 0x46] },\n { mimeType: 'image/png' as const, bytes: [0x89, 0x50, 0x4e, 0x47] },\n { mimeType: 'image/jpeg' as const, bytes: [0xff, 0xd8] },\n { mimeType: 'image/webp' as const, bytes: [0x52, 0x49, 0x46, 0x46] },\n];\n\nexport function detectImageMimeType(\n image: Uint8Array,\n): 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp' | undefined {\n for (const { bytes, mimeType } of mimeTypeSignatures) {\n if (\n image.length >= bytes.length &&\n bytes.every((byte, index) => image[index] === byte)\n ) {\n return mimeType;\n }\n }\n\n return undefined;\n}\n","import {\n convertBase64ToUint8Array,\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\nimport { InvalidDataContentError } from './invalid-data-content-error';\nimport { z } from 'zod';\n\n/**\nData content. Can either be a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer.\n */\nexport type DataContent = string | Uint8Array | ArrayBuffer | Buffer;\n\n/**\n@internal\n */\nexport const dataContentSchema: z.ZodType<DataContent> = z.union([\n z.string(),\n z.instanceof(Uint8Array),\n z.instanceof(ArrayBuffer),\n z.custom(\n // Buffer might not be available in some environments such as CloudFlare:\n (value: unknown): value is Buffer =>\n globalThis.Buffer?.isBuffer(value) ?? false,\n { message: 'Must be a Buffer' },\n ),\n]);\n\n/**\nConverts data content to a base64-encoded string.\n\n@param content - Data content to convert.\n@returns Base64-encoded string.\n*/\nexport function convertDataContentToBase64String(content: DataContent): string {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content instanceof ArrayBuffer) {\n return convertUint8ArrayToBase64(new Uint8Array(content));\n }\n\n return convertUint8ArrayToBase64(content);\n}\n\n/**\nConverts data content to a Uint8Array.\n\n@param content - Data content to convert.\n@returns Uint8Array.\n */\nexport function convertDataContentToUint8Array(\n content: DataContent,\n): Uint8Array {\n if (content instanceof Uint8Array) {\n return content;\n }\n\n if (typeof content === 'string') {\n try {\n return convertBase64ToUint8Array(content);\n } catch (error) {\n throw new InvalidDataContentError({\n message:\n 'Invalid data content. 
Content string is not a base64-encoded media.',\n content,\n cause: error,\n });\n }\n }\n\n if (content instanceof ArrayBuffer) {\n return new Uint8Array(content);\n }\n\n throw new InvalidDataContentError({ content });\n}\n\n/**\n * Converts a Uint8Array to a string of text.\n *\n * @param uint8Array - The Uint8Array to convert.\n * @returns The converted string.\n */\nexport function convertUint8ArrayToText(uint8Array: Uint8Array): string {\n try {\n return new TextDecoder().decode(uint8Array);\n } catch (error) {\n throw new Error('Error decoding Uint8Array to text');\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidDataContentError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidDataContentError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly content: unknown;\n\n constructor({\n content,\n cause,\n message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`,\n }: {\n content: unknown;\n cause?: unknown;\n message?: string;\n }) {\n super({ name, message, cause });\n\n this.content = content;\n }\n\n static isInstance(error: unknown): error is InvalidDataContentError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidMessageRoleError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidMessageRoleError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly role: string;\n\n constructor({\n role,\n message = `Invalid message role: '${role}'. Must be one of: \"system\", \"user\", \"assistant\", \"tool\".`,\n }: {\n role: string;\n message?: string;\n }) {\n super({ name, message });\n\n this.role = role;\n }\n\n static isInstance(error: unknown): error is InvalidMessageRoleError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","export function splitDataUrl(dataUrl: string): {\n mimeType: string | undefined;\n base64Content: string | undefined;\n} {\n try {\n const [header, base64Content] = dataUrl.split(',');\n return {\n mimeType: header.split(';')[0].split(':')[1],\n base64Content,\n };\n } catch (error) {\n return {\n mimeType: undefined,\n base64Content: undefined,\n };\n }\n}\n","import {\n LanguageModelV1FilePart,\n LanguageModelV1ImagePart,\n LanguageModelV1Message,\n LanguageModelV1Prompt,\n LanguageModelV1TextPart,\n} from '@ai-sdk/provider';\nimport { download } from '../../util/download';\nimport { CoreMessage } from '../prompt/message';\nimport { detectImageMimeType } from '../util/detect-image-mimetype';\nimport { FilePart, ImagePart, TextPart } from './content-part';\nimport {\n convertDataContentToBase64String,\n convertDataContentToUint8Array,\n DataContent,\n} from './data-content';\nimport { InvalidMessageRoleError } from './invalid-message-role-error';\nimport { splitDataUrl } from './split-data-url';\nimport { StandardizedPrompt } from './standardize-prompt';\n\nexport async function convertToLanguageModelPrompt({\n prompt,\n modelSupportsImageUrls = true,\n modelSupportsUrl = () => false,\n downloadImplementation = download,\n}: {\n prompt: StandardizedPrompt;\n modelSupportsImageUrls: boolean | undefined;\n modelSupportsUrl: undefined | ((url: URL) => boolean);\n downloadImplementation?: typeof download;\n}): Promise<LanguageModelV1Prompt> {\n const downloadedAssets = await downloadAssets(\n 
prompt.messages,\n downloadImplementation,\n modelSupportsImageUrls,\n modelSupportsUrl,\n );\n\n return [\n ...(prompt.system != null\n ? [{ role: 'system' as const, content: prompt.system }]\n : []),\n ...prompt.messages.map(message =>\n convertToLanguageModelMessage(message, downloadedAssets),\n ),\n ];\n}\n\n/**\n * Convert a CoreMessage to a LanguageModelV1Message.\n *\n * @param message The CoreMessage to convert.\n * @param downloadedAssets A map of URLs to their downloaded data. Only\n * available if the model does not support URLs, null otherwise.\n */\nexport function convertToLanguageModelMessage(\n message: CoreMessage,\n downloadedAssets: Record<\n string,\n { mimeType: string | undefined; data: Uint8Array }\n >,\n): LanguageModelV1Message {\n const role = message.role;\n switch (role) {\n case 'system': {\n return {\n role: 'system',\n content: message.content,\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n case 'user': {\n if (typeof message.content === 'string') {\n return {\n role: 'user',\n content: [{ type: 'text', text: message.content }],\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n return {\n role: 'user',\n content: message.content\n .map(part => convertPartToLanguageModelPart(part, downloadedAssets))\n // remove empty text parts:\n .filter(part => part.type !== 'text' || part.text !== ''),\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n case 'assistant': {\n if (typeof message.content === 'string') {\n return {\n role: 'assistant',\n content: [{ type: 'text', text: message.content }],\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n return {\n role: 'assistant',\n content: message.content\n .filter(\n // remove empty text parts:\n part => part.type !== 'text' || part.text !== '',\n )\n .map(part => {\n const { experimental_providerMetadata, ...rest } = part;\n return {\n ...rest,\n providerMetadata: experimental_providerMetadata,\n };\n }),\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n case 'tool': {\n return {\n role: 'tool',\n content: message.content.map(part => ({\n type: 'tool-result',\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n result: part.result,\n content: part.experimental_content,\n isError: part.isError,\n providerMetadata: part.experimental_providerMetadata,\n })),\n providerMetadata: message.experimental_providerMetadata,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new InvalidMessageRoleError({ role: _exhaustiveCheck });\n }\n }\n}\n\n/**\n * Downloads images and files from URLs in the messages.\n */\nasync function downloadAssets(\n messages: CoreMessage[],\n downloadImplementation: typeof download,\n modelSupportsImageUrls: boolean | undefined,\n modelSupportsUrl: (url: URL) => boolean,\n): Promise<Record<string, { mimeType: string | undefined; data: Uint8Array }>> {\n const urls = messages\n .filter(message => message.role === 'user')\n .map(message => message.content)\n .filter((content): content is Array<TextPart | ImagePart | FilePart> =>\n Array.isArray(content),\n )\n .flat()\n .filter(\n (part): part is ImagePart | FilePart =>\n part.type === 'image' || part.type === 'file',\n )\n /**\n * Filter out image parts if the model supports image URLs, before letting it\n * decide if it supports a particular URL.\n */\n .filter(\n (part): part is ImagePart | FilePart =>\n !(part.type === 'image' && modelSupportsImageUrls === true),\n )\n .map(part => (part.type === 'image' ? 
part.image : part.data))\n .map(part =>\n // support string urls:\n typeof part === 'string' &&\n (part.startsWith('http:') || part.startsWith('https:'))\n ? new URL(part)\n : part,\n )\n .filter((image): image is URL => image instanceof URL)\n /**\n * Filter out URLs that the model supports natively, so we don't download them.\n */\n .filter(url => !modelSupportsUrl(url));\n\n // download in parallel:\n const downloadedImages = await Promise.all(\n urls.map(async url => ({\n url,\n data: await downloadImplementation({ url }),\n })),\n );\n\n return Object.fromEntries(\n downloadedImages.map(({ url, data }) => [url.toString(), data]),\n );\n}\n\n/**\n * Convert part of a message to a LanguageModelV1Part.\n * @param part The part to convert.\n * @param downloadedAssets A map of URLs to their downloaded data. Only\n * available if the model does not support URLs, null otherwise.\n *\n * @returns The converted part.\n */\nfunction convertPartToLanguageModelPart(\n part: TextPart | ImagePart | FilePart,\n downloadedAssets: Record<\n string,\n { mimeType: string | undefined; data: Uint8Array }\n >,\n):\n | LanguageModelV1TextPart\n | LanguageModelV1ImagePart\n | LanguageModelV1FilePart {\n if (part.type === 'text') {\n return {\n type: 'text',\n text: part.text,\n providerMetadata: part.experimental_providerMetadata,\n };\n }\n\n let mimeType: string | undefined = part.mimeType;\n let data: DataContent | URL;\n let content: URL | ArrayBuffer | string;\n let normalizedData: Uint8Array | URL;\n\n const type = part.type;\n switch (type) {\n case 'image':\n data = part.image;\n break;\n case 'file':\n data = part.data;\n break;\n default:\n throw new Error(`Unsupported part type: ${type}`);\n }\n\n // Attempt to create a URL from the data. If it fails, we can assume the data\n // is not a URL and likely some other sort of data.\n try {\n content = typeof data === 'string' ? new URL(data) : data;\n } catch (error) {\n content = data;\n }\n\n // If we successfully created a URL, we can use that to normalize the data\n // either by passing it through or converting normalizing the base64 content\n // to a Uint8Array.\n if (content instanceof URL) {\n // If the content is a data URL, we want to convert that to a Uint8Array\n if (content.protocol === 'data:') {\n const { mimeType: dataUrlMimeType, base64Content } = splitDataUrl(\n content.toString(),\n );\n\n if (dataUrlMimeType == null || base64Content == null) {\n throw new Error(`Invalid data URL format in part ${type}`);\n }\n\n mimeType = dataUrlMimeType;\n normalizedData = convertDataContentToUint8Array(base64Content);\n } else {\n /**\n * If the content is a URL, we should first see if it was downloaded. And if not,\n * we can let the model decide if it wants to support the URL. This also allows\n * for non-HTTP URLs to be passed through (e.g. 
gs://).\n */\n const downloadedFile = downloadedAssets[content.toString()];\n if (downloadedFile) {\n normalizedData = downloadedFile.data;\n mimeType ??= downloadedFile.mimeType;\n } else {\n normalizedData = content;\n }\n }\n } else {\n // Since we know know the content is not a URL, we can attempt to normalize the data\n // assuming it is some sort of data.\n normalizedData = convertDataContentToUint8Array(content);\n }\n\n // Now that we have the normalized data either as a URL or a Uint8Array,\n // we can create the LanguageModelV1Part.\n switch (type) {\n case 'image': {\n // When possible, try to detect the mimetype automatically\n // to deal with incorrect mimetype inputs.\n // When detection fails, use provided mimetype.\n\n if (normalizedData instanceof Uint8Array) {\n mimeType = detectImageMimeType(normalizedData) ?? mimeType;\n }\n return {\n type: 'image',\n image: normalizedData,\n mimeType,\n providerMetadata: part.experimental_providerMetadata,\n };\n }\n\n case 'file': {\n // We should have a mimeType at this point, if not, throw an error.\n if (mimeType == null) {\n throw new Error(`Mime type is missing for file part`);\n }\n\n return {\n type: 'file',\n data:\n normalizedData instanceof Uint8Array\n ? convertDataContentToBase64String(normalizedData)\n : normalizedData,\n mimeType,\n providerMetadata: part.experimental_providerMetadata,\n };\n }\n }\n}\n","import { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport { CallSettings } from './call-settings';\n\n/**\n * Validates call settings and sets default values.\n */\nexport function prepareCallSettings({\n maxTokens,\n temperature,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n stopSequences,\n seed,\n}: Omit<CallSettings, 'abortSignal' | 'headers' | 'maxRetries'>): Omit<\n CallSettings,\n 'abortSignal' | 'headers' | 'maxRetries'\n> {\n if (maxTokens != null) {\n if (!Number.isInteger(maxTokens)) {\n throw new InvalidArgumentError({\n parameter: 'maxTokens',\n value: maxTokens,\n message: 'maxTokens must be an integer',\n });\n }\n\n if (maxTokens < 1) {\n throw new InvalidArgumentError({\n parameter: 'maxTokens',\n value: maxTokens,\n message: 'maxTokens must be >= 1',\n });\n }\n }\n\n if (temperature != null) {\n if (typeof temperature !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'temperature',\n value: temperature,\n message: 'temperature must be a number',\n });\n }\n }\n\n if (topP != null) {\n if (typeof topP !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'topP',\n value: topP,\n message: 'topP must be a number',\n });\n }\n }\n\n if (topK != null) {\n if (typeof topK !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'topK',\n value: topK,\n message: 'topK must be a number',\n });\n }\n }\n\n if (presencePenalty != null) {\n if (typeof presencePenalty !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'presencePenalty',\n value: presencePenalty,\n message: 'presencePenalty must be a number',\n });\n }\n }\n\n if (frequencyPenalty != null) {\n if (typeof frequencyPenalty !== 'number') {\n throw new InvalidArgumentError({\n parameter: 'frequencyPenalty',\n value: frequencyPenalty,\n message: 'frequencyPenalty must be a number',\n });\n }\n }\n\n if (seed != null) {\n if (!Number.isInteger(seed)) {\n throw new InvalidArgumentError({\n parameter: 'seed',\n value: seed,\n message: 'seed must be an integer',\n });\n }\n }\n\n return {\n maxTokens,\n temperature: temperature ?? 
0,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n stopSequences:\n stopSequences != null && stopSequences.length > 0\n ? stopSequences\n : undefined,\n seed,\n };\n}\n","import { InvalidPromptError } from '@ai-sdk/provider';\nimport { safeValidateTypes } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { CoreMessage, coreMessageSchema } from './message';\nimport { Prompt } from './prompt';\nimport { detectPromptType } from './detect-prompt-type';\nimport { convertToCoreMessages } from './convert-to-core-messages';\nimport { UIMessage } from './ui-message';\nimport { CoreTool } from '../tool/tool';\n\nexport type StandardizedPrompt = {\n /**\n * Original prompt type. This is forwarded to the providers and can be used\n * to write send raw text to providers that support it.\n */\n type: 'prompt' | 'messages';\n\n /**\n * System message.\n */\n system?: string;\n\n /**\n * Messages.\n */\n messages: CoreMessage[];\n};\n\nexport function standardizePrompt<TOOLS extends Record<string, CoreTool>>({\n prompt,\n tools,\n}: {\n prompt: Prompt;\n tools: undefined | TOOLS;\n}): StandardizedPrompt {\n if (prompt.prompt == null && prompt.messages == null) {\n throw new InvalidPromptError({\n prompt,\n message: 'prompt or messages must be defined',\n });\n }\n\n if (prompt.prompt != null && prompt.messages != null) {\n throw new InvalidPromptError({\n prompt,\n message: 'prompt and messages cannot be defined at the same time',\n });\n }\n\n // validate that system is a string\n if (prompt.system != null && typeof prompt.system !== 'string') {\n throw new InvalidPromptError({\n prompt,\n message: 'system must be a string',\n });\n }\n\n // type: prompt\n if (prompt.prompt != null) {\n // validate that prompt is a string\n if (typeof prompt.prompt !== 'string') {\n throw new InvalidPromptError({\n prompt,\n message: 'prompt must be a string',\n });\n }\n\n return {\n type: 'prompt',\n system: prompt.system,\n messages: [\n {\n role: 'user',\n content: prompt.prompt,\n },\n ],\n };\n }\n\n // type: messages\n if (prompt.messages != null) {\n const promptType = detectPromptType(prompt.messages);\n\n if (promptType === 'other') {\n throw new InvalidPromptError({\n prompt,\n message: 'messages must be an array of CoreMessage or UIMessage',\n });\n }\n\n const messages: CoreMessage[] =\n promptType === 'ui-messages'\n ? convertToCoreMessages(prompt.messages as UIMessage[], {\n tools,\n })\n : (prompt.messages as CoreMessage[]);\n\n const validationResult = safeValidateTypes({\n value: messages,\n schema: z.array(coreMessageSchema),\n });\n\n if (!validationResult.success) {\n throw new InvalidPromptError({\n prompt,\n message: 'messages must be an array of CoreMessage or UIMessage',\n cause: validationResult.error,\n });\n }\n\n return {\n type: 'messages',\n messages,\n system: prompt.system,\n };\n }\n\n throw new Error('unreachable');\n}\n","import { z } from 'zod';\nimport { ProviderMetadata } from '../types';\nimport { providerMetadataSchema } from '../types/provider-metadata';\nimport {\n FilePart,\n filePartSchema,\n ImagePart,\n imagePartSchema,\n TextPart,\n textPartSchema,\n ToolCallPart,\n toolCallPartSchema,\n ToolResultPart,\n toolResultPartSchema,\n} from './content-part';\n\n/**\n A system message. 
It can contain system information.\n\n Note: using the \"system\" part of the prompt is strongly preferred\n to increase the resilience against prompt injection attacks,\n and because not all providers support several system messages.\n */\nexport type CoreSystemMessage = {\n role: 'system';\n content: string;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreSystemMessageSchema: z.ZodType<CoreSystemMessage> = z.object({\n role: z.literal('system'),\n content: z.string(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nA user message. It can contain text or a combination of text and images.\n */\nexport type CoreUserMessage = {\n role: 'user';\n content: UserContent;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreUserMessageSchema: z.ZodType<CoreUserMessage> = z.object({\n role: z.literal('user'),\n content: z.union([\n z.string(),\n z.array(z.union([textPartSchema, imagePartSchema, filePartSchema])),\n ]),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nContent of a user message. It can be a string or an array of text and image parts.\n */\nexport type UserContent = string | Array<TextPart | ImagePart | FilePart>;\n\n/**\nAn assistant message. It can contain text, tool calls, or a combination of text and tool calls.\n */\nexport type CoreAssistantMessage = {\n role: 'assistant';\n content: AssistantContent;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreAssistantMessageSchema: z.ZodType<CoreAssistantMessage> =\n z.object({\n role: z.literal('assistant'),\n content: z.union([\n z.string(),\n z.array(z.union([textPartSchema, toolCallPartSchema])),\n ]),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n });\n\n/**\nContent of an assistant message. It can be a string or an array of text and tool call parts.\n */\nexport type AssistantContent = string | Array<TextPart | ToolCallPart>;\n\n/**\nA tool message. It contains the result of one or more tool calls.\n */\nexport type CoreToolMessage = {\n role: 'tool';\n content: ToolContent;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n};\n\nexport const coreToolMessageSchema: z.ZodType<CoreToolMessage> = z.object({\n role: z.literal('tool'),\n content: z.array(toolResultPartSchema),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nContent of a tool message. 
It is an array of tool result parts.\n */\nexport type ToolContent = Array<ToolResultPart>;\n\n/**\nA message that can be used in the `messages` field of a prompt.\nIt can be a user message, an assistant message, or a tool message.\n */\nexport type CoreMessage =\n | CoreSystemMessage\n | CoreUserMessage\n | CoreAssistantMessage\n | CoreToolMessage;\n\nexport const coreMessageSchema: z.ZodType<CoreMessage> = z.union([\n coreSystemMessageSchema,\n coreUserMessageSchema,\n coreAssistantMessageSchema,\n coreToolMessageSchema,\n]);\n","import { LanguageModelV1ProviderMetadata } from '@ai-sdk/provider';\nimport { z } from 'zod';\nimport { jsonValueSchema } from './json-value';\n\n/**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\nexport type ProviderMetadata = LanguageModelV1ProviderMetadata;\n\nexport const providerMetadataSchema: z.ZodType<ProviderMetadata> = z.record(\n z.string(),\n z.record(z.string(), jsonValueSchema),\n);\n","import { JSONValue } from '@ai-sdk/provider';\nimport { z } from 'zod';\n\nexport const jsonValueSchema: z.ZodType<JSONValue> = z.lazy(() =>\n z.union([\n z.null(),\n z.string(),\n z.number(),\n z.boolean(),\n z.record(z.string(), jsonValueSchema),\n z.array(jsonValueSchema),\n ]),\n);\n","import { z } from 'zod';\nimport {\n ProviderMetadata,\n providerMetadataSchema,\n} from '../types/provider-metadata';\nimport { DataContent, dataContentSchema } from './data-content';\nimport {\n ToolResultContent,\n toolResultContentSchema,\n} from './tool-result-content';\n\n/**\nText content part of a prompt. It contains a string of text.\n */\nexport interface TextPart {\n type: 'text';\n\n /**\nThe text content.\n */\n text: string;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const textPartSchema: z.ZodType<TextPart> = z.object({\n type: z.literal('text'),\n text: z.string(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nImage content part of a prompt. It contains an image.\n */\nexport interface ImagePart {\n type: 'image';\n\n /**\nImage data. Can either be:\n\n- data: a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer\n- URL: a URL that points to the image\n */\n image: DataContent | URL;\n\n /**\nOptional mime type of the image.\n */\n mimeType?: string;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const imagePartSchema: z.ZodType<ImagePart> = z.object({\n type: z.literal('image'),\n image: z.union([dataContentSchema, z.instanceof(URL)]),\n mimeType: z.string().optional(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nFile content part of a prompt. It contains a file.\n */\nexport interface FilePart {\n type: 'file';\n\n /**\nFile data. Can either be:\n\n- data: a base64-encoded string, a Uint8Array, an ArrayBuffer, or a Buffer\n- URL: a URL that points to the image\n */\n data: DataContent | URL;\n\n /**\nMime type of the file.\n */\n mimeType: string;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const filePartSchema: z.ZodType<FilePart> = z.object({\n type: z.literal('file'),\n data: z.union([dataContentSchema, z.instanceof(URL)]),\n mimeType: z.string(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n});\n\n/**\nTool call content part of a prompt. It contains a tool call (usually generated by the AI model).\n */\nexport interface ToolCallPart {\n type: 'tool-call';\n\n /**\nID of the tool call. This ID is used to match the tool call with the tool result.\n */\n toolCallId: string;\n\n /**\nName of the tool that is being called.\n */\n toolName: string;\n\n /**\nArguments of the tool call. This is a JSON-serializable object that matches the tool's input schema.\n */\n args: unknown;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const toolCallPartSchema: z.ZodType<ToolCallPart> = z.object({\n type: z.literal('tool-call'),\n toolCallId: z.string(),\n toolName: z.string(),\n args: z.unknown(),\n}) as z.ZodType<ToolCallPart>; // necessary bc args is optional on Zod type\n\n/**\nTool result content part of a prompt. It contains the result of the tool call with the matching ID.\n */\nexport interface ToolResultPart {\n type: 'tool-result';\n\n /**\nID of the tool call that this result is associated with.\n */\n toolCallId: string;\n\n /**\nName of the tool that generated this result.\n */\n toolName: string;\n\n /**\nResult of the tool call. This is a JSON-serializable object.\n */\n result: unknown;\n\n /**\nMulti-part content of the tool result. Only for tools that support multipart results.\n */\n experimental_content?: ToolResultContent;\n\n /**\nOptional flag if the result is an error or an error message.\n */\n isError?: boolean;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n}\n\nexport const toolResultPartSchema: z.ZodType<ToolResultPart> = z.object({\n type: z.literal('tool-result'),\n toolCallId: z.string(),\n toolName: z.string(),\n result: z.unknown(),\n content: toolResultContentSchema.optional(),\n isError: z.boolean().optional(),\n experimental_providerMetadata: providerMetadataSchema.optional(),\n}) as z.ZodType<ToolResultPart>; // necessary bc result is optional on Zod type\n","import { z } from 'zod';\n\nexport type ToolResultContent = Array<\n | {\n type: 'text';\n text: string;\n }\n | {\n type: 'image';\n data: string; // base64 encoded png image, e.g. screenshot\n mimeType?: string; // e.g. 
'image/png';\n }\n>;\n\nexport const toolResultContentSchema: z.ZodType<ToolResultContent> = z.array(\n z.union([\n z.object({ type: z.literal('text'), text: z.string() }),\n z.object({\n type: z.literal('image'),\n data: z.string(),\n mimeType: z.string().optional(),\n }),\n ]),\n);\n\nexport function isToolResultContent(\n value: unknown,\n): value is ToolResultContent {\n if (!Array.isArray(value) || value.length === 0) {\n return false;\n }\n\n return value.every(part => {\n if (typeof part !== 'object' || part === null) {\n return false;\n }\n\n if (part.type === 'text') {\n return typeof part.text === 'string';\n }\n\n if (part.type === 'image') {\n return (\n typeof part.data === 'string' &&\n (part.mimeType === undefined || typeof part.mimeType === 'string')\n );\n }\n\n return false;\n });\n}\n","export function detectPromptType(\n prompt: Array<any>,\n): 'ui-messages' | 'messages' | 'other' {\n if (!Array.isArray(prompt)) {\n return 'other';\n }\n\n if (prompt.length === 0) {\n return 'messages';\n }\n\n const characteristics = prompt.map(detectSingleMessageCharacteristics);\n\n if (characteristics.some(c => c === 'has-ui-specific-parts')) {\n return 'ui-messages';\n } else if (\n characteristics.every(\n c => c === 'has-core-specific-parts' || c === 'message',\n )\n ) {\n return 'messages';\n } else {\n return 'other';\n }\n}\n\nfunction detectSingleMessageCharacteristics(\n message: any,\n): 'has-ui-specific-parts' | 'has-core-specific-parts' | 'message' | 'other' {\n if (\n typeof message === 'object' &&\n message !== null &&\n (message.role === 'function' || // UI-only role\n message.role === 'data' || // UI-only role\n 'toolInvocations' in message || // UI-specific field\n 'experimental_attachments' in message)\n ) {\n return 'has-ui-specific-parts';\n } else if (\n typeof message === 'object' &&\n message !== null &&\n 'content' in message &&\n (Array.isArray(message.content) || // Core messages can have array content\n 'experimental_providerMetadata' in message)\n ) {\n return 'has-core-specific-parts';\n } else if (\n typeof message === 'object' &&\n message !== null &&\n 'role' in message &&\n 'content' in message &&\n typeof message.content === 'string' &&\n ['system', 'user', 'assistant', 'tool'].includes(message.role)\n ) {\n return 'message';\n } else {\n return 'other';\n }\n}\n","import { Attachment } from '@ai-sdk/ui-utils';\nimport { FilePart, ImagePart, TextPart } from './content-part';\nimport {\n convertDataContentToUint8Array,\n convertUint8ArrayToText,\n} from './data-content';\n\ntype ContentPart = TextPart | ImagePart | FilePart;\n\n/**\n * Converts a list of attachments to a list of content parts\n * for consumption by `ai/core` functions.\n * Currently only supports images and text attachments.\n */\nexport function attachmentsToParts(attachments: Attachment[]): ContentPart[] {\n const parts: ContentPart[] = [];\n\n for (const attachment of attachments) {\n let url;\n\n try {\n url = new URL(attachment.url);\n } catch (error) {\n throw new Error(`Invalid URL: ${attachment.url}`);\n }\n\n switch (url.protocol) {\n case 'http:':\n case 'https:': {\n if (attachment.contentType?.startsWith('image/')) {\n parts.push({ type: 'image', image: url });\n } else {\n if (!attachment.contentType) {\n throw new Error(\n 'If the attachment is not an image, it must specify a content type',\n );\n }\n\n parts.push({\n type: 'file',\n data: url,\n mimeType: attachment.contentType,\n });\n }\n break;\n }\n\n case 'data:': {\n let header;\n let base64Content;\n let 
mimeType;\n\n try {\n [header, base64Content] = attachment.url.split(',');\n mimeType = header.split(';')[0].split(':')[1];\n } catch (error) {\n throw new Error(`Error processing data URL: ${attachment.url}`);\n }\n\n if (mimeType == null || base64Content == null) {\n throw new Error(`Invalid data URL format: ${attachment.url}`);\n }\n\n if (attachment.contentType?.startsWith('image/')) {\n parts.push({\n type: 'image',\n image: convertDataContentToUint8Array(base64Content),\n });\n } else if (attachment.contentType?.startsWith('text/')) {\n parts.push({\n type: 'text',\n text: convertUint8ArrayToText(\n convertDataContentToUint8Array(base64Content),\n ),\n });\n } else {\n if (!attachment.contentType) {\n throw new Error(\n 'If the attachment is not an image or text, it must specify a content type',\n );\n }\n\n parts.push({\n type: 'file',\n data: base64Content,\n mimeType: attachment.contentType,\n });\n }\n\n break;\n }\n\n default: {\n throw new Error(`Unsupported URL protocol: ${url.protocol}`);\n }\n }\n }\n\n return parts;\n}\n","import { AISDKError } from '@ai-sdk/provider';\nimport { UIMessage } from './ui-message';\n\nconst name = 'AI_MessageConversionError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class MessageConversionError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly originalMessage: UIMessage;\n\n constructor({\n originalMessage,\n message,\n }: {\n originalMessage: UIMessage;\n message: string;\n }) {\n super({ name, message });\n\n this.originalMessage = originalMessage;\n }\n\n static isInstance(error: unknown): error is MessageConversionError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { CoreMessage, ToolCallPart, ToolResultPart } from '../prompt';\nimport { CoreTool } from '../tool/tool';\nimport { attachmentsToParts } from './attachments-to-parts';\nimport { MessageConversionError } from './message-conversion-error';\nimport { UIMessage } from './ui-message';\n\n/**\nConverts an array of messages from useChat into an array of CoreMessages that can be used\nwith the AI core functions (e.g. `streamText`).\n */\nexport function convertToCoreMessages<\n TOOLS extends Record<string, CoreTool> = never,\n>(messages: Array<UIMessage>, options?: { tools?: TOOLS }) {\n const tools = options?.tools ?? ({} as TOOLS);\n const coreMessages: CoreMessage[] = [];\n\n for (const message of messages) {\n const { role, content, toolInvocations, experimental_attachments } =\n message;\n\n switch (role) {\n case 'system': {\n coreMessages.push({\n role: 'system',\n content,\n });\n break;\n }\n\n case 'user': {\n coreMessages.push({\n role: 'user',\n content: experimental_attachments\n ? 
[\n { type: 'text', text: content },\n ...attachmentsToParts(experimental_attachments),\n ]\n : content,\n });\n break;\n }\n\n case 'assistant': {\n if (toolInvocations == null) {\n coreMessages.push({ role: 'assistant', content });\n break;\n }\n\n // assistant message with tool calls\n coreMessages.push({\n role: 'assistant',\n content: [\n { type: 'text', text: content },\n ...toolInvocations.map(\n ({ toolCallId, toolName, args }): ToolCallPart => ({\n type: 'tool-call' as const,\n toolCallId,\n toolName,\n args,\n }),\n ),\n ],\n });\n\n // tool message with tool results\n coreMessages.push({\n role: 'tool',\n content: toolInvocations.map((toolInvocation): ToolResultPart => {\n if (!('result' in toolInvocation)) {\n throw new MessageConversionError({\n originalMessage: message,\n message:\n 'ToolInvocation must have a result: ' +\n JSON.stringify(toolInvocation),\n });\n }\n\n const { toolCallId, toolName, result } = toolInvocation;\n\n const tool = tools[toolName];\n return tool?.experimental_toToolResultContent != null\n ? {\n type: 'tool-result',\n toolCallId,\n toolName,\n result: tool.experimental_toToolResultContent(result),\n experimental_content:\n tool.experimental_toToolResultContent(result),\n }\n : {\n type: 'tool-result',\n toolCallId,\n toolName,\n result,\n };\n }),\n });\n\n break;\n }\n\n case 'data': {\n // ignore\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new MessageConversionError({\n originalMessage: message,\n message: `Unsupported role: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n return coreMessages;\n}\n","/**\nRepresents the number of tokens used in a prompt and completion.\n */\nexport type LanguageModelUsage = {\n /**\nThe number of tokens used in the prompt.\n */\n promptTokens: number;\n\n /**\nThe number of tokens used in the completion.\n */\n completionTokens: number;\n\n /**\nThe total number of tokens used (promptTokens + completionTokens).\n */\n totalTokens: number;\n};\n\n/**\nRepresents the number of tokens used in an embedding.\n */\nexport type EmbeddingModelUsage = {\n /**\nThe number of tokens used in the embedding.\n */\n tokens: number;\n};\n\nexport function calculateLanguageModelUsage({\n promptTokens,\n completionTokens,\n}: {\n promptTokens: number;\n completionTokens: number;\n}): LanguageModelUsage {\n return {\n promptTokens,\n completionTokens,\n totalTokens: promptTokens + completionTokens,\n };\n}\n\nexport function addLanguageModelUsage(\n usage1: LanguageModelUsage,\n usage2: LanguageModelUsage,\n): LanguageModelUsage {\n return {\n promptTokens: usage1.promptTokens + usage2.promptTokens,\n completionTokens: usage1.completionTokens + usage2.completionTokens,\n totalTokens: usage1.totalTokens + usage2.totalTokens,\n };\n}\n","import { JSONSchema7 } from '@ai-sdk/provider';\n\nconst DEFAULT_SCHEMA_PREFIX = 'JSON schema:';\nconst DEFAULT_SCHEMA_SUFFIX =\n 'You MUST answer with a JSON object that matches the JSON schema above.';\nconst DEFAULT_GENERIC_SUFFIX = 'You MUST answer with JSON.';\n\nexport function injectJsonInstruction({\n prompt,\n schema,\n schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : undefined,\n schemaSuffix = schema != null\n ? DEFAULT_SCHEMA_SUFFIX\n : DEFAULT_GENERIC_SUFFIX,\n}: {\n prompt?: string;\n schema?: JSONSchema7;\n schemaPrefix?: string;\n schemaSuffix?: string;\n}): string {\n return [\n prompt != null && prompt.length > 0 ? prompt : undefined,\n prompt != null && prompt.length > 0 ? 
'' : undefined, // add a newline if prompt is not null\n schemaPrefix,\n schema != null ? JSON.stringify(schema) : undefined,\n schemaSuffix,\n ]\n .filter(line => line != null)\n .join('\\n');\n}\n","import {\n isJSONArray,\n isJSONObject,\n JSONObject,\n JSONSchema7,\n JSONValue,\n TypeValidationError,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { safeValidateTypes, ValidationResult } from '@ai-sdk/provider-utils';\nimport { asSchema, DeepPartial, Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors/no-object-generated-error';\nimport {\n AsyncIterableStream,\n createAsyncIterableStream,\n} from '../util/async-iterable-stream';\nimport { ObjectStreamPart } from './stream-object-result';\nimport { LanguageModelResponseMetadata, LanguageModelUsage } from '../types';\n\nexport interface OutputStrategy<PARTIAL, RESULT, ELEMENT_STREAM> {\n readonly type: 'object' | 'array' | 'enum' | 'no-schema';\n readonly jsonSchema: JSONSchema7 | undefined;\n\n validatePartialResult({\n value,\n textDelta,\n isFinalDelta,\n }: {\n value: JSONValue;\n textDelta: string;\n isFirstDelta: boolean;\n isFinalDelta: boolean;\n latestObject: PARTIAL | undefined;\n }): ValidationResult<{\n partial: PARTIAL;\n textDelta: string;\n }>;\n validateFinalResult(\n value: JSONValue | undefined,\n context: {\n text: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ): ValidationResult<RESULT>;\n\n createElementStream(\n originalStream: ReadableStream<ObjectStreamPart<PARTIAL>>,\n ): ELEMENT_STREAM;\n}\n\nconst noSchemaOutputStrategy: OutputStrategy<JSONValue, JSONValue, never> = {\n type: 'no-schema',\n jsonSchema: undefined,\n\n validatePartialResult({ value, textDelta }) {\n return { success: true, value: { partial: value, textDelta } };\n },\n\n validateFinalResult(\n value: JSONValue | undefined,\n context: {\n text: string;\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ): ValidationResult<JSONValue> {\n return value === undefined\n ? 
{\n success: false,\n error: new NoObjectGeneratedError({\n message: 'No object generated: response did not match schema.',\n text: context.text,\n response: context.response,\n usage: context.usage,\n }),\n }\n : { success: true, value };\n },\n\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: 'element streams in no-schema mode',\n });\n },\n};\n\nconst objectOutputStrategy = <OBJECT>(\n schema: Schema<OBJECT>,\n): OutputStrategy<DeepPartial<OBJECT>, OBJECT, never> => ({\n type: 'object',\n jsonSchema: schema.jsonSchema,\n\n validatePartialResult({ value, textDelta }) {\n return {\n success: true,\n value: {\n // Note: currently no validation of partial results:\n partial: value as DeepPartial<OBJECT>,\n textDelta,\n },\n };\n },\n\n validateFinalResult(value: JSONValue | undefined): ValidationResult<OBJECT> {\n return safeValidateTypes({ value, schema });\n },\n\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: 'element streams in object mode',\n });\n },\n});\n\nconst arrayOutputStrategy = <ELEMENT>(\n schema: Schema<ELEMENT>,\n): OutputStrategy<ELEMENT[], ELEMENT[], AsyncIterableStream<ELEMENT>> => {\n // remove $schema from schema.jsonSchema:\n const { $schema, ...itemSchema } = schema.jsonSchema;\n\n return {\n type: 'enum',\n\n // wrap in object that contains array of elements, since most LLMs will not\n // be able to generate an array directly:\n // possible future optimization: use arrays directly when model supports grammar-guided generation\n jsonSchema: {\n $schema: 'http://json-schema.org/draft-07/schema#',\n type: 'object',\n properties: {\n elements: { type: 'array', items: itemSchema },\n },\n required: ['elements'],\n additionalProperties: false,\n },\n\n validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {\n // check that the value is an object that contains an array of elements:\n if (!isJSONObject(value) || !isJSONArray(value.elements)) {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be an object that contains an array of elements',\n }),\n };\n }\n\n const inputArray = value.elements as Array<JSONObject>;\n const resultArray: Array<ELEMENT> = [];\n\n for (let i = 0; i < inputArray.length; i++) {\n const element = inputArray[i];\n const result = safeValidateTypes({ value: element, schema });\n\n // special treatment for last processed element:\n // ignore parse or validation failures, since they indicate that the\n // last element is incomplete and should not be included in the result,\n // unless it is the final delta\n if (i === inputArray.length - 1 && !isFinalDelta) {\n continue;\n }\n\n if (!result.success) {\n return result;\n }\n\n resultArray.push(result.value);\n }\n\n // calculate delta:\n const publishedElementCount = latestObject?.length ?? 
0;\n\n let textDelta = '';\n\n if (isFirstDelta) {\n textDelta += '[';\n }\n\n if (publishedElementCount > 0) {\n textDelta += ',';\n }\n\n textDelta += resultArray\n .slice(publishedElementCount) // only new elements\n .map(element => JSON.stringify(element))\n .join(',');\n\n if (isFinalDelta) {\n textDelta += ']';\n }\n\n return {\n success: true,\n value: {\n partial: resultArray,\n textDelta,\n },\n };\n },\n\n validateFinalResult(\n value: JSONValue | undefined,\n ): ValidationResult<Array<ELEMENT>> {\n // check that the value is an object that contains an array of elements:\n if (!isJSONObject(value) || !isJSONArray(value.elements)) {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be an object that contains an array of elements',\n }),\n };\n }\n\n const inputArray = value.elements as Array<JSONObject>;\n\n // check that each element in the array is of the correct type:\n for (const element of inputArray) {\n const result = safeValidateTypes({ value: element, schema });\n if (!result.success) {\n return result;\n }\n }\n\n return { success: true, value: inputArray as Array<ELEMENT> };\n },\n\n createElementStream(\n originalStream: ReadableStream<ObjectStreamPart<ELEMENT[]>>,\n ) {\n let publishedElements = 0;\n\n return createAsyncIterableStream(\n originalStream.pipeThrough(\n new TransformStream<ObjectStreamPart<ELEMENT[]>, ELEMENT>({\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'object': {\n const array = chunk.object;\n\n // publish new elements one by one:\n for (\n ;\n publishedElements < array.length;\n publishedElements++\n ) {\n controller.enqueue(array[publishedElements]);\n }\n\n break;\n }\n\n case 'text-delta':\n case 'finish':\n break;\n\n case 'error':\n controller.error(chunk.error);\n break;\n\n default: {\n const _exhaustiveCheck: never = chunk;\n throw new Error(\n `Unsupported chunk type: ${_exhaustiveCheck}`,\n );\n }\n }\n },\n }),\n ),\n );\n },\n };\n};\n\nconst enumOutputStrategy = <ENUM extends string>(\n enumValues: Array<ENUM>,\n): OutputStrategy<ENUM, ENUM, never> => {\n return {\n type: 'enum',\n\n // wrap in object that contains result, since most LLMs will not\n // be able to generate an enum value directly:\n // possible future optimization: use enums directly when model supports top-level enums\n jsonSchema: {\n $schema: 'http://json-schema.org/draft-07/schema#',\n type: 'object',\n properties: {\n result: { type: 'string', enum: enumValues },\n },\n required: ['result'],\n additionalProperties: false,\n },\n\n validateFinalResult(value: JSONValue | undefined): ValidationResult<ENUM> {\n // check that the value is an object that contains an array of elements:\n if (!isJSONObject(value) || typeof value.result !== 'string') {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause:\n 'value must be an object that contains a string in the \"result\" property.',\n }),\n };\n }\n\n const result = value.result as string;\n\n return enumValues.includes(result as ENUM)\n ? 
{ success: true, value: result as ENUM }\n : {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be a string in the enum',\n }),\n };\n },\n\n validatePartialResult() {\n // no streaming in enum mode\n throw new UnsupportedFunctionalityError({\n functionality: 'partial results in enum mode',\n });\n },\n\n createElementStream() {\n // no streaming in enum mode\n throw new UnsupportedFunctionalityError({\n functionality: 'element streams in enum mode',\n });\n },\n };\n};\n\nexport function getOutputStrategy<SCHEMA>({\n output,\n schema,\n enumValues,\n}: {\n output: 'object' | 'array' | 'enum' | 'no-schema';\n schema?: z.Schema<SCHEMA, z.ZodTypeDef, any> | Schema<SCHEMA>;\n enumValues?: Array<SCHEMA>;\n}): OutputStrategy<any, any, any> {\n switch (output) {\n case 'object':\n return objectOutputStrategy(asSchema(schema!));\n case 'array':\n return arrayOutputStrategy(asSchema(schema!));\n case 'enum':\n return enumOutputStrategy(enumValues! as Array<string>);\n case 'no-schema':\n return noSchemaOutputStrategy;\n default: {\n const _exhaustiveCheck: never = output;\n throw new Error(`Unsupported output: ${_exhaustiveCheck}`);\n }\n }\n}\n","export type AsyncIterableStream<T> = AsyncIterable<T> & ReadableStream<T>;\n\nexport function createAsyncIterableStream<T>(\n source: ReadableStream<T>,\n): AsyncIterableStream<T> {\n const stream = source.pipeThrough(new TransformStream<T, T>());\n\n (stream as AsyncIterableStream<T>)[Symbol.asyncIterator] = () => {\n const reader = stream.getReader();\n return {\n async next(): Promise<IteratorResult<T>> {\n const { done, value } = await reader.read();\n return done ? { done: true, value: undefined } : { done: false, value };\n },\n };\n };\n\n return stream as AsyncIterableStream<T>;\n}\n","import { z } from 'zod';\nimport { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport { Schema } from '@ai-sdk/ui-utils';\n\nexport function validateObjectGenerationInput({\n output,\n mode,\n schema,\n schemaName,\n schemaDescription,\n enumValues,\n}: {\n output?: 'object' | 'array' | 'enum' | 'no-schema';\n schema?: z.Schema<any, z.ZodTypeDef, any> | Schema<any>;\n schemaName?: string;\n schemaDescription?: string;\n enumValues?: Array<unknown>;\n mode?: 'auto' | 'json' | 'tool';\n}) {\n if (\n output != null &&\n output !== 'object' &&\n output !== 'array' &&\n output !== 'enum' &&\n output !== 'no-schema'\n ) {\n throw new InvalidArgumentError({\n parameter: 'output',\n value: output,\n message: 'Invalid output type.',\n });\n }\n\n if (output === 'no-schema') {\n if (mode === 'auto' || mode === 'tool') {\n throw new InvalidArgumentError({\n parameter: 'mode',\n value: mode,\n message: 'Mode must be \"json\" for no-schema output.',\n });\n }\n\n if (schema != null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Schema is not supported for no-schema output.',\n });\n }\n\n if (schemaDescription != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaDescription',\n value: schemaDescription,\n message: 'Schema description is not supported for no-schema output.',\n });\n }\n\n if (schemaName != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaName',\n value: schemaName,\n message: 'Schema name is not supported for no-schema output.',\n });\n }\n\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are not supported for no-schema output.',\n });\n }\n }\n\n if 
(output === 'object') {\n if (schema == null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Schema is required for object output.',\n });\n }\n\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are not supported for object output.',\n });\n }\n }\n\n if (output === 'array') {\n if (schema == null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Element schema is required for array output.',\n });\n }\n\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are not supported for array output.',\n });\n }\n }\n\n if (output === 'enum') {\n if (schema != null) {\n throw new InvalidArgumentError({\n parameter: 'schema',\n value: schema,\n message: 'Schema is not supported for enum output.',\n });\n }\n\n if (schemaDescription != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaDescription',\n value: schemaDescription,\n message: 'Schema description is not supported for enum output.',\n });\n }\n\n if (schemaName != null) {\n throw new InvalidArgumentError({\n parameter: 'schemaName',\n value: schemaName,\n message: 'Schema name is not supported for enum output.',\n });\n }\n\n if (enumValues == null) {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value: enumValues,\n message: 'Enum values are required for enum output.',\n });\n }\n\n for (const value of enumValues) {\n if (typeof value !== 'string') {\n throw new InvalidArgumentError({\n parameter: 'enumValues',\n value,\n message: 'Enum values must be strings.',\n });\n }\n }\n }\n}\n","import {\n JSONValue,\n LanguageModelV1CallOptions,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport { createIdGenerator } from '@ai-sdk/provider-utils';\nimport {\n DeepPartial,\n Schema,\n isDeepEqualData,\n parsePartialJson,\n} from '@ai-sdk/ui-utils';\nimport { ServerResponse } from 'http';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors/no-object-generated-error';\nimport { DelayedPromise } from '../../util/delayed-promise';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport {\n CallWarning,\n FinishReason,\n LanguageModel,\n LogProbs,\n} from '../types/language-model';\nimport { LanguageModelRequestMetadata } from '../types/language-model-request-metadata';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { ProviderMetadata } from '../types/provider-metadata';\nimport {\n LanguageModelUsage,\n calculateLanguageModelUsage,\n} from '../types/usage';\nimport {\n 
AsyncIterableStream,\n createAsyncIterableStream,\n} from '../util/async-iterable-stream';\nimport { createStitchableStream } from '../util/create-stitchable-stream';\nimport { now as originalNow } from '../util/now';\nimport { prepareOutgoingHttpHeaders } from '../util/prepare-outgoing-http-headers';\nimport { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { writeToServerResponse } from '../util/write-to-server-response';\nimport { injectJsonInstruction } from './inject-json-instruction';\nimport { OutputStrategy, getOutputStrategy } from './output-strategy';\nimport { ObjectStreamPart, StreamObjectResult } from './stream-object-result';\nimport { validateObjectGenerationInput } from './validate-object-generation-input';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aiobj', size: 24 });\n\ntype OnFinishCallback<RESULT> = (event: {\n /**\nThe token usage of the generated response.\n*/\n usage: LanguageModelUsage;\n\n /**\nThe generated object. Can be undefined if the final object does not match the schema.\n*/\n object: RESULT | undefined;\n\n /**\nOptional error object. This is e.g. a TypeValidationError when the final object does not match the schema.\n*/\n error: unknown | undefined;\n\n /**\nResponse metadata.\n */\n response: LanguageModelResponseMetadata;\n\n /**\nWarnings from the model provider (e.g. unsupported settings).\n*/\n warnings?: CallWarning[];\n\n /**\nAdditional provider-specific metadata. They are passed through\nfrom the provider to the AI SDK and enable provider-specific\nresults that can be fully encapsulated in the provider.\n*/\n experimental_providerMetadata: ProviderMetadata | undefined;\n}) => Promise<void> | void;\n\n/**\nGenerate a structured, typed object for a given prompt and schema using a language model.\n\nThis function streams the output. If you do not want to stream the output, use `generateObject` instead.\n\n@return\nA result object for accessing the partial object stream and additional information.\n */\nexport function streamObject<OBJECT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output?: 'object' | undefined;\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe schema of the object that the model should generate.\n */\n schema: z.Schema<OBJECT, z.ZodTypeDef, any> | Schema<OBJECT>;\n\n /**\nOptional name of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the output that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nCallback that is called when the LLM response and the final object validation are finished.\n */\n onFinish?: OnFinishCallback<OBJECT>;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n },\n): StreamObjectResult<DeepPartial<OBJECT>, OBJECT, never>;\n/**\nGenerate an array with structured, typed elements for a given prompt and element schema using a language model.\n\nThis function streams the output. If you do not want to stream the output, use `generateObject` instead.\n\n@return\nA result object for accessing the partial object stream and additional information.\n */\nexport function streamObject<ELEMENT>(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'array';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe element schema of the array that the model should generate.\n */\n schema: z.Schema<ELEMENT, z.ZodTypeDef, any> | Schema<ELEMENT>;\n\n /**\nOptional name of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema name.\n */\n schemaName?: string;\n\n /**\nOptional description of the array that should be generated.\nUsed by some providers for additional LLM guidance, e.g.\nvia tool or schema description.\n */\n schemaDescription?: string;\n\n /**\nThe mode to use for object generation.\n\nThe schema is converted into a JSON schema and used in one of the following ways\n\n- 'auto': The provider will choose the best mode for the model.\n- 'tool': A tool with the JSON schema as parameters is provided and the provider is instructed to use it.\n- 'json': The JSON schema and an instruction are injected into the prompt. If the provider supports JSON mode, it is enabled. If the provider supports JSON grammars, the grammar is used.\n\nPlease note that most providers do not support all modes.\n\nDefault and recommended: 'auto' (best mode for the model).\n */\n mode?: 'auto' | 'json' | 'tool';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nCallback that is called when the LLM response and the final object validation are finished.\n */\n onFinish?: OnFinishCallback<Array<ELEMENT>>;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n },\n): StreamObjectResult<\n Array<ELEMENT>,\n Array<ELEMENT>,\n AsyncIterableStream<ELEMENT>\n>;\n/**\nGenerate JSON with any schema for a given prompt using a language model.\n\nThis function streams the output. 
If you do not want to stream the output, use `generateObject` instead.\n\n@return\nA result object for accessing the partial object stream and additional information.\n */\nexport function streamObject(\n options: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n output: 'no-schema';\n\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe mode to use for object generation. Must be \"json\" for no-schema output.\n */\n mode?: 'json';\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nCallback that is called when the LLM response and the final object validation are finished.\n */\n onFinish?: OnFinishCallback<JSONValue>;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n },\n): StreamObjectResult<JSONValue, JSONValue, never>;\nexport function streamObject<SCHEMA, PARTIAL, RESULT, ELEMENT_STREAM>({\n model,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n mode,\n output = 'object',\n system,\n prompt,\n messages,\n maxRetries,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n onFinish,\n _internal: {\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n now = originalNow,\n } = {},\n ...settings\n}: Omit<CallSettings, 'stopSequences'> &\n Prompt & {\n /**\n * The expected structure of the output.\n *\n * - 'object': Generate a single object that conforms to the schema.\n * - 'array': Generate an array of objects that conform to the schema.\n * - 'no-schema': Generate any JSON object. 
No schema is specified.\n *\n * Default is 'object' if not specified.\n */\n output?: 'object' | 'array' | 'no-schema';\n\n model: LanguageModel;\n schema?: z.Schema<SCHEMA, z.ZodTypeDef, any> | Schema<SCHEMA>;\n schemaName?: string;\n schemaDescription?: string;\n mode?: 'auto' | 'json' | 'tool';\n experimental_telemetry?: TelemetrySettings;\n experimental_providerMetadata?: ProviderMetadata;\n onFinish?: OnFinishCallback<RESULT>;\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n now?: () => number;\n };\n }): StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {\n validateObjectGenerationInput({\n output,\n mode,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n });\n\n const outputStrategy = getOutputStrategy({ output, schema: inputSchema });\n\n // automatically set mode to 'json' for no-schema output\n if (outputStrategy.type === 'no-schema' && mode === undefined) {\n mode = 'json';\n }\n\n return new DefaultStreamObjectResult({\n model,\n telemetry,\n headers,\n settings,\n maxRetries,\n abortSignal,\n outputStrategy,\n system,\n prompt,\n messages,\n schemaName,\n schemaDescription,\n inputProviderMetadata: providerMetadata,\n mode,\n onFinish,\n generateId,\n currentDate,\n now,\n });\n}\n\nclass DefaultStreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>\n implements StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>\n{\n private readonly objectPromise = new DelayedPromise<RESULT>();\n private readonly usagePromise = new DelayedPromise<LanguageModelUsage>();\n private readonly providerMetadataPromise = new DelayedPromise<\n ProviderMetadata | undefined\n >();\n private readonly warningsPromise = new DelayedPromise<\n CallWarning[] | undefined\n >();\n private readonly requestPromise =\n new DelayedPromise<LanguageModelRequestMetadata>();\n private readonly responsePromise =\n new DelayedPromise<LanguageModelResponseMetadata>();\n\n private readonly stitchableStream =\n createStitchableStream<ObjectStreamPart<PARTIAL>>();\n\n private readonly outputStrategy: OutputStrategy<\n PARTIAL,\n RESULT,\n ELEMENT_STREAM\n >;\n\n constructor({\n model,\n headers,\n telemetry,\n settings,\n maxRetries: maxRetriesArg,\n abortSignal,\n outputStrategy,\n system,\n prompt,\n messages,\n schemaName,\n schemaDescription,\n inputProviderMetadata,\n mode,\n onFinish,\n generateId,\n currentDate,\n now,\n }: {\n model: LanguageModel;\n telemetry: TelemetrySettings | undefined;\n headers: Record<string, string | undefined> | undefined;\n settings: Omit<CallSettings, 'abortSignal' | 'headers'>;\n maxRetries: number | undefined;\n abortSignal: AbortSignal | undefined;\n outputStrategy: OutputStrategy<PARTIAL, RESULT, ELEMENT_STREAM>;\n system: Prompt['system'];\n prompt: Prompt['prompt'];\n messages: Prompt['messages'];\n schemaName: string | undefined;\n schemaDescription: string | undefined;\n inputProviderMetadata: ProviderMetadata | undefined;\n mode: 'auto' | 'json' | 'tool' | undefined;\n onFinish: OnFinishCallback<RESULT> | undefined;\n generateId: () => string;\n currentDate: () => Date;\n now: () => number;\n }) {\n const { maxRetries, retry } = prepareRetries({\n maxRetries: maxRetriesArg,\n });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const tracer = getTracer(telemetry);\n const self = this;\n\n recordSpan({\n name: 'ai.streamObject',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 
'ai.streamObject',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.schema':\n outputStrategy.jsonSchema != null\n ? { input: () => JSON.stringify(outputStrategy.jsonSchema) }\n : undefined,\n 'ai.schema.name': schemaName,\n 'ai.schema.description': schemaDescription,\n 'ai.settings.output': outputStrategy.type,\n 'ai.settings.mode': mode,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async rootSpan => {\n // use the default provider mode when the mode is set to 'auto' or unspecified\n if (mode === 'auto' || mode == null) {\n mode = model.defaultObjectGenerationMode;\n }\n\n let callOptions: LanguageModelV1CallOptions;\n let transformer: Transformer<\n LanguageModelV1StreamPart,\n string | Omit<LanguageModelV1StreamPart, 'text-delta'>\n >;\n\n switch (mode) {\n case 'json': {\n const standardizedPrompt = standardizePrompt({\n prompt: {\n system:\n outputStrategy.jsonSchema == null\n ? injectJsonInstruction({ prompt: system })\n : model.supportsStructuredOutputs\n ? system\n : injectJsonInstruction({\n prompt: system,\n schema: outputStrategy.jsonSchema,\n }),\n prompt,\n messages,\n },\n tools: undefined,\n });\n\n callOptions = {\n mode: {\n type: 'object-json',\n schema: outputStrategy.jsonSchema,\n name: schemaName,\n description: schemaDescription,\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n }),\n providerMetadata: inputProviderMetadata,\n abortSignal,\n headers,\n };\n\n transformer = {\n transform: (chunk, controller) => {\n switch (chunk.type) {\n case 'text-delta':\n controller.enqueue(chunk.textDelta);\n break;\n case 'response-metadata':\n case 'finish':\n case 'error':\n controller.enqueue(chunk);\n break;\n }\n },\n };\n\n break;\n }\n\n case 'tool': {\n const standardizedPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools: undefined,\n });\n\n callOptions = {\n mode: {\n type: 'object-tool',\n tool: {\n type: 'function',\n name: schemaName ?? 'json',\n description:\n schemaDescription ?? 
'Respond with a JSON object.',\n parameters: outputStrategy.jsonSchema!,\n },\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n }),\n providerMetadata: inputProviderMetadata,\n abortSignal,\n headers,\n };\n\n transformer = {\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'tool-call-delta':\n controller.enqueue(chunk.argsTextDelta);\n break;\n case 'response-metadata':\n case 'finish':\n case 'error':\n controller.enqueue(chunk);\n break;\n }\n },\n };\n\n break;\n }\n\n case undefined: {\n throw new Error(\n 'Model does not have a default object generation mode.',\n );\n }\n\n default: {\n const _exhaustiveCheck: never = mode;\n throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);\n }\n }\n\n const {\n result: { stream, warnings, rawResponse, request },\n doStreamSpan,\n startTimestampMs,\n } = await retry(() =>\n recordSpan({\n name: 'ai.streamObject.doStream',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.streamObject.doStream',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => callOptions.inputFormat,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(callOptions.prompt),\n },\n 'ai.settings.mode': mode,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async doStreamSpan => ({\n startTimestampMs: now(),\n doStreamSpan,\n result: await model.doStream(callOptions),\n }),\n }),\n );\n\n self.requestPromise.resolve(request ?? {});\n\n // store information for onFinish callback:\n let usage: LanguageModelUsage | undefined;\n let finishReason: LanguageModelV1FinishReason | undefined;\n let providerMetadata: ProviderMetadata | undefined;\n let object: RESULT | undefined;\n let error: unknown | undefined;\n\n // pipe chunks through a transformation stream that extracts metadata:\n let accumulatedText = '';\n let textDelta = '';\n let response: {\n id: string;\n timestamp: Date;\n modelId: string;\n } = {\n id: generateId(),\n timestamp: currentDate(),\n modelId: model.modelId,\n };\n\n // Keep track of raw parse result before type validation, since e.g. 
Zod might\n // change the object by mapping properties.\n let latestObjectJson: JSONValue | undefined = undefined;\n let latestObject: PARTIAL | undefined = undefined;\n let isFirstChunk = true;\n let isFirstDelta = true;\n\n const transformedStream = stream\n .pipeThrough(new TransformStream(transformer))\n .pipeThrough(\n new TransformStream<\n string | ObjectStreamInputPart,\n ObjectStreamPart<PARTIAL>\n >({\n async transform(chunk, controller): Promise<void> {\n // Telemetry event for first chunk:\n if (isFirstChunk) {\n const msToFirstChunk = now() - startTimestampMs;\n\n isFirstChunk = false;\n\n doStreamSpan.addEvent('ai.stream.firstChunk', {\n 'ai.stream.msToFirstChunk': msToFirstChunk,\n });\n\n doStreamSpan.setAttributes({\n 'ai.stream.msToFirstChunk': msToFirstChunk,\n });\n }\n\n // process partial text chunks\n if (typeof chunk === 'string') {\n accumulatedText += chunk;\n textDelta += chunk;\n\n const { value: currentObjectJson, state: parseState } =\n parsePartialJson(accumulatedText);\n\n if (\n currentObjectJson !== undefined &&\n !isDeepEqualData(latestObjectJson, currentObjectJson)\n ) {\n const validationResult =\n outputStrategy.validatePartialResult({\n value: currentObjectJson,\n textDelta,\n latestObject,\n isFirstDelta,\n isFinalDelta: parseState === 'successful-parse',\n });\n\n if (\n validationResult.success &&\n !isDeepEqualData(\n latestObject,\n validationResult.value.partial,\n )\n ) {\n // inside inner check to correctly parse the final element in array mode:\n latestObjectJson = currentObjectJson;\n latestObject = validationResult.value.partial;\n\n controller.enqueue({\n type: 'object',\n object: latestObject,\n });\n\n controller.enqueue({\n type: 'text-delta',\n textDelta: validationResult.value.textDelta,\n });\n\n textDelta = '';\n isFirstDelta = false;\n }\n }\n\n return;\n }\n\n switch (chunk.type) {\n case 'response-metadata': {\n response = {\n id: chunk.id ?? response.id,\n timestamp: chunk.timestamp ?? response.timestamp,\n modelId: chunk.modelId ?? response.modelId,\n };\n break;\n }\n\n case 'finish': {\n // send final text delta:\n if (textDelta !== '') {\n controller.enqueue({ type: 'text-delta', textDelta });\n }\n\n // store finish reason for telemetry:\n finishReason = chunk.finishReason;\n\n // store usage and metadata for promises and onFinish callback:\n usage = calculateLanguageModelUsage(chunk.usage);\n providerMetadata = chunk.providerMetadata;\n\n controller.enqueue({ ...chunk, usage, response });\n\n // resolve promises that can be resolved now:\n self.usagePromise.resolve(usage);\n self.providerMetadataPromise.resolve(providerMetadata);\n self.responsePromise.resolve({\n ...response,\n headers: rawResponse?.headers,\n });\n\n // resolve the object promise with the latest object:\n const validationResult = outputStrategy.validateFinalResult(\n latestObjectJson,\n {\n text: accumulatedText,\n response,\n usage,\n },\n );\n\n if (validationResult.success) {\n object = validationResult.value;\n self.objectPromise.resolve(object);\n } else {\n error = new NoObjectGeneratedError({\n message:\n 'No object generated: response did not match schema.',\n cause: validationResult.error,\n text: accumulatedText,\n response,\n usage,\n });\n self.objectPromise.reject(error);\n }\n\n break;\n }\n\n default: {\n controller.enqueue(chunk);\n break;\n }\n }\n },\n\n // invoke onFinish callback and resolve toolResults promise when the stream is about to close:\n async flush(controller) {\n try {\n const finalUsage = usage ?? 
{\n promptTokens: NaN,\n completionTokens: NaN,\n totalTokens: NaN,\n };\n\n doStreamSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': finishReason,\n 'ai.response.object': {\n output: () => JSON.stringify(object),\n },\n 'ai.response.id': response.id,\n 'ai.response.model': response.modelId,\n 'ai.response.timestamp':\n response.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': finalUsage.promptTokens,\n 'ai.usage.completionTokens':\n finalUsage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [finishReason],\n 'gen_ai.response.id': response.id,\n 'gen_ai.response.model': response.modelId,\n 'gen_ai.usage.input_tokens': finalUsage.promptTokens,\n 'gen_ai.usage.output_tokens':\n finalUsage.completionTokens,\n },\n }),\n );\n\n // finish doStreamSpan before other operations for correct timing:\n doStreamSpan.end();\n\n // Add response information to the root span:\n rootSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.usage.promptTokens': finalUsage.promptTokens,\n 'ai.usage.completionTokens':\n finalUsage.completionTokens,\n 'ai.response.object': {\n output: () => JSON.stringify(object),\n },\n },\n }),\n );\n\n // call onFinish callback:\n await onFinish?.({\n usage: finalUsage,\n object,\n error,\n response: {\n ...response,\n headers: rawResponse?.headers,\n },\n warnings,\n experimental_providerMetadata: providerMetadata,\n });\n } catch (error) {\n controller.error(error);\n } finally {\n rootSpan.end();\n }\n },\n }),\n );\n\n self.stitchableStream.addStream(transformedStream);\n },\n })\n .catch(error => {\n // add an empty stream with an error to break the stream:\n self.stitchableStream.addStream(\n new ReadableStream({\n start(controller) {\n controller.error(error);\n },\n }),\n );\n })\n .finally(() => {\n self.stitchableStream.close();\n });\n\n this.outputStrategy = outputStrategy;\n }\n\n get object() {\n return this.objectPromise.value;\n }\n\n get usage() {\n return this.usagePromise.value;\n }\n\n get experimental_providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n\n get warnings() {\n return this.warningsPromise.value;\n }\n\n get request() {\n return this.requestPromise.value;\n }\n\n get response() {\n return this.responsePromise.value;\n }\n\n get partialObjectStream(): AsyncIterableStream<PARTIAL> {\n return createAsyncIterableStream(\n this.stitchableStream.stream.pipeThrough(\n new TransformStream<ObjectStreamPart<PARTIAL>, PARTIAL>({\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'object':\n controller.enqueue(chunk.object);\n break;\n\n case 'text-delta':\n case 'finish':\n break;\n\n case 'error':\n controller.error(chunk.error);\n break;\n\n default: {\n const _exhaustiveCheck: never = chunk;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n }),\n ),\n );\n }\n\n get elementStream(): ELEMENT_STREAM {\n return this.outputStrategy.createElementStream(\n this.stitchableStream.stream,\n );\n }\n\n get textStream(): AsyncIterableStream<string> {\n return createAsyncIterableStream(\n this.stitchableStream.stream.pipeThrough(\n new TransformStream<ObjectStreamPart<PARTIAL>, string>({\n transform(chunk, controller) {\n switch (chunk.type) {\n case 'text-delta':\n controller.enqueue(chunk.textDelta);\n break;\n\n case 'object':\n case 'finish':\n break;\n\n case 'error':\n controller.error(chunk.error);\n break;\n\n default: {\n const _exhaustiveCheck: 
never = chunk;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n }),\n ),\n );\n }\n\n get fullStream(): AsyncIterableStream<ObjectStreamPart<PARTIAL>> {\n return createAsyncIterableStream(this.stitchableStream.stream);\n }\n\n pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit) {\n writeToServerResponse({\n response,\n status: init?.status,\n statusText: init?.statusText,\n headers: prepareOutgoingHttpHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n stream: this.textStream.pipeThrough(new TextEncoderStream()),\n });\n }\n\n toTextStreamResponse(init?: ResponseInit): Response {\n return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {\n status: init?.status ?? 200,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n });\n }\n}\n\nexport type ObjectStreamInputPart =\n | {\n type: 'error';\n error: unknown;\n }\n | {\n type: 'response-metadata';\n id?: string;\n timestamp?: Date;\n modelId?: string;\n }\n | {\n type: 'finish';\n finishReason: FinishReason;\n logprobs?: LogProbs;\n usage: LanguageModelUsage;\n providerMetadata?: ProviderMetadata;\n };\n","/**\n * Delayed promise. It is only constructed once the value is accessed.\n * This is useful to avoid unhandled promise rejections when the promise is created\n * but not accessed.\n */\nexport class DelayedPromise<T> {\n private status:\n | { type: 'pending' }\n | { type: 'resolved'; value: T }\n | { type: 'rejected'; error: unknown } = { type: 'pending' };\n private promise: Promise<T> | undefined;\n private _resolve: undefined | ((value: T) => void) = undefined;\n private _reject: undefined | ((error: unknown) => void) = undefined;\n\n get value(): Promise<T> {\n if (this.promise) {\n return this.promise;\n }\n\n this.promise = new Promise<T>((resolve, reject) => {\n if (this.status.type === 'resolved') {\n resolve(this.status.value);\n } else if (this.status.type === 'rejected') {\n reject(this.status.error);\n }\n\n this._resolve = resolve;\n this._reject = reject;\n });\n\n return this.promise;\n }\n\n resolve(value: T): void {\n this.status = { type: 'resolved', value };\n\n if (this.promise) {\n this._resolve?.(value);\n }\n }\n\n reject(error: unknown): void {\n this.status = { type: 'rejected', error };\n\n if (this.promise) {\n this._reject?.(error);\n }\n }\n}\n","/**\n * Creates a Promise with externally accessible resolve and reject functions.\n *\n * @template T - The type of the value that the Promise will resolve to.\n * @returns An object containing:\n * - promise: A Promise that can be resolved or rejected externally.\n * - resolve: A function to resolve the Promise with a value of type T.\n * - reject: A function to reject the Promise with an error.\n */\nexport function createResolvablePromise<T = any>(): {\n promise: Promise<T>;\n resolve: (value: T) => void;\n reject: (error: unknown) => void;\n} {\n let resolve: (value: T) => void;\n let reject: (error: unknown) => void;\n\n const promise = new Promise<T>((res, rej) => {\n resolve = res;\n reject = rej;\n });\n\n return {\n promise,\n resolve: resolve!,\n reject: reject!,\n };\n}\n","import { createResolvablePromise } from '../../util/create-resolvable-promise';\n\n/**\n * Creates a stitchable stream that can pipe one stream at a time.\n *\n * @template T - The type of values emitted by the streams.\n * @returns {Object} An object containing the stitchable stream and control methods.\n */\nexport function 
createStitchableStream<T>(): {\n stream: ReadableStream<T>;\n addStream: (innerStream: ReadableStream<T>) => void;\n close: () => void;\n} {\n let innerStreamReaders: ReadableStreamDefaultReader<T>[] = [];\n let controller: ReadableStreamDefaultController<T> | null = null;\n let isClosed = false;\n let waitForNewStream = createResolvablePromise<void>();\n\n const processPull = async () => {\n // Case 1: Outer stream is closed and no more inner streams\n if (isClosed && innerStreamReaders.length === 0) {\n controller?.close();\n return;\n }\n\n // Case 2: No inner streams available, but outer stream is open\n // wait for a new inner stream to be added or the outer stream to close\n if (innerStreamReaders.length === 0) {\n waitForNewStream = createResolvablePromise<void>();\n await waitForNewStream.promise;\n return processPull();\n }\n\n try {\n const { value, done } = await innerStreamReaders[0].read();\n\n if (done) {\n // Case 3: Current inner stream is done\n innerStreamReaders.shift(); // Remove the finished stream\n\n // Continue pulling from the next stream if available\n if (innerStreamReaders.length > 0) {\n await processPull();\n } else if (isClosed) {\n controller?.close();\n }\n } else {\n // Case 4: Current inner stream returns an item\n controller?.enqueue(value);\n }\n } catch (error) {\n // Case 5: Current inner stream throws an error\n controller?.error(error);\n innerStreamReaders.shift(); // Remove the errored stream\n\n if (isClosed && innerStreamReaders.length === 0) {\n controller?.close();\n }\n }\n };\n\n return {\n stream: new ReadableStream<T>({\n start(controllerParam) {\n controller = controllerParam;\n },\n pull: processPull,\n async cancel() {\n for (const reader of innerStreamReaders) {\n await reader.cancel();\n }\n innerStreamReaders = [];\n isClosed = true;\n },\n }),\n addStream: (innerStream: ReadableStream<T>) => {\n if (isClosed) {\n throw new Error('Cannot add inner stream: outer stream is closed');\n }\n\n innerStreamReaders.push(innerStream.getReader());\n waitForNewStream.resolve();\n },\n close: () => {\n isClosed = true;\n waitForNewStream.resolve();\n\n if (innerStreamReaders.length === 0) {\n controller?.close();\n }\n },\n };\n}\n","// Shim for performance.now() to support environments that don't have it:\nexport function now(): number {\n return globalThis?.performance?.now() ?? 
Date.now();\n}\n","import { createIdGenerator } from '@ai-sdk/provider-utils';\nimport { Tracer } from '@opentelemetry/api';\nimport { InvalidArgumentError, ToolExecutionError } from '../../errors';\nimport { CoreAssistantMessage, CoreMessage, CoreToolMessage } from '../prompt';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { prepareToolsAndToolChoice } from '../prompt/prepare-tools-and-tool-choice';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { CoreTool } from '../tool/tool';\nimport { CoreToolChoice, LanguageModel, ProviderMetadata } from '../types';\nimport {\n LanguageModelUsage,\n addLanguageModelUsage,\n calculateLanguageModelUsage,\n} from '../types/usage';\nimport { removeTextAfterLastWhitespace } from '../util/remove-text-after-last-whitespace';\nimport { GenerateTextResult } from './generate-text-result';\nimport { Output } from './output';\nimport { parseToolCall } from './parse-tool-call';\nimport { StepResult } from './step-result';\nimport { toResponseMessages } from './to-response-messages';\nimport { ToolCallArray } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolResultArray } from './tool-result';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aitxt', size: 24 });\n\n/**\nGenerate a text and call tools for a given prompt using a language model.\n\nThis function does not stream the output. If you want to stream the output, use `streamText` instead.\n\n@param model - The language model to use.\n\n@param tools - Tools that are accessible to and can be called by the model. The model needs to support calling tools.\n@param toolChoice - The tool choice strategy. Default: 'auto'.\n\n@param system - A system message that will be part of the prompt.\n@param prompt - A simple text prompt. You can either use `prompt` or `messages` but not both.\n@param messages - A list of messages. You can either use `prompt` or `messages` but not both.\n\n@param maxTokens - Maximum number of tokens to generate.\n@param temperature - Temperature setting.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topP - Nucleus sampling.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topK - Only sample from the top K options for each subsequent token.\nUsed to remove \"long tail\" low probability responses.\nRecommended for advanced use cases only. 
You usually only need to use temperature.\n@param presencePenalty - Presence penalty setting.\nIt affects the likelihood of the model to repeat information that is already in the prompt.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param frequencyPenalty - Frequency penalty setting.\nIt affects the likelihood of the model to repeatedly use the same words or phrases.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param stopSequences - Stop sequences.\nIf set, the model will stop generating text when one of the stop sequences is generated.\n@param seed - The seed (integer) to use for random sampling.\nIf set and supported by the model, calls will generate deterministic results.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@param maxSteps - Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.\n\n@param onStepFinish - Callback that is called when each step (LLM call) is finished, including intermediate steps.\n\n@returns\nA result object that contains the generated text, the results of the tool calls, and additional information.\n */\nexport async function generateText<\n TOOLS extends Record<string, CoreTool>,\n OUTPUT = never,\n>({\n model,\n tools,\n toolChoice,\n system,\n prompt,\n messages,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n maxSteps = 1,\n experimental_output: output,\n experimental_continueSteps: continueSteps = false,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n experimental_activeTools: activeTools,\n experimental_repairToolCall: repairToolCall,\n _internal: {\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n } = {},\n onStepFinish,\n ...settings\n}: CallSettings &\n Prompt & {\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe tools that the model can call. The model needs to support calling tools.\n*/\n tools?: TOOLS;\n\n /**\nThe tool choice strategy. Default: 'auto'.\n */\n toolChoice?: CoreToolChoice<TOOLS>;\n\n /**\nMaximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.\n\nA maximum number is required to prevent infinite loops in the case of misconfigured tools.\n\nBy default, it's set to 1, which means that only a single LLM call is made.\n */\n maxSteps?: number;\n\n /**\nWhen enabled, the model will perform additional steps if the finish reason is \"length\" (experimental).\n\nBy default, it's set to false.\n */\n experimental_continueSteps?: boolean;\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. 
They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nLimits the tools that are available for the model to call without\nchanging the tool call and result types in the result.\n */\n experimental_activeTools?: Array<keyof TOOLS>;\n\n experimental_output?: Output<OUTPUT>;\n\n /**\nA function that attempts to repair a tool call that failed to parse.\n */\n experimental_repairToolCall?: ToolCallRepairFunction<TOOLS>;\n\n /**\n Callback that is called when each step (LLM call) is finished, including intermediate steps.\n */\n onStepFinish?: (event: StepResult<TOOLS>) => Promise<void> | void;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n generateId?: () => string;\n currentDate?: () => Date;\n };\n }): Promise<GenerateTextResult<TOOLS, OUTPUT>> {\n if (maxSteps < 1) {\n throw new InvalidArgumentError({\n parameter: 'maxSteps',\n value: maxSteps,\n message: 'maxSteps must be at least 1',\n });\n }\n\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const initialPrompt = standardizePrompt({\n prompt: {\n system: output?.injectIntoSystemPrompt({ system, model }) ?? system,\n prompt,\n messages,\n },\n tools,\n });\n\n const tracer = getTracer(telemetry);\n\n return recordSpan({\n name: 'ai.generateText',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateText',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.settings.maxSteps': maxSteps,\n },\n }),\n tracer,\n fn: async span => {\n const mode = {\n type: 'regular' as const,\n ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools }),\n };\n\n const callSettings = prepareCallSettings(settings);\n\n let currentModelResponse: Awaited<\n ReturnType<LanguageModel['doGenerate']>\n > & { response: { id: string; timestamp: Date; modelId: string } };\n let currentToolCalls: ToolCallArray<TOOLS> = [];\n let currentToolResults: ToolResultArray<TOOLS> = [];\n let stepCount = 0;\n const responseMessages: Array<CoreAssistantMessage | CoreToolMessage> =\n [];\n let text = '';\n const steps: GenerateTextResult<TOOLS, OUTPUT>['steps'] = [];\n let usage: LanguageModelUsage = {\n completionTokens: 0,\n promptTokens: 0,\n totalTokens: 0,\n };\n\n let stepType: 'initial' | 'tool-result' | 'continue' | 'done' = 'initial';\n\n do {\n // after the 1st step, we need to switch to messages format:\n const promptFormat = stepCount === 0 ? 
initialPrompt.type : 'messages';\n\n const stepInputMessages = [\n ...initialPrompt.messages,\n ...responseMessages,\n ];\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: {\n type: promptFormat,\n system: initialPrompt.system,\n messages: stepInputMessages,\n },\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n\n currentModelResponse = await retry(() =>\n recordSpan({\n name: 'ai.generateText.doGenerate',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.generateText.doGenerate',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': { input: () => promptFormat },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.prompt.tools': {\n // convert the language model level tools:\n input: () => mode.tools?.map(tool => JSON.stringify(tool)),\n },\n 'ai.prompt.toolChoice': {\n input: () =>\n mode.toolChoice != null\n ? JSON.stringify(mode.toolChoice)\n : undefined,\n },\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.stop_sequences': settings.stopSequences,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n fn: async span => {\n const result = await model.doGenerate({\n mode,\n ...callSettings,\n inputFormat: promptFormat,\n responseFormat: output?.responseFormat({ model }),\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n });\n\n // Fill in default values:\n const responseData = {\n id: result.response?.id ?? generateId(),\n timestamp: result.response?.timestamp ?? currentDate(),\n modelId: result.response?.modelId ?? model.modelId,\n };\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': result.finishReason,\n 'ai.response.text': {\n output: () => result.text,\n },\n 'ai.response.toolCalls': {\n output: () => JSON.stringify(result.toolCalls),\n },\n 'ai.response.id': responseData.id,\n 'ai.response.model': responseData.modelId,\n 'ai.response.timestamp':\n responseData.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': result.usage.promptTokens,\n 'ai.usage.completionTokens': result.usage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [result.finishReason],\n 'gen_ai.response.id': responseData.id,\n 'gen_ai.response.model': responseData.modelId,\n 'gen_ai.usage.input_tokens': result.usage.promptTokens,\n 'gen_ai.usage.output_tokens': result.usage.completionTokens,\n },\n }),\n );\n\n return { ...result, response: responseData };\n },\n }),\n );\n\n // parse tool calls:\n currentToolCalls = await Promise.all(\n (currentModelResponse.toolCalls ?? []).map(toolCall =>\n parseToolCall({\n toolCall,\n tools,\n repairToolCall,\n system,\n messages: stepInputMessages,\n }),\n ),\n );\n\n // execute tools:\n currentToolResults =\n tools == null\n ? 
[]\n : await executeTools({\n toolCalls: currentToolCalls,\n tools,\n tracer,\n telemetry,\n messages: stepInputMessages,\n abortSignal,\n });\n\n // token usage:\n const currentUsage = calculateLanguageModelUsage(\n currentModelResponse.usage,\n );\n usage = addLanguageModelUsage(usage, currentUsage);\n\n // check if another step is needed:\n let nextStepType: 'done' | 'continue' | 'tool-result' = 'done';\n if (++stepCount < maxSteps) {\n if (\n continueSteps &&\n currentModelResponse.finishReason === 'length' &&\n // only use continue when there are no tool calls:\n currentToolCalls.length === 0\n ) {\n nextStepType = 'continue';\n } else if (\n // there are tool calls:\n currentToolCalls.length > 0 &&\n // all current tool calls have results:\n currentToolResults.length === currentToolCalls.length\n ) {\n nextStepType = 'tool-result';\n }\n }\n\n // text:\n const originalText = currentModelResponse.text ?? '';\n const stepTextLeadingWhitespaceTrimmed =\n stepType === 'continue' && // only for continue steps\n text.trimEnd() !== text // only trim when there is preceding whitespace\n ? originalText.trimStart()\n : originalText;\n const stepText =\n nextStepType === 'continue'\n ? removeTextAfterLastWhitespace(stepTextLeadingWhitespaceTrimmed)\n : stepTextLeadingWhitespaceTrimmed;\n\n text =\n nextStepType === 'continue' || stepType === 'continue'\n ? text + stepText\n : stepText;\n\n // append to messages for potential next step:\n if (stepType === 'continue') {\n // continue step: update the last assistant message\n // continue is only possible when there are no tool calls,\n // so we can assume that there is a single last assistant message:\n const lastMessage = responseMessages[\n responseMessages.length - 1\n ] as CoreAssistantMessage;\n\n if (typeof lastMessage.content === 'string') {\n lastMessage.content += stepText;\n } else {\n lastMessage.content.push({\n text: stepText,\n type: 'text',\n });\n }\n } else {\n responseMessages.push(\n ...toResponseMessages({\n text,\n tools: tools ?? ({} as TOOLS),\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n }),\n );\n }\n\n // Add step information (after response messages are updated):\n const currentStepResult: StepResult<TOOLS> = {\n stepType,\n text: stepText,\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n finishReason: currentModelResponse.finishReason,\n usage: currentUsage,\n warnings: currentModelResponse.warnings,\n logprobs: currentModelResponse.logprobs,\n request: currentModelResponse.request ?? 
{},\n response: {\n ...currentModelResponse.response,\n headers: currentModelResponse.rawResponse?.headers,\n\n // deep clone msgs to avoid mutating past messages in multi-step:\n messages: JSON.parse(JSON.stringify(responseMessages)),\n },\n experimental_providerMetadata: currentModelResponse.providerMetadata,\n isContinued: nextStepType === 'continue',\n };\n steps.push(currentStepResult);\n await onStepFinish?.(currentStepResult);\n\n stepType = nextStepType;\n } while (stepType !== 'done');\n\n // Add response information to the span:\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': currentModelResponse.finishReason,\n 'ai.response.text': {\n output: () => currentModelResponse.text,\n },\n 'ai.response.toolCalls': {\n output: () => JSON.stringify(currentModelResponse.toolCalls),\n },\n\n 'ai.usage.promptTokens': currentModelResponse.usage.promptTokens,\n 'ai.usage.completionTokens':\n currentModelResponse.usage.completionTokens,\n },\n }),\n );\n\n return new DefaultGenerateTextResult({\n text,\n output:\n output == null\n ? (undefined as never)\n : output.parseOutput(\n { text },\n {\n response: currentModelResponse.response,\n usage,\n },\n ),\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n finishReason: currentModelResponse.finishReason,\n usage,\n warnings: currentModelResponse.warnings,\n request: currentModelResponse.request ?? {},\n response: {\n ...currentModelResponse.response,\n headers: currentModelResponse.rawResponse?.headers,\n messages: responseMessages,\n },\n logprobs: currentModelResponse.logprobs,\n steps,\n providerMetadata: currentModelResponse.providerMetadata,\n });\n },\n });\n}\n\nasync function executeTools<TOOLS extends Record<string, CoreTool>>({\n toolCalls,\n tools,\n tracer,\n telemetry,\n messages,\n abortSignal,\n}: {\n toolCalls: ToolCallArray<TOOLS>;\n tools: TOOLS;\n tracer: Tracer;\n telemetry: TelemetrySettings | undefined;\n messages: CoreMessage[];\n abortSignal: AbortSignal | undefined;\n}): Promise<ToolResultArray<TOOLS>> {\n const toolResults = await Promise.all(\n toolCalls.map(async ({ toolCallId, toolName, args }) => {\n const tool = tools[toolName];\n\n if (tool?.execute == null) {\n return undefined;\n }\n\n const result = await recordSpan({\n name: 'ai.toolCall',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.toolCall',\n telemetry,\n }),\n 'ai.toolCall.name': toolName,\n 'ai.toolCall.id': toolCallId,\n 'ai.toolCall.args': {\n output: () => JSON.stringify(args),\n },\n },\n }),\n tracer,\n fn: async span => {\n try {\n const result = await tool.execute!(args, {\n toolCallId,\n messages,\n abortSignal,\n });\n\n try {\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.toolCall.result': {\n output: () => JSON.stringify(result),\n },\n },\n }),\n );\n } catch (ignored) {\n // JSON stringify might fail if the result is not serializable,\n // in which case we just ignore it. 
In the future we might want to\n // add an optional serialize method to the tool interface and warn\n // if the result is not serializable.\n }\n\n return result;\n } catch (error) {\n throw new ToolExecutionError({\n toolName,\n toolArgs: args,\n cause: error,\n });\n }\n },\n });\n\n return {\n type: 'tool-result',\n toolCallId,\n toolName,\n args,\n result,\n } as ToolResultArray<TOOLS>[number];\n }),\n );\n\n return toolResults.filter(\n (result): result is NonNullable<typeof result> => result != null,\n );\n}\n\nclass DefaultGenerateTextResult<TOOLS extends Record<string, CoreTool>, OUTPUT>\n implements GenerateTextResult<TOOLS, OUTPUT>\n{\n readonly text: GenerateTextResult<TOOLS, OUTPUT>['text'];\n readonly toolCalls: GenerateTextResult<TOOLS, OUTPUT>['toolCalls'];\n readonly toolResults: GenerateTextResult<TOOLS, OUTPUT>['toolResults'];\n readonly finishReason: GenerateTextResult<TOOLS, OUTPUT>['finishReason'];\n readonly usage: GenerateTextResult<TOOLS, OUTPUT>['usage'];\n readonly warnings: GenerateTextResult<TOOLS, OUTPUT>['warnings'];\n readonly steps: GenerateTextResult<TOOLS, OUTPUT>['steps'];\n readonly logprobs: GenerateTextResult<TOOLS, OUTPUT>['logprobs'];\n readonly experimental_providerMetadata: GenerateTextResult<\n TOOLS,\n OUTPUT\n >['experimental_providerMetadata'];\n readonly response: GenerateTextResult<TOOLS, OUTPUT>['response'];\n readonly request: GenerateTextResult<TOOLS, OUTPUT>['request'];\n readonly experimental_output: GenerateTextResult<\n TOOLS,\n OUTPUT\n >['experimental_output'];\n\n constructor(options: {\n text: GenerateTextResult<TOOLS, OUTPUT>['text'];\n toolCalls: GenerateTextResult<TOOLS, OUTPUT>['toolCalls'];\n toolResults: GenerateTextResult<TOOLS, OUTPUT>['toolResults'];\n finishReason: GenerateTextResult<TOOLS, OUTPUT>['finishReason'];\n usage: GenerateTextResult<TOOLS, OUTPUT>['usage'];\n warnings: GenerateTextResult<TOOLS, OUTPUT>['warnings'];\n logprobs: GenerateTextResult<TOOLS, OUTPUT>['logprobs'];\n steps: GenerateTextResult<TOOLS, OUTPUT>['steps'];\n providerMetadata: GenerateTextResult<\n TOOLS,\n OUTPUT\n >['experimental_providerMetadata'];\n response: GenerateTextResult<TOOLS, OUTPUT>['response'];\n request: GenerateTextResult<TOOLS, OUTPUT>['request'];\n output: GenerateTextResult<TOOLS, OUTPUT>['experimental_output'];\n }) {\n this.text = options.text;\n this.toolCalls = options.toolCalls;\n this.toolResults = options.toolResults;\n this.finishReason = options.finishReason;\n this.usage = options.usage;\n this.warnings = options.warnings;\n this.request = options.request;\n this.response = options.response;\n this.steps = options.steps;\n this.experimental_providerMetadata = options.providerMetadata;\n this.logprobs = options.logprobs;\n this.experimental_output = options.output;\n }\n}\n","export {\n AISDKError,\n APICallError,\n EmptyResponseBodyError,\n InvalidPromptError,\n InvalidResponseDataError,\n JSONParseError,\n LoadAPIKeyError,\n NoContentGeneratedError,\n NoSuchModelError,\n TypeValidationError,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport { InvalidArgumentError } from './invalid-argument-error';\nexport { InvalidToolArgumentsError } from './invalid-tool-arguments-error';\nexport { NoObjectGeneratedError } from './no-object-generated-error';\nexport { NoSuchToolError } from './no-such-tool-error';\nexport { ToolCallRepairError } from './tool-call-repair-error';\nexport { ToolExecutionError } from './tool-execution-error';\n\nexport { InvalidDataContentError } from 
'../core/prompt/invalid-data-content-error';\nexport { InvalidMessageRoleError } from '../core/prompt/invalid-message-role-error';\nexport { MessageConversionError } from '../core/prompt/message-conversion-error';\nexport { DownloadError } from '../util/download-error';\nexport { RetryError } from '../util/retry-error';\n","import { AISDKError, getErrorMessage } from '@ai-sdk/provider';\n\nconst name = 'AI_InvalidToolArgumentsError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class InvalidToolArgumentsError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly toolName: string;\n readonly toolArgs: string;\n\n constructor({\n toolArgs,\n toolName,\n cause,\n message = `Invalid arguments for tool ${toolName}: ${getErrorMessage(\n cause,\n )}`,\n }: {\n message?: string;\n toolArgs: string;\n toolName: string;\n cause: unknown;\n }) {\n super({ name, message, cause });\n\n this.toolArgs = toolArgs;\n this.toolName = toolName;\n }\n\n static isInstance(error: unknown): error is InvalidToolArgumentsError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from '@ai-sdk/provider';\n\nconst name = 'AI_NoSuchToolError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class NoSuchToolError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly toolName: string;\n readonly availableTools: string[] | undefined;\n\n constructor({\n toolName,\n availableTools = undefined,\n message = `Model tried to call unavailable tool '${toolName}'. ${\n availableTools === undefined\n ? 'No tools are available.'\n : `Available tools: ${availableTools.join(', ')}.`\n }`,\n }: {\n toolName: string;\n availableTools?: string[] | undefined;\n message?: string;\n }) {\n super({ name, message });\n\n this.toolName = toolName;\n this.availableTools = availableTools;\n }\n\n static isInstance(error: unknown): error is NoSuchToolError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError, getErrorMessage } from '@ai-sdk/provider';\nimport { InvalidToolArgumentsError } from './invalid-tool-arguments-error';\nimport { NoSuchToolError } from './no-such-tool-error';\n\nconst name = 'AI_ToolCallRepairError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class ToolCallRepairError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly originalError: NoSuchToolError | InvalidToolArgumentsError;\n\n constructor({\n cause,\n originalError,\n message = `Error repairing tool call: ${getErrorMessage(cause)}`,\n }: {\n message?: string;\n cause: unknown;\n originalError: NoSuchToolError | InvalidToolArgumentsError;\n }) {\n super({ name, message, cause });\n this.originalError = originalError;\n }\n\n static isInstance(error: unknown): error is ToolCallRepairError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError, getErrorMessage, JSONValue } from '@ai-sdk/provider';\n\nconst name = 'AI_ToolExecutionError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class ToolExecutionError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly toolName: string;\n readonly toolArgs: JSONValue;\n\n constructor({\n toolArgs,\n toolName,\n cause,\n message = `Error executing tool ${toolName}: ${getErrorMessage(cause)}`,\n }: {\n message?: string;\n 
toolArgs: JSONValue;\n toolName: string;\n cause: unknown;\n }) {\n super({ name, message, cause });\n\n this.toolArgs = toolArgs;\n this.toolName = toolName;\n }\n\n static isInstance(error: unknown): error is ToolExecutionError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import {\n LanguageModelV1FunctionTool,\n LanguageModelV1ProviderDefinedTool,\n LanguageModelV1ToolChoice,\n} from '@ai-sdk/provider';\nimport { asSchema } from '@ai-sdk/ui-utils';\nimport { CoreTool } from '../tool/tool';\nimport { CoreToolChoice } from '../types/language-model';\nimport { isNonEmptyObject } from '../util/is-non-empty-object';\n\nexport function prepareToolsAndToolChoice<\n TOOLS extends Record<string, CoreTool>,\n>({\n tools,\n toolChoice,\n activeTools,\n}: {\n tools: TOOLS | undefined;\n toolChoice: CoreToolChoice<TOOLS> | undefined;\n activeTools: Array<keyof TOOLS> | undefined;\n}): {\n tools:\n | Array<LanguageModelV1FunctionTool | LanguageModelV1ProviderDefinedTool>\n | undefined;\n toolChoice: LanguageModelV1ToolChoice | undefined;\n} {\n if (!isNonEmptyObject(tools)) {\n return {\n tools: undefined,\n toolChoice: undefined,\n };\n }\n\n // when activeTools is provided, we only include the tools that are in the list:\n const filteredTools =\n activeTools != null\n ? Object.entries(tools).filter(([name]) =>\n activeTools.includes(name as keyof TOOLS),\n )\n : Object.entries(tools);\n\n return {\n tools: filteredTools.map(([name, tool]) => {\n const toolType = tool.type;\n switch (toolType) {\n case undefined:\n case 'function':\n return {\n type: 'function' as const,\n name,\n description: tool.description,\n parameters: asSchema(tool.parameters).jsonSchema,\n };\n case 'provider-defined':\n return {\n type: 'provider-defined' as const,\n name,\n id: tool.id,\n args: tool.args,\n };\n default: {\n const exhaustiveCheck: never = toolType;\n throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);\n }\n }\n }),\n toolChoice:\n toolChoice == null\n ? { type: 'auto' }\n : typeof toolChoice === 'string'\n ? { type: toolChoice }\n : { type: 'tool' as const, toolName: toolChoice.toolName as string },\n };\n}\n","export function isNonEmptyObject(\n object: Record<string, unknown> | undefined | null,\n): object is Record<string, unknown> {\n return object != null && Object.keys(object).length > 0;\n}\n","const lastWhitespaceRegexp = /^([\\s\\S]*?)(\\s+)(\\S*)$/;\n\n/**\n * Splits the text on the last whitespace.\n *\n * Whitespace is defined as one or more whitespace characters,\n * e.g. space, tab, newline, etc.\n *\n * @param text - The text to split.\n * @returns The prefix, whitespace, and suffix. Undefined if there is no whitespace.\n */\nexport function splitOnLastWhitespace(text: string):\n | {\n prefix: string;\n whitespace: string;\n suffix: string;\n }\n | undefined {\n const match = text.match(lastWhitespaceRegexp);\n return match\n ? { prefix: match[1], whitespace: match[2], suffix: match[3] }\n : undefined;\n}\n","import { splitOnLastWhitespace } from './split-on-last-whitespace';\n\nexport function removeTextAfterLastWhitespace(text: string): string {\n const match = splitOnLastWhitespace(text);\n return match ? 
match.prefix + match.whitespace : text;\n}\n","import { LanguageModelV1FunctionToolCall } from '@ai-sdk/provider';\nimport { safeParseJSON, safeValidateTypes } from '@ai-sdk/provider-utils';\nimport { Schema, asSchema } from '@ai-sdk/ui-utils';\nimport { InvalidToolArgumentsError } from '../../errors/invalid-tool-arguments-error';\nimport { NoSuchToolError } from '../../errors/no-such-tool-error';\nimport { CoreMessage } from '../prompt';\nimport { CoreTool } from '../tool';\nimport { inferParameters } from '../tool/tool';\nimport { ToolCallUnion } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolCallRepairError } from '../../errors/tool-call-repair-error';\n\nexport async function parseToolCall<TOOLS extends Record<string, CoreTool>>({\n toolCall,\n tools,\n repairToolCall,\n system,\n messages,\n}: {\n toolCall: LanguageModelV1FunctionToolCall;\n tools: TOOLS | undefined;\n repairToolCall: ToolCallRepairFunction<TOOLS> | undefined;\n system: string | undefined;\n messages: CoreMessage[];\n}): Promise<ToolCallUnion<TOOLS>> {\n if (tools == null) {\n throw new NoSuchToolError({ toolName: toolCall.toolName });\n }\n\n try {\n return await doParseToolCall({ toolCall, tools });\n } catch (error) {\n if (\n repairToolCall == null ||\n !(\n NoSuchToolError.isInstance(error) ||\n InvalidToolArgumentsError.isInstance(error)\n )\n ) {\n throw error;\n }\n\n let repairedToolCall: LanguageModelV1FunctionToolCall | null = null;\n\n try {\n repairedToolCall = await repairToolCall({\n toolCall,\n tools,\n parameterSchema: ({ toolName }) =>\n asSchema(tools[toolName].parameters).jsonSchema,\n system,\n messages,\n error,\n });\n } catch (repairError) {\n throw new ToolCallRepairError({\n cause: repairError,\n originalError: error,\n });\n }\n\n // no repaired tool call returned\n if (repairedToolCall == null) {\n throw error;\n }\n\n return await doParseToolCall({ toolCall: repairedToolCall, tools });\n }\n}\n\nasync function doParseToolCall<TOOLS extends Record<string, CoreTool>>({\n toolCall,\n tools,\n}: {\n toolCall: LanguageModelV1FunctionToolCall;\n tools: TOOLS;\n}): Promise<ToolCallUnion<TOOLS>> {\n const toolName = toolCall.toolName as keyof TOOLS & string;\n\n const tool = tools[toolName];\n\n if (tool == null) {\n throw new NoSuchToolError({\n toolName: toolCall.toolName,\n availableTools: Object.keys(tools),\n });\n }\n\n const schema = asSchema(tool.parameters) as Schema<\n inferParameters<TOOLS[keyof TOOLS]['parameters']>\n >;\n\n // when the tool call has no arguments, we try passing an empty object to the schema\n // (many LLMs generate empty strings for tool calls with no arguments)\n const parseResult =\n toolCall.args.trim() === ''\n ? 
safeValidateTypes({ value: {}, schema })\n : safeParseJSON({ text: toolCall.args, schema });\n\n if (parseResult.success === false) {\n throw new InvalidToolArgumentsError({\n toolName,\n toolArgs: toolCall.args,\n cause: parseResult.error,\n });\n }\n\n return {\n type: 'tool-call',\n toolCallId: toolCall.toolCallId,\n toolName,\n args: parseResult.value,\n };\n}\n","import {\n CoreAssistantMessage,\n CoreToolMessage,\n ToolResultPart,\n} from '../prompt';\nimport { CoreTool } from '../tool/tool';\nimport { ToolCallArray } from './tool-call';\nimport { ToolResultArray } from './tool-result';\n\n/**\nConverts the result of a `generateText` call to a list of response messages.\n */\nexport function toResponseMessages<TOOLS extends Record<string, CoreTool>>({\n text = '',\n tools,\n toolCalls,\n toolResults,\n}: {\n text: string | undefined;\n tools: TOOLS;\n toolCalls: ToolCallArray<TOOLS>;\n toolResults: ToolResultArray<TOOLS>;\n}): Array<CoreAssistantMessage | CoreToolMessage> {\n const responseMessages: Array<CoreAssistantMessage | CoreToolMessage> = [];\n\n responseMessages.push({\n role: 'assistant',\n content: [{ type: 'text', text }, ...toolCalls],\n });\n\n if (toolResults.length > 0) {\n responseMessages.push({\n role: 'tool',\n content: toolResults.map((toolResult): ToolResultPart => {\n const tool = tools[toolResult.toolName];\n return tool?.experimental_toToolResultContent != null\n ? {\n type: 'tool-result',\n toolCallId: toolResult.toolCallId,\n toolName: toolResult.toolName,\n result: tool.experimental_toToolResultContent(toolResult.result),\n experimental_content: tool.experimental_toToolResultContent(\n toolResult.result,\n ),\n }\n : {\n type: 'tool-result',\n toolCallId: toolResult.toolCallId,\n toolName: toolResult.toolName,\n result: toolResult.result,\n };\n }),\n });\n }\n\n return responseMessages;\n}\n","import { safeParseJSON, safeValidateTypes } from '@ai-sdk/provider-utils';\nimport { asSchema, Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { NoObjectGeneratedError } from '../../errors';\nimport { injectJsonInstruction } from '../generate-object/inject-json-instruction';\nimport {\n LanguageModel,\n LanguageModelV1CallOptions,\n} from '../types/language-model';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { LanguageModelUsage } from '../types/usage';\n\nexport interface Output<OUTPUT> {\n readonly type: 'object' | 'text';\n injectIntoSystemPrompt(options: {\n system: string | undefined;\n model: LanguageModel;\n }): string | undefined;\n responseFormat: (options: {\n model: LanguageModel;\n }) => LanguageModelV1CallOptions['responseFormat'];\n parseOutput(\n options: { text: string },\n context: {\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ): OUTPUT;\n}\n\nexport const text = (): Output<string> => ({\n type: 'text',\n responseFormat: () => ({ type: 'text' }),\n injectIntoSystemPrompt({ system }: { system: string | undefined }) {\n return system;\n },\n parseOutput({ text }: { text: string }) {\n return text;\n },\n});\n\nexport const object = <OUTPUT>({\n schema: inputSchema,\n}: {\n schema: z.Schema<OUTPUT, z.ZodTypeDef, any> | Schema<OUTPUT>;\n}): Output<OUTPUT> => {\n const schema = asSchema(inputSchema);\n\n return {\n type: 'object',\n responseFormat: ({ model }) => ({\n type: 'json',\n schema: model.supportsStructuredOutputs ? 
schema.jsonSchema : undefined,\n }),\n injectIntoSystemPrompt({ system, model }) {\n // when the model supports structured outputs,\n // we can use the system prompt as is:\n return model.supportsStructuredOutputs\n ? system\n : injectJsonInstruction({\n prompt: system,\n schema: schema.jsonSchema,\n });\n },\n parseOutput(\n { text }: { text: string },\n context: {\n response: LanguageModelResponseMetadata;\n usage: LanguageModelUsage;\n },\n ) {\n const parseResult = safeParseJSON({ text });\n\n if (!parseResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: could not parse the response.',\n cause: parseResult.error,\n text,\n response: context.response,\n usage: context.usage,\n });\n }\n\n const validationResult = safeValidateTypes({\n value: parseResult.value,\n schema,\n });\n\n if (!validationResult.success) {\n throw new NoObjectGeneratedError({\n message: 'No object generated: response did not match schema.',\n cause: validationResult.error,\n text,\n response: context.response,\n usage: context.usage,\n });\n }\n\n return validationResult.value;\n },\n };\n};\n","import { createIdGenerator } from '@ai-sdk/provider-utils';\nimport { DataStreamString, formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { Span } from '@opentelemetry/api';\nimport { ServerResponse } from 'node:http';\nimport { InvalidArgumentError } from '../../errors/invalid-argument-error';\nimport { StreamData } from '../../streams/stream-data';\nimport { DelayedPromise } from '../../util/delayed-promise';\nimport { DataStreamWriter } from '../data-stream/data-stream-writer';\nimport { CallSettings } from '../prompt/call-settings';\nimport { convertToLanguageModelPrompt } from '../prompt/convert-to-language-model-prompt';\nimport { CoreAssistantMessage, CoreToolMessage } from '../prompt/message';\nimport { prepareCallSettings } from '../prompt/prepare-call-settings';\nimport { prepareRetries } from '../prompt/prepare-retries';\nimport { prepareToolsAndToolChoice } from '../prompt/prepare-tools-and-tool-choice';\nimport { Prompt } from '../prompt/prompt';\nimport { standardizePrompt } from '../prompt/standardize-prompt';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { getBaseTelemetryAttributes } from '../telemetry/get-base-telemetry-attributes';\nimport { getTracer } from '../telemetry/get-tracer';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { CoreTool } from '../tool';\nimport {\n CoreToolChoice,\n FinishReason,\n LanguageModel,\n LogProbs,\n} from '../types/language-model';\nimport { LanguageModelRequestMetadata } from '../types/language-model-request-metadata';\nimport { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';\nimport { ProviderMetadata } from '../types/provider-metadata';\nimport { addLanguageModelUsage, LanguageModelUsage } from '../types/usage';\nimport {\n AsyncIterableStream,\n createAsyncIterableStream,\n} from '../util/async-iterable-stream';\nimport { createStitchableStream } from '../util/create-stitchable-stream';\nimport { mergeStreams } from '../util/merge-streams';\nimport { now as originalNow } from '../util/now';\nimport { prepareOutgoingHttpHeaders } from '../util/prepare-outgoing-http-headers';\nimport { prepareResponseHeaders } from '../util/prepare-response-headers';\nimport { splitOnLastWhitespace } 
from '../util/split-on-last-whitespace';\nimport { writeToServerResponse } from '../util/write-to-server-response';\nimport {\n runToolsTransformation,\n SingleRequestTextStreamPart,\n} from './run-tools-transformation';\nimport { StepResult } from './step-result';\nimport { StreamTextResult, TextStreamPart } from './stream-text-result';\nimport { toResponseMessages } from './to-response-messages';\nimport { ToolCallUnion } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolResultUnion } from './tool-result';\n\nconst originalGenerateId = createIdGenerator({ prefix: 'aitxt', size: 24 });\n\n/**\nGenerate a text and call tools for a given prompt using a language model.\n\nThis function streams the output. If you do not want to stream the output, use `generateText` instead.\n\n@param model - The language model to use.\n@param tools - Tools that are accessible to and can be called by the model. The model needs to support calling tools.\n\n@param system - A system message that will be part of the prompt.\n@param prompt - A simple text prompt. You can either use `prompt` or `messages` but not both.\n@param messages - A list of messages. You can either use `prompt` or `messages` but not both.\n\n@param maxTokens - Maximum number of tokens to generate.\n@param temperature - Temperature setting.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topP - Nucleus sampling.\nThe value is passed through to the provider. The range depends on the provider and model.\nIt is recommended to set either `temperature` or `topP`, but not both.\n@param topK - Only sample from the top K options for each subsequent token.\nUsed to remove \"long tail\" low probability responses.\nRecommended for advanced use cases only. You usually only need to use temperature.\n@param presencePenalty - Presence penalty setting.\nIt affects the likelihood of the model to repeat information that is already in the prompt.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param frequencyPenalty - Frequency penalty setting.\nIt affects the likelihood of the model to repeatedly use the same words or phrases.\nThe value is passed through to the provider. The range depends on the provider and model.\n@param stopSequences - Stop sequences.\nIf set, the model will stop generating text when one of the stop sequences is generated.\n@param seed - The seed (integer) to use for random sampling.\nIf set and supported by the model, calls will generate deterministic results.\n\n@param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.\n@param abortSignal - An optional abort signal that can be used to cancel the call.\n@param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.\n\n@param maxSteps - Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.\n\n@param onChunk - Callback that is called for each chunk of the stream. 
The stream processing will pause until the callback promise is resolved.\n@param onStepFinish - Callback that is called when each step (LLM call) is finished, including intermediate steps.\n@param onFinish - Callback that is called when the LLM response and all request tool executions\n(for tools that have an `execute` function) are finished.\n\n@return\nA result object for accessing different stream types and additional information.\n */\nexport function streamText<TOOLS extends Record<string, CoreTool>>({\n model,\n tools,\n toolChoice,\n system,\n prompt,\n messages,\n maxRetries,\n abortSignal,\n headers,\n maxSteps = 1,\n experimental_continueSteps: continueSteps = false,\n experimental_telemetry: telemetry,\n experimental_providerMetadata: providerMetadata,\n experimental_toolCallStreaming: toolCallStreaming = false,\n experimental_activeTools: activeTools,\n experimental_repairToolCall: repairToolCall,\n experimental_transform: transform,\n onChunk,\n onFinish,\n onStepFinish,\n _internal: {\n now = originalNow,\n generateId = originalGenerateId,\n currentDate = () => new Date(),\n } = {},\n ...settings\n}: CallSettings &\n Prompt & {\n /**\nThe language model to use.\n */\n model: LanguageModel;\n\n /**\nThe tools that the model can call. The model needs to support calling tools.\n */\n tools?: TOOLS;\n\n /**\nThe tool choice strategy. Default: 'auto'.\n */\n toolChoice?: CoreToolChoice<TOOLS>;\n\n /**\nMaximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.\n\nA maximum number is required to prevent infinite loops in the case of misconfigured tools.\n\nBy default, it's set to 1, which means that only a single LLM call is made.\n */\n maxSteps?: number;\n\n /**\nWhen enabled, the model will perform additional steps if the finish reason is \"length\" (experimental).\n\nBy default, it's set to false.\n */\n experimental_continueSteps?: boolean;\n\n /**\nOptional telemetry configuration (experimental).\n */\n experimental_telemetry?: TelemetrySettings;\n\n /**\nAdditional provider-specific metadata. They are passed through\nto the provider from the AI SDK and enable provider-specific\nfunctionality that can be fully encapsulated in the provider.\n */\n experimental_providerMetadata?: ProviderMetadata;\n\n /**\nLimits the tools that are available for the model to call without\nchanging the tool call and result types in the result.\n */\n experimental_activeTools?: Array<keyof TOOLS>;\n\n /**\nA function that attempts to repair a tool call that failed to parse.\n */\n experimental_repairToolCall?: ToolCallRepairFunction<TOOLS>;\n\n /**\nEnable streaming of tool call deltas as they are generated. Disabled by default.\n */\n experimental_toolCallStreaming?: boolean;\n\n /**\nOptional transformation that is applied to the stream.\n */\n experimental_transform?: (options: {\n tools: TOOLS; // for type inference\n }) => TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>>;\n\n /**\nCallback that is called for each chunk of the stream. 
The stream processing will pause until the callback promise is resolved.\n */\n onChunk?: (event: {\n chunk: Extract<\n TextStreamPart<TOOLS>,\n {\n type:\n | 'text-delta'\n | 'tool-call'\n | 'tool-call-streaming-start'\n | 'tool-call-delta'\n | 'tool-result';\n }\n >;\n }) => Promise<void> | void;\n\n /**\nCallback that is called when the LLM response and all request tool executions\n(for tools that have an `execute` function) are finished.\n\nThe usage is the combined usage of all steps.\n */\n onFinish?: (\n event: Omit<StepResult<TOOLS>, 'stepType' | 'isContinued'> & {\n /**\nDetails for all steps.\n */\n readonly steps: StepResult<TOOLS>[];\n },\n ) => Promise<void> | void;\n\n /**\n Callback that is called when each step (LLM call) is finished, including intermediate steps.\n */\n onStepFinish?: (event: StepResult<TOOLS>) => Promise<void> | void;\n\n /**\n * Internal. For test use only. May change without notice.\n */\n _internal?: {\n now?: () => number;\n generateId?: () => string;\n currentDate?: () => Date;\n };\n }): StreamTextResult<TOOLS> {\n return new DefaultStreamTextResult({\n model,\n telemetry,\n headers,\n settings,\n maxRetries,\n abortSignal,\n system,\n prompt,\n messages,\n tools,\n toolChoice,\n toolCallStreaming,\n transform: transform?.({ tools: tools as TOOLS }),\n activeTools,\n repairToolCall,\n maxSteps,\n continueSteps,\n providerMetadata,\n onChunk,\n onFinish,\n onStepFinish,\n now,\n currentDate,\n generateId,\n });\n}\n\nclass DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>>\n implements StreamTextResult<TOOLS>\n{\n private readonly warningsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['warnings']>\n >();\n private readonly usagePromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['usage']>\n >();\n private readonly finishReasonPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['finishReason']>\n >();\n private readonly providerMetadataPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['experimental_providerMetadata']>\n >();\n private readonly textPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['text']>\n >();\n private readonly toolCallsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['toolCalls']>\n >();\n private readonly toolResultsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['toolResults']>\n >();\n private readonly requestPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['request']>\n >();\n private readonly responsePromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['response']>\n >();\n private readonly stepsPromise = new DelayedPromise<\n Awaited<StreamTextResult<TOOLS>['steps']>\n >();\n\n private readonly addStream: (\n stream: ReadableStream<TextStreamPart<TOOLS>>,\n ) => void;\n\n private readonly closeStream: () => void;\n\n private baseStream: ReadableStream<TextStreamPart<TOOLS>>;\n\n constructor({\n model,\n telemetry,\n headers,\n settings,\n maxRetries: maxRetriesArg,\n abortSignal,\n system,\n prompt,\n messages,\n tools,\n toolChoice,\n toolCallStreaming,\n transform,\n activeTools,\n repairToolCall,\n maxSteps,\n continueSteps,\n providerMetadata,\n onChunk,\n onFinish,\n onStepFinish,\n now,\n currentDate,\n generateId,\n }: {\n model: LanguageModel;\n telemetry: TelemetrySettings | undefined;\n headers: Record<string, string | undefined> | undefined;\n settings: Omit<CallSettings, 'abortSignal' | 'headers'>;\n maxRetries: number | undefined;\n abortSignal: AbortSignal | undefined;\n 
system: Prompt['system'];\n prompt: Prompt['prompt'];\n messages: Prompt['messages'];\n tools: TOOLS | undefined;\n toolChoice: CoreToolChoice<TOOLS> | undefined;\n toolCallStreaming: boolean;\n transform:\n | TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>>\n | undefined;\n activeTools: Array<keyof TOOLS> | undefined;\n repairToolCall: ToolCallRepairFunction<TOOLS> | undefined;\n maxSteps: number;\n continueSteps: boolean;\n providerMetadata: ProviderMetadata | undefined;\n onChunk:\n | undefined\n | ((event: {\n chunk: Extract<\n TextStreamPart<TOOLS>,\n {\n type:\n | 'text-delta'\n | 'tool-call'\n | 'tool-call-streaming-start'\n | 'tool-call-delta'\n | 'tool-result';\n }\n >;\n }) => Promise<void> | void);\n onFinish:\n | undefined\n | ((\n event: Omit<StepResult<TOOLS>, 'stepType' | 'isContinued'> & {\n readonly steps: StepResult<TOOLS>[];\n },\n ) => Promise<void> | void);\n onStepFinish:\n | undefined\n | ((event: StepResult<TOOLS>) => Promise<void> | void);\n now: () => number;\n currentDate: () => Date;\n generateId: () => string;\n }) {\n if (maxSteps < 1) {\n throw new InvalidArgumentError({\n parameter: 'maxSteps',\n value: maxSteps,\n message: 'maxSteps must be at least 1',\n });\n }\n\n // event processor for telemetry, invoking callbacks, etc.\n // The event processor reads the transformed stream to enable correct\n // recording of the final transformed outputs.\n let recordedStepText = '';\n let recordedContinuationText = '';\n let recordedFullText = '';\n let recordedRequest: LanguageModelRequestMetadata | undefined = undefined;\n const recordedResponse: LanguageModelResponseMetadata & {\n messages: Array<CoreAssistantMessage | CoreToolMessage>;\n } = {\n id: generateId(),\n timestamp: currentDate(),\n modelId: model.modelId,\n messages: [],\n };\n let recordedToolCalls: ToolCallUnion<TOOLS>[] = [];\n let recordedToolResults: ToolResultUnion<TOOLS>[] = [];\n let recordedFinishReason: FinishReason | undefined = undefined;\n let recordedUsage: LanguageModelUsage | undefined = undefined;\n let recordedProviderMetadata: ProviderMetadata | undefined = undefined;\n let stepType: 'initial' | 'continue' | 'tool-result' = 'initial';\n const recordedSteps: StepResult<TOOLS>[] = [];\n let rootSpan!: Span;\n\n const eventProcessor = new TransformStream<\n TextStreamPart<TOOLS>,\n TextStreamPart<TOOLS>\n >({\n async transform(chunk, controller) {\n controller.enqueue(chunk); // forward the chunk to the next stream\n\n if (\n chunk.type === 'text-delta' ||\n chunk.type === 'tool-call' ||\n chunk.type === 'tool-result' ||\n chunk.type === 'tool-call-streaming-start' ||\n chunk.type === 'tool-call-delta'\n ) {\n await onChunk?.({ chunk });\n }\n\n if (chunk.type === 'text-delta') {\n recordedStepText += chunk.textDelta;\n recordedContinuationText += chunk.textDelta;\n recordedFullText += chunk.textDelta;\n }\n\n if (chunk.type === 'tool-call') {\n recordedToolCalls.push(chunk);\n }\n\n if (chunk.type === 'tool-result') {\n recordedToolResults.push(chunk);\n }\n\n if (chunk.type === 'step-finish') {\n const stepMessages = toResponseMessages({\n text: recordedContinuationText,\n tools: tools ?? 
({} as TOOLS),\n toolCalls: recordedToolCalls,\n toolResults: recordedToolResults,\n });\n\n // determine the next step type\n const currentStep = recordedSteps.length;\n let nextStepType: 'done' | 'continue' | 'tool-result' = 'done';\n if (currentStep + 1 < maxSteps) {\n if (\n continueSteps &&\n chunk.finishReason === 'length' &&\n // only use continue when there are no tool calls:\n recordedToolCalls.length === 0\n ) {\n nextStepType = 'continue';\n } else if (\n // there are tool calls:\n recordedToolCalls.length > 0 &&\n // all current tool calls have results:\n recordedToolResults.length === recordedToolCalls.length\n ) {\n nextStepType = 'tool-result';\n }\n }\n\n // Add step information (after response messages are updated):\n const currentStepResult: StepResult<TOOLS> = {\n stepType,\n text: recordedStepText,\n toolCalls: recordedToolCalls,\n toolResults: recordedToolResults,\n finishReason: chunk.finishReason,\n usage: chunk.usage,\n warnings: chunk.warnings,\n logprobs: chunk.logprobs,\n request: chunk.request,\n response: {\n ...chunk.response,\n messages: [...recordedResponse.messages, ...stepMessages],\n },\n experimental_providerMetadata: chunk.experimental_providerMetadata,\n isContinued: chunk.isContinued,\n };\n\n await onStepFinish?.(currentStepResult);\n\n recordedSteps.push(currentStepResult);\n\n recordedToolCalls = [];\n recordedToolResults = [];\n recordedStepText = '';\n recordedRequest = chunk.request;\n\n if (nextStepType !== 'done') {\n stepType = nextStepType;\n }\n\n if (nextStepType !== 'continue') {\n recordedResponse.messages.push(...stepMessages);\n recordedContinuationText = '';\n }\n }\n\n if (chunk.type === 'finish') {\n recordedResponse.id = chunk.response.id;\n recordedResponse.timestamp = chunk.response.timestamp;\n recordedResponse.modelId = chunk.response.modelId;\n recordedResponse.headers = chunk.response.headers;\n recordedUsage = chunk.usage;\n recordedFinishReason = chunk.finishReason;\n recordedProviderMetadata = chunk.experimental_providerMetadata;\n }\n },\n\n async flush(controller) {\n try {\n // from last step (when there are errors there may be no last step)\n const lastStep = recordedSteps[recordedSteps.length - 1];\n if (lastStep) {\n self.warningsPromise.resolve(lastStep.warnings);\n self.requestPromise.resolve(lastStep.request);\n self.responsePromise.resolve(lastStep.response);\n self.toolCallsPromise.resolve(lastStep.toolCalls);\n self.toolResultsPromise.resolve(lastStep.toolResults);\n self.providerMetadataPromise.resolve(\n lastStep.experimental_providerMetadata,\n );\n }\n\n // derived:\n const finishReason = recordedFinishReason ?? 'unknown';\n const usage = recordedUsage ?? {\n completionTokens: NaN,\n promptTokens: NaN,\n totalTokens: NaN,\n };\n\n // from finish:\n self.finishReasonPromise.resolve(finishReason);\n self.usagePromise.resolve(usage);\n\n // aggregate results:\n self.textPromise.resolve(recordedFullText);\n self.stepsPromise.resolve(recordedSteps);\n\n // call onFinish callback:\n await onFinish?.({\n finishReason,\n logprobs: undefined,\n usage,\n text: recordedFullText,\n toolCalls: lastStep.toolCalls,\n toolResults: lastStep.toolResults,\n request: lastStep.request ?? 
{},\n response: lastStep.response,\n warnings: lastStep.warnings,\n experimental_providerMetadata:\n lastStep.experimental_providerMetadata,\n steps: recordedSteps,\n });\n\n // Add response information to the root span:\n rootSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': finishReason,\n 'ai.response.text': { output: () => recordedFullText },\n 'ai.response.toolCalls': {\n output: () =>\n lastStep.toolCalls?.length\n ? JSON.stringify(lastStep.toolCalls)\n : undefined,\n },\n\n 'ai.usage.promptTokens': usage.promptTokens,\n 'ai.usage.completionTokens': usage.completionTokens,\n },\n }),\n );\n } catch (error) {\n controller.error(error);\n } finally {\n rootSpan.end();\n }\n },\n });\n\n // initialize the stitchable stream and the transformed stream:\n const stitchableStream = createStitchableStream<TextStreamPart<TOOLS>>();\n this.addStream = stitchableStream.addStream;\n this.closeStream = stitchableStream.close;\n this.baseStream = (\n transform\n ? stitchableStream.stream.pipeThrough(transform)\n : stitchableStream.stream\n ).pipeThrough(eventProcessor);\n\n const { maxRetries, retry } = prepareRetries({\n maxRetries: maxRetriesArg,\n });\n\n const tracer = getTracer(telemetry);\n\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries },\n });\n\n const initialPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools,\n });\n\n const self = this;\n\n recordSpan({\n name: 'ai.streamText',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: 'ai.streamText', telemetry }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n 'ai.prompt': {\n input: () => JSON.stringify({ system, prompt, messages }),\n },\n 'ai.settings.maxSteps': maxSteps,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async rootSpanArg => {\n rootSpan = rootSpanArg;\n\n async function streamStep({\n currentStep,\n responseMessages,\n usage,\n stepType,\n previousStepText,\n hasLeadingWhitespace,\n }: {\n currentStep: number;\n responseMessages: Array<CoreAssistantMessage | CoreToolMessage>;\n usage: LanguageModelUsage;\n stepType: 'initial' | 'continue' | 'tool-result';\n previousStepText: string;\n hasLeadingWhitespace: boolean;\n }) {\n // after the 1st step, we need to switch to messages format:\n const promptFormat =\n responseMessages.length === 0 ? 
initialPrompt.type : 'messages';\n\n const stepInputMessages = [\n ...initialPrompt.messages,\n ...responseMessages,\n ];\n\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: {\n type: promptFormat,\n system: initialPrompt.system,\n messages: stepInputMessages,\n },\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: model.supportsUrl,\n });\n\n const mode = {\n type: 'regular' as const,\n ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools }),\n };\n\n const {\n result: { stream, warnings, rawResponse, request },\n doStreamSpan,\n startTimestampMs,\n } = await retry(() =>\n recordSpan({\n name: 'ai.streamText.doStream',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.streamText.doStream',\n telemetry,\n }),\n ...baseTelemetryAttributes,\n 'ai.prompt.format': {\n input: () => promptFormat,\n },\n 'ai.prompt.messages': {\n input: () => JSON.stringify(promptMessages),\n },\n 'ai.prompt.tools': {\n // convert the language model level tools:\n input: () => mode.tools?.map(tool => JSON.stringify(tool)),\n },\n 'ai.prompt.toolChoice': {\n input: () =>\n mode.toolChoice != null\n ? JSON.stringify(mode.toolChoice)\n : undefined,\n },\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.system': model.provider,\n 'gen_ai.request.model': model.modelId,\n 'gen_ai.request.frequency_penalty': settings.frequencyPenalty,\n 'gen_ai.request.max_tokens': settings.maxTokens,\n 'gen_ai.request.presence_penalty': settings.presencePenalty,\n 'gen_ai.request.stop_sequences': settings.stopSequences,\n 'gen_ai.request.temperature': settings.temperature,\n 'gen_ai.request.top_k': settings.topK,\n 'gen_ai.request.top_p': settings.topP,\n },\n }),\n tracer,\n endWhenDone: false,\n fn: async doStreamSpan => ({\n startTimestampMs: now(), // get before the call\n doStreamSpan,\n result: await model.doStream({\n mode,\n ...prepareCallSettings(settings),\n inputFormat: promptFormat,\n prompt: promptMessages,\n providerMetadata,\n abortSignal,\n headers,\n }),\n }),\n }),\n );\n\n const transformedStream = runToolsTransformation({\n tools,\n generatorStream: stream,\n toolCallStreaming,\n tracer,\n telemetry,\n system,\n messages: stepInputMessages,\n repairToolCall,\n abortSignal,\n });\n\n const stepRequest = request ?? {};\n const stepToolCalls: ToolCallUnion<TOOLS>[] = [];\n const stepToolResults: ToolResultUnion<TOOLS>[] = [];\n let stepFinishReason: FinishReason = 'unknown';\n let stepUsage: LanguageModelUsage = {\n promptTokens: 0,\n completionTokens: 0,\n totalTokens: 0,\n };\n let stepProviderMetadata: ProviderMetadata | undefined;\n let stepFirstChunk = true;\n let stepText = '';\n let fullStepText = stepType === 'continue' ? previousStepText : '';\n let stepLogProbs: LogProbs | undefined;\n let stepResponse: { id: string; timestamp: Date; modelId: string } = {\n id: generateId(),\n timestamp: currentDate(),\n modelId: model.modelId,\n };\n\n // chunk buffer when using continue:\n let chunkBuffer = '';\n let chunkTextPublished = false;\n let inWhitespacePrefix = true;\n let hasWhitespaceSuffix = false; // for next step. 
when true, step ended with whitespace\n\n async function publishTextChunk({\n controller,\n chunk,\n }: {\n controller: TransformStreamDefaultController<TextStreamPart<TOOLS>>;\n chunk: TextStreamPart<TOOLS> & { type: 'text-delta' };\n }) {\n controller.enqueue(chunk);\n\n stepText += chunk.textDelta;\n fullStepText += chunk.textDelta;\n chunkTextPublished = true;\n hasWhitespaceSuffix = chunk.textDelta.trimEnd() !== chunk.textDelta;\n }\n\n self.addStream(\n transformedStream.pipeThrough(\n new TransformStream<\n SingleRequestTextStreamPart<TOOLS>,\n TextStreamPart<TOOLS>\n >({\n async transform(chunk, controller): Promise<void> {\n // Telemetry for first chunk:\n if (stepFirstChunk) {\n const msToFirstChunk = now() - startTimestampMs;\n\n stepFirstChunk = false;\n\n doStreamSpan.addEvent('ai.stream.firstChunk', {\n 'ai.response.msToFirstChunk': msToFirstChunk,\n });\n\n doStreamSpan.setAttributes({\n 'ai.response.msToFirstChunk': msToFirstChunk,\n });\n }\n\n // Filter out empty text deltas\n if (\n chunk.type === 'text-delta' &&\n chunk.textDelta.length === 0\n ) {\n return;\n }\n\n const chunkType = chunk.type;\n switch (chunkType) {\n case 'text-delta': {\n if (continueSteps) {\n // when a new step starts, leading whitespace is to be discarded\n // when there is already preceding whitespace in the chunk buffer\n const trimmedChunkText =\n inWhitespacePrefix && hasLeadingWhitespace\n ? chunk.textDelta.trimStart()\n : chunk.textDelta;\n\n if (trimmedChunkText.length === 0) {\n break;\n }\n\n inWhitespacePrefix = false;\n chunkBuffer += trimmedChunkText;\n\n const split = splitOnLastWhitespace(chunkBuffer);\n\n // publish the text until the last whitespace:\n if (split != null) {\n chunkBuffer = split.suffix;\n\n await publishTextChunk({\n controller,\n chunk: {\n type: 'text-delta',\n textDelta: split.prefix + split.whitespace,\n },\n });\n }\n } else {\n await publishTextChunk({ controller, chunk });\n }\n\n break;\n }\n\n case 'tool-call': {\n controller.enqueue(chunk);\n // store tool calls for onFinish callback and toolCalls promise:\n stepToolCalls.push(chunk);\n break;\n }\n\n case 'tool-result': {\n controller.enqueue(chunk);\n // store tool results for onFinish callback and toolResults promise:\n stepToolResults.push(chunk);\n break;\n }\n\n case 'response-metadata': {\n stepResponse = {\n id: chunk.id ?? stepResponse.id,\n timestamp: chunk.timestamp ?? stepResponse.timestamp,\n modelId: chunk.modelId ?? 
stepResponse.modelId,\n };\n break;\n }\n\n case 'finish': {\n // Note: tool executions might not be finished yet when the finish event is emitted.\n // store usage and finish reason for promises and onFinish callback:\n stepUsage = chunk.usage;\n stepFinishReason = chunk.finishReason;\n stepProviderMetadata =\n chunk.experimental_providerMetadata;\n stepLogProbs = chunk.logprobs;\n\n // Telemetry for finish event timing\n // (since tool executions can take longer and distort calculations)\n const msToFinish = now() - startTimestampMs;\n doStreamSpan.addEvent('ai.stream.finish');\n doStreamSpan.setAttributes({\n 'ai.response.msToFinish': msToFinish,\n 'ai.response.avgCompletionTokensPerSecond':\n (1000 * stepUsage.completionTokens) / msToFinish,\n });\n\n break;\n }\n\n case 'tool-call-streaming-start':\n case 'tool-call-delta': {\n controller.enqueue(chunk);\n break;\n }\n\n case 'error': {\n controller.enqueue(chunk);\n stepFinishReason = 'error';\n break;\n }\n\n default: {\n const exhaustiveCheck: never = chunkType;\n throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);\n }\n }\n },\n\n // invoke onFinish callback and resolve toolResults promise when the stream is about to close:\n async flush(controller) {\n const stepToolCallsJson =\n stepToolCalls.length > 0\n ? JSON.stringify(stepToolCalls)\n : undefined;\n\n // determine the next step type\n let nextStepType: 'done' | 'continue' | 'tool-result' =\n 'done';\n if (currentStep + 1 < maxSteps) {\n if (\n continueSteps &&\n stepFinishReason === 'length' &&\n // only use continue when there are no tool calls:\n stepToolCalls.length === 0\n ) {\n nextStepType = 'continue';\n } else if (\n // there are tool calls:\n stepToolCalls.length > 0 &&\n // all current tool calls have results:\n stepToolResults.length === stepToolCalls.length\n ) {\n nextStepType = 'tool-result';\n }\n }\n\n // when using continuation, publish buffer on final step or if there\n // was no whitespace in the step:\n if (\n continueSteps &&\n chunkBuffer.length > 0 &&\n (nextStepType !== 'continue' || // when the next step is a regular step, publish the buffer\n (stepType === 'continue' && !chunkTextPublished)) // when the next step is a continue step, publish the buffer if no text was published in the step\n ) {\n await publishTextChunk({\n controller,\n chunk: {\n type: 'text-delta',\n textDelta: chunkBuffer,\n },\n });\n chunkBuffer = '';\n }\n\n // record telemetry information first to ensure best effort timing\n try {\n doStreamSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.response.finishReason': stepFinishReason,\n 'ai.response.text': { output: () => stepText },\n 'ai.response.toolCalls': {\n output: () => stepToolCallsJson,\n },\n 'ai.response.id': stepResponse.id,\n 'ai.response.model': stepResponse.modelId,\n 'ai.response.timestamp':\n stepResponse.timestamp.toISOString(),\n\n 'ai.usage.promptTokens': stepUsage.promptTokens,\n 'ai.usage.completionTokens':\n stepUsage.completionTokens,\n\n // standardized gen-ai llm span attributes:\n 'gen_ai.response.finish_reasons': [stepFinishReason],\n 'gen_ai.response.id': stepResponse.id,\n 'gen_ai.response.model': stepResponse.modelId,\n 'gen_ai.usage.input_tokens': stepUsage.promptTokens,\n 'gen_ai.usage.output_tokens':\n stepUsage.completionTokens,\n },\n }),\n );\n } catch (error) {\n // ignore error setting telemetry attributes\n } finally {\n // finish doStreamSpan before other operations for correct timing:\n doStreamSpan.end();\n }\n\n controller.enqueue({\n type: 
'step-finish',\n finishReason: stepFinishReason,\n usage: stepUsage,\n experimental_providerMetadata: stepProviderMetadata,\n logprobs: stepLogProbs,\n request: stepRequest,\n response: {\n ...stepResponse,\n headers: rawResponse?.headers,\n },\n warnings,\n isContinued: nextStepType === 'continue',\n });\n\n const combinedUsage = addLanguageModelUsage(usage, stepUsage);\n\n if (nextStepType === 'done') {\n controller.enqueue({\n type: 'finish',\n finishReason: stepFinishReason,\n usage: combinedUsage,\n experimental_providerMetadata: stepProviderMetadata,\n logprobs: stepLogProbs,\n response: {\n ...stepResponse,\n headers: rawResponse?.headers,\n },\n });\n\n self.closeStream(); // close the stitchable stream\n } else {\n // append to messages for the next step:\n if (stepType === 'continue') {\n // continue step: update the last assistant message\n // continue is only possible when there are no tool calls,\n // so we can assume that there is a single last assistant message:\n const lastMessage = responseMessages[\n responseMessages.length - 1\n ] as CoreAssistantMessage;\n\n if (typeof lastMessage.content === 'string') {\n lastMessage.content += stepText;\n } else {\n lastMessage.content.push({\n text: stepText,\n type: 'text',\n });\n }\n } else {\n responseMessages.push(\n ...toResponseMessages({\n text: stepText,\n tools: tools ?? ({} as TOOLS),\n toolCalls: stepToolCalls,\n toolResults: stepToolResults,\n }),\n );\n }\n\n await streamStep({\n currentStep: currentStep + 1,\n responseMessages,\n usage: combinedUsage,\n stepType: nextStepType,\n previousStepText: fullStepText,\n hasLeadingWhitespace: hasWhitespaceSuffix,\n });\n }\n },\n }),\n ),\n );\n }\n\n // add the initial stream to the stitchable stream\n await streamStep({\n currentStep: 0,\n responseMessages: [],\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n totalTokens: 0,\n },\n previousStepText: '',\n stepType: 'initial',\n hasLeadingWhitespace: false,\n });\n },\n }).catch(error => {\n // add an error stream part and close the streams:\n self.addStream(\n new ReadableStream({\n start(controller) {\n controller.enqueue({ type: 'error', error });\n controller.close();\n },\n }),\n );\n self.closeStream();\n });\n }\n\n get warnings() {\n return this.warningsPromise.value;\n }\n\n get usage() {\n return this.usagePromise.value;\n }\n\n get finishReason() {\n return this.finishReasonPromise.value;\n }\n\n get experimental_providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n\n get text() {\n return this.textPromise.value;\n }\n\n get toolCalls() {\n return this.toolCallsPromise.value;\n }\n\n get toolResults() {\n return this.toolResultsPromise.value;\n }\n\n get request() {\n return this.requestPromise.value;\n }\n\n get response() {\n return this.responsePromise.value;\n }\n\n get steps() {\n return this.stepsPromise.value;\n }\n\n /**\nSplit out a new stream from the original stream.\nThe original stream is replaced to allow for further splitting,\nsince we do not know how many times the stream will be split.\n\nNote: this leads to buffering the stream content on the server.\nHowever, the LLM results are expected to be small enough to not cause issues.\n */\n private teeStream() {\n const [stream1, stream2] = this.baseStream.tee();\n this.baseStream = stream2;\n return stream1;\n }\n\n get textStream(): AsyncIterableStream<string> {\n return createAsyncIterableStream(\n this.teeStream().pipeThrough(\n new TransformStream<TextStreamPart<TOOLS>, string>({\n transform(chunk, controller) {\n if 
(chunk.type === 'text-delta') {\n controller.enqueue(chunk.textDelta);\n } else if (chunk.type === 'error') {\n controller.error(chunk.error);\n }\n },\n }),\n ),\n );\n }\n\n get fullStream(): AsyncIterableStream<TextStreamPart<TOOLS>> {\n return createAsyncIterableStream(this.teeStream());\n }\n\n private toDataStreamInternal({\n getErrorMessage = () => 'An error occurred.', // mask error messages for safety by default\n sendUsage = true,\n }: {\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean;\n } = {}): ReadableStream<DataStreamString> {\n let aggregatedResponse = '';\n\n const callbackTransformer = new TransformStream<\n TextStreamPart<TOOLS>,\n TextStreamPart<TOOLS>\n >({\n async transform(chunk, controller): Promise<void> {\n controller.enqueue(chunk);\n\n if (chunk.type === 'text-delta') {\n aggregatedResponse += chunk.textDelta;\n }\n },\n });\n\n const streamPartsTransformer = new TransformStream<\n TextStreamPart<TOOLS>,\n DataStreamString\n >({\n transform: async (chunk, controller) => {\n const chunkType = chunk.type;\n switch (chunkType) {\n case 'text-delta': {\n controller.enqueue(formatDataStreamPart('text', chunk.textDelta));\n break;\n }\n\n case 'tool-call-streaming-start': {\n controller.enqueue(\n formatDataStreamPart('tool_call_streaming_start', {\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n }),\n );\n break;\n }\n\n case 'tool-call-delta': {\n controller.enqueue(\n formatDataStreamPart('tool_call_delta', {\n toolCallId: chunk.toolCallId,\n argsTextDelta: chunk.argsTextDelta,\n }),\n );\n break;\n }\n\n case 'tool-call': {\n controller.enqueue(\n formatDataStreamPart('tool_call', {\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n args: chunk.args,\n }),\n );\n break;\n }\n\n case 'tool-result': {\n controller.enqueue(\n formatDataStreamPart('tool_result', {\n toolCallId: chunk.toolCallId,\n result: chunk.result,\n }),\n );\n break;\n }\n\n case 'error': {\n controller.enqueue(\n formatDataStreamPart('error', getErrorMessage(chunk.error)),\n );\n break;\n }\n\n case 'step-finish': {\n controller.enqueue(\n formatDataStreamPart('finish_step', {\n finishReason: chunk.finishReason,\n usage: sendUsage\n ? {\n promptTokens: chunk.usage.promptTokens,\n completionTokens: chunk.usage.completionTokens,\n }\n : undefined,\n isContinued: chunk.isContinued,\n }),\n );\n break;\n }\n\n case 'finish': {\n controller.enqueue(\n formatDataStreamPart('finish_message', {\n finishReason: chunk.finishReason,\n usage: sendUsage\n ? 
{\n promptTokens: chunk.usage.promptTokens,\n completionTokens: chunk.usage.completionTokens,\n }\n : undefined,\n }),\n );\n break;\n }\n\n default: {\n const exhaustiveCheck: never = chunkType;\n throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);\n }\n }\n },\n });\n\n return this.fullStream\n .pipeThrough(callbackTransformer)\n .pipeThrough(streamPartsTransformer);\n }\n\n pipeDataStreamToResponse(\n response: ServerResponse,\n {\n status,\n statusText,\n headers,\n data,\n getErrorMessage,\n sendUsage,\n }: ResponseInit & {\n data?: StreamData;\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean; // default to true (change to false in v4: secure by default)\n } = {},\n ) {\n writeToServerResponse({\n response,\n status,\n statusText,\n headers: prepareOutgoingHttpHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n stream: this.toDataStream({ data, getErrorMessage, sendUsage }),\n });\n }\n\n pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit) {\n writeToServerResponse({\n response,\n status: init?.status,\n statusText: init?.statusText,\n headers: prepareOutgoingHttpHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n stream: this.textStream.pipeThrough(new TextEncoderStream()),\n });\n }\n\n // TODO breaking change 5.0: remove pipeThrough(new TextEncoderStream())\n toDataStream(options?: {\n data?: StreamData;\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean;\n }) {\n const stream = this.toDataStreamInternal({\n getErrorMessage: options?.getErrorMessage,\n sendUsage: options?.sendUsage,\n }).pipeThrough(new TextEncoderStream());\n\n return options?.data ? mergeStreams(options?.data.stream, stream) : stream;\n }\n\n mergeIntoDataStream(writer: DataStreamWriter) {\n writer.merge(\n this.toDataStreamInternal({\n getErrorMessage: writer.onError,\n }),\n );\n }\n\n toDataStreamResponse({\n headers,\n status,\n statusText,\n data,\n getErrorMessage,\n sendUsage,\n }: ResponseInit & {\n data?: StreamData;\n getErrorMessage?: (error: unknown) => string;\n sendUsage?: boolean;\n } = {}): Response {\n return new Response(\n this.toDataStream({ data, getErrorMessage, sendUsage }),\n {\n status,\n statusText,\n headers: prepareResponseHeaders(headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n },\n );\n }\n\n toTextStreamResponse(init?: ResponseInit): Response {\n return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {\n status: init?.status ?? 200,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n }),\n });\n }\n}\n","/**\n * Merges two readable streams into a single readable stream, emitting values\n * from each stream as they become available.\n *\n * The first stream is prioritized over the second stream. 
If both streams have\n * values available, the first stream's value is emitted first.\n *\n * @template VALUE1 - The type of values emitted by the first stream.\n * @template VALUE2 - The type of values emitted by the second stream.\n * @param {ReadableStream<VALUE1>} stream1 - The first readable stream.\n * @param {ReadableStream<VALUE2>} stream2 - The second readable stream.\n * @returns {ReadableStream<VALUE1 | VALUE2>} A new readable stream that emits values from both input streams.\n */\nexport function mergeStreams<VALUE1, VALUE2>(\n stream1: ReadableStream<VALUE1>,\n stream2: ReadableStream<VALUE2>,\n): ReadableStream<VALUE1 | VALUE2> {\n const reader1 = stream1.getReader();\n const reader2 = stream2.getReader();\n\n let lastRead1: Promise<ReadableStreamReadResult<VALUE1>> | undefined =\n undefined;\n let lastRead2: Promise<ReadableStreamReadResult<VALUE2>> | undefined =\n undefined;\n\n let stream1Done = false;\n let stream2Done = false;\n\n // only use when stream 2 is done:\n async function readStream1(\n controller: ReadableStreamDefaultController<VALUE1 | VALUE2>,\n ) {\n try {\n if (lastRead1 == null) {\n lastRead1 = reader1.read();\n }\n\n const result = await lastRead1;\n lastRead1 = undefined;\n\n if (!result.done) {\n controller.enqueue(result.value);\n } else {\n controller.close();\n }\n } catch (error) {\n controller.error(error);\n }\n }\n\n // only use when stream 1 is done:\n async function readStream2(\n controller: ReadableStreamDefaultController<VALUE1 | VALUE2>,\n ) {\n try {\n if (lastRead2 == null) {\n lastRead2 = reader2.read();\n }\n\n const result = await lastRead2;\n lastRead2 = undefined;\n\n if (!result.done) {\n controller.enqueue(result.value);\n } else {\n controller.close();\n }\n } catch (error) {\n controller.error(error);\n }\n }\n\n return new ReadableStream<VALUE1 | VALUE2>({\n async pull(controller) {\n try {\n // stream 1 is done, we can only read from stream 2:\n if (stream1Done) {\n await readStream2(controller);\n return;\n }\n\n // stream 2 is done, we can only read from stream 1:\n if (stream2Done) {\n await readStream1(controller);\n return;\n }\n\n // pull the next value from the stream that was read last:\n if (lastRead1 == null) {\n lastRead1 = reader1.read();\n }\n if (lastRead2 == null) {\n lastRead2 = reader2.read();\n }\n\n // Note on Promise.race (prioritizing stream 1 over stream 2):\n // If the iterable contains one or more non-promise values and/or an already settled promise,\n // then Promise.race() will settle to the first of these values found in the iterable.\n const { result, reader } = await Promise.race([\n lastRead1.then(result => ({ result, reader: reader1 })),\n lastRead2.then(result => ({ result, reader: reader2 })),\n ]);\n\n if (!result.done) {\n controller.enqueue(result.value);\n }\n\n if (reader === reader1) {\n lastRead1 = undefined;\n if (result.done) {\n // stream 1 is done, we can only read from stream 2:\n await readStream2(controller);\n stream1Done = true;\n }\n } else {\n lastRead2 = undefined;\n // stream 2 is done, we can only read from stream 1:\n if (result.done) {\n stream2Done = true;\n await readStream1(controller);\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader1.cancel();\n reader2.cancel();\n },\n });\n}\n","import { LanguageModelV1StreamPart } from '@ai-sdk/provider';\nimport { generateId } from '@ai-sdk/ui-utils';\nimport { Tracer } from '@opentelemetry/api';\nimport { ToolExecutionError } from '../../errors';\nimport { CoreMessage } from 
'../prompt/message';\nimport { assembleOperationName } from '../telemetry/assemble-operation-name';\nimport { recordSpan } from '../telemetry/record-span';\nimport { selectTelemetryAttributes } from '../telemetry/select-telemetry-attributes';\nimport { TelemetrySettings } from '../telemetry/telemetry-settings';\nimport { CoreTool } from '../tool';\nimport {\n FinishReason,\n LanguageModelUsage,\n LogProbs,\n ProviderMetadata,\n} from '../types';\nimport { calculateLanguageModelUsage } from '../types/usage';\nimport { parseToolCall } from './parse-tool-call';\nimport { ToolCallUnion } from './tool-call';\nimport { ToolCallRepairFunction } from './tool-call-repair';\nimport { ToolResultUnion } from './tool-result';\n\nexport type SingleRequestTextStreamPart<\n TOOLS extends Record<string, CoreTool>,\n> =\n | {\n type: 'text-delta';\n textDelta: string;\n }\n | ({\n type: 'tool-call';\n } & ToolCallUnion<TOOLS>)\n | {\n type: 'tool-call-streaming-start';\n toolCallId: string;\n toolName: string;\n }\n | {\n type: 'tool-call-delta';\n toolCallId: string;\n toolName: string;\n argsTextDelta: string;\n }\n | ({\n type: 'tool-result';\n } & ToolResultUnion<TOOLS>)\n | {\n type: 'response-metadata';\n id?: string;\n timestamp?: Date;\n modelId?: string;\n }\n | {\n type: 'finish';\n finishReason: FinishReason;\n logprobs?: LogProbs;\n usage: LanguageModelUsage;\n experimental_providerMetadata?: ProviderMetadata;\n }\n | {\n type: 'error';\n error: unknown;\n };\n\nexport function runToolsTransformation<TOOLS extends Record<string, CoreTool>>({\n tools,\n generatorStream,\n toolCallStreaming,\n tracer,\n telemetry,\n system,\n messages,\n abortSignal,\n repairToolCall,\n}: {\n tools: TOOLS | undefined;\n generatorStream: ReadableStream<LanguageModelV1StreamPart>;\n toolCallStreaming: boolean;\n tracer: Tracer;\n telemetry: TelemetrySettings | undefined;\n system: string | undefined;\n messages: CoreMessage[];\n abortSignal: AbortSignal | undefined;\n repairToolCall: ToolCallRepairFunction<TOOLS> | undefined;\n}): ReadableStream<SingleRequestTextStreamPart<TOOLS>> {\n // tool results stream\n let toolResultsStreamController: ReadableStreamDefaultController<\n SingleRequestTextStreamPart<TOOLS>\n > | null = null;\n const toolResultsStream = new ReadableStream<\n SingleRequestTextStreamPart<TOOLS>\n >({\n start(controller) {\n toolResultsStreamController = controller;\n },\n });\n\n // keep track of active tool calls for tool call streaming:\n const activeToolCalls: Record<string, boolean> = {};\n\n // keep track of outstanding tool results for stream closing:\n const outstandingToolResults = new Set<string>();\n\n let canClose = false;\n let finishChunk:\n | (SingleRequestTextStreamPart<TOOLS> & { type: 'finish' })\n | undefined = undefined;\n\n function attemptClose() {\n // close the tool results controller if no more outstanding tool calls\n if (canClose && outstandingToolResults.size === 0) {\n // we delay sending the finish chunk until all tool results (incl. 
delayed ones)\n // are received to ensure that the frontend receives tool results before a message\n // finish event arrives.\n if (finishChunk != null) {\n toolResultsStreamController!.enqueue(finishChunk);\n }\n\n toolResultsStreamController!.close();\n }\n }\n\n // forward stream\n const forwardStream = new TransformStream<\n LanguageModelV1StreamPart,\n SingleRequestTextStreamPart<TOOLS>\n >({\n async transform(\n chunk: LanguageModelV1StreamPart,\n controller: TransformStreamDefaultController<\n SingleRequestTextStreamPart<TOOLS>\n >,\n ) {\n const chunkType = chunk.type;\n\n switch (chunkType) {\n // forward:\n case 'text-delta':\n case 'response-metadata':\n case 'error': {\n controller.enqueue(chunk);\n break;\n }\n\n // forward with less information:\n case 'tool-call-delta': {\n if (toolCallStreaming) {\n if (!activeToolCalls[chunk.toolCallId]) {\n controller.enqueue({\n type: 'tool-call-streaming-start',\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n });\n\n activeToolCalls[chunk.toolCallId] = true;\n }\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n argsTextDelta: chunk.argsTextDelta,\n });\n }\n break;\n }\n\n // process tool call:\n case 'tool-call': {\n try {\n const toolCall = await parseToolCall({\n toolCall: chunk,\n tools,\n repairToolCall,\n system,\n messages,\n });\n\n controller.enqueue(toolCall);\n\n const tool = tools![toolCall.toolName];\n\n if (tool.execute != null) {\n const toolExecutionId = generateId(); // use our own id to guarantee uniqueness\n outstandingToolResults.add(toolExecutionId);\n\n // Note: we don't await the tool execution here (by leaving out 'await' on recordSpan),\n // because we want to process the next chunk as soon as possible.\n // This is important for the case where the tool execution takes a long time.\n recordSpan({\n name: 'ai.toolCall',\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: 'ai.toolCall',\n telemetry,\n }),\n 'ai.toolCall.name': toolCall.toolName,\n 'ai.toolCall.id': toolCall.toolCallId,\n 'ai.toolCall.args': {\n output: () => JSON.stringify(toolCall.args),\n },\n },\n }),\n tracer,\n fn: async span =>\n tool.execute!(toolCall.args, {\n toolCallId: toolCall.toolCallId,\n messages,\n abortSignal,\n }).then(\n (result: any) => {\n toolResultsStreamController!.enqueue({\n ...toolCall,\n type: 'tool-result',\n result,\n } as any);\n\n outstandingToolResults.delete(toolExecutionId);\n\n attemptClose();\n\n // record telemetry\n try {\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n 'ai.toolCall.result': {\n output: () => JSON.stringify(result),\n },\n },\n }),\n );\n } catch (ignored) {\n // JSON stringify might fail if the result is not serializable,\n // in which case we just ignore it. 
In the future we might want to\n // add an optional serialize method to the tool interface and warn\n // if the result is not serializable.\n }\n },\n (error: any) => {\n toolResultsStreamController!.enqueue({\n type: 'error',\n error: new ToolExecutionError({\n toolName: toolCall.toolName,\n toolArgs: toolCall.args,\n cause: error,\n }),\n });\n\n outstandingToolResults.delete(toolExecutionId);\n attemptClose();\n },\n ),\n });\n }\n } catch (error) {\n toolResultsStreamController!.enqueue({\n type: 'error',\n error,\n });\n }\n\n break;\n }\n\n case 'finish': {\n finishChunk = {\n type: 'finish',\n finishReason: chunk.finishReason,\n logprobs: chunk.logprobs,\n usage: calculateLanguageModelUsage(chunk.usage),\n experimental_providerMetadata: chunk.providerMetadata,\n };\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = chunkType;\n throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n\n flush() {\n canClose = true;\n attemptClose();\n },\n });\n\n // combine the generator stream and the tool results stream\n return new ReadableStream<SingleRequestTextStreamPart<TOOLS>>({\n async start(controller) {\n // need to wait for both pipes so there are no dangling promises that\n // can cause uncaught promise rejections when the stream is aborted\n return Promise.all([\n generatorStream.pipeThrough(forwardStream).pipeTo(\n new WritableStream({\n write(chunk) {\n controller.enqueue(chunk);\n },\n close() {\n // the generator stream controller is automatically closed when it's consumed\n },\n }),\n ),\n toolResultsStream.pipeTo(\n new WritableStream({\n write(chunk) {\n controller.enqueue(chunk);\n },\n close() {\n controller.close();\n },\n }),\n ),\n ]);\n },\n });\n}\n","import { delay as originalDelay } from '../../util/delay';\nimport { CoreTool } from '../tool/tool';\nimport { TextStreamPart } from './stream-text-result';\n\n/**\n * Smooths text streaming output.\n *\n * @param delayInMs - The delay in milliseconds between each chunk. Defaults to 10ms.\n * @returns A transform stream that smooths text streaming output.\n */\nexport function smoothStream<TOOLS extends Record<string, CoreTool>>({\n delayInMs = 10,\n _internal: { delay = originalDelay } = {},\n}: {\n delayInMs?: number;\n\n /**\n * Internal. For test use only. 
May change without notice.\n */\n _internal?: {\n delay?: (delayInMs: number) => Promise<void>;\n };\n} = {}): (options: {\n tools: TOOLS;\n}) => TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>> {\n let buffer = '';\n\n return () =>\n new TransformStream<TextStreamPart<TOOLS>, TextStreamPart<TOOLS>>({\n async transform(chunk, controller) {\n if (chunk.type === 'step-finish') {\n if (buffer.length > 0) {\n controller.enqueue({ type: 'text-delta', textDelta: buffer });\n buffer = '';\n }\n\n controller.enqueue(chunk);\n return;\n }\n\n if (chunk.type !== 'text-delta') {\n controller.enqueue(chunk);\n return;\n }\n\n buffer += chunk.textDelta;\n\n // Stream out complete words including their optional leading\n // and required trailing whitespace sequences\n const regexp = /\\s*\\S+\\s+/m;\n while (regexp.test(buffer)) {\n const chunk = buffer.match(regexp)![0];\n controller.enqueue({ type: 'text-delta', textDelta: chunk });\n buffer = buffer.slice(chunk.length);\n\n if (delayInMs > 0) {\n await delay(delayInMs);\n }\n }\n },\n });\n}\n","import { LanguageModelV1, LanguageModelV1CallOptions } from '@ai-sdk/provider';\nimport { Experimental_LanguageModelV1Middleware } from './language-model-v1-middleware';\n\n/**\n * Wraps a LanguageModelV1 instance with middleware functionality.\n * This function allows you to apply middleware to transform parameters,\n * wrap generate operations, and wrap stream operations of a language model.\n *\n * @param options - Configuration options for wrapping the language model.\n * @param options.model - The original LanguageModelV1 instance to be wrapped.\n * @param options.middleware - The middleware to be applied to the language model.\n * @param options.modelId - Optional custom model ID to override the original model's ID.\n * @param options.providerId - Optional custom provider ID to override the original model's provider.\n * @returns A new LanguageModelV1 instance with middleware applied.\n */\nexport const experimental_wrapLanguageModel = ({\n model,\n middleware: { transformParams, wrapGenerate, wrapStream },\n modelId,\n providerId,\n}: {\n model: LanguageModelV1;\n middleware: Experimental_LanguageModelV1Middleware;\n modelId?: string;\n providerId?: string;\n}): LanguageModelV1 => {\n async function doTransform({\n params,\n type,\n }: {\n params: LanguageModelV1CallOptions;\n type: 'generate' | 'stream';\n }) {\n return transformParams ? await transformParams({ params, type }) : params;\n }\n\n return {\n specificationVersion: 'v1',\n\n provider: providerId ?? model.provider,\n modelId: modelId ?? model.modelId,\n\n defaultObjectGenerationMode: model.defaultObjectGenerationMode,\n supportsImageUrls: model.supportsImageUrls,\n supportsUrl: model.supportsUrl,\n supportsStructuredOutputs: model.supportsStructuredOutputs,\n\n async doGenerate(\n params: LanguageModelV1CallOptions,\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const transformedParams = await doTransform({ params, type: 'generate' });\n const doGenerate = async () => model.doGenerate(transformedParams);\n return wrapGenerate\n ? wrapGenerate({ doGenerate, params: transformedParams, model })\n : doGenerate();\n },\n\n async doStream(\n params: LanguageModelV1CallOptions,\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const transformedParams = await doTransform({ params, type: 'stream' });\n const doStream = async () => model.doStream(transformedParams);\n return wrapStream\n ? 
wrapStream({ doStream, params: transformedParams, model })\n : doStream();\n },\n };\n};\n","import { EmbeddingModelV1, LanguageModelV1 } from '@ai-sdk/provider';\nimport { Provider } from '../types';\nimport { NoSuchModelError } from '@ai-sdk/provider';\n\n/**\n * Creates a custom provider with specified language models, text embedding models, and an optional fallback provider.\n *\n * @param {Object} options - The options for creating the custom provider.\n * @param {Record<string, LanguageModelV1>} [options.languageModels] - A record of language models, where keys are model IDs and values are LanguageModelV1 instances.\n * @param {Record<string, EmbeddingModelV1<string>>} [options.textEmbeddingModels] - A record of text embedding models, where keys are model IDs and values are EmbeddingModelV1<string> instances.\n * @param {Provider} [options.fallbackProvider] - An optional fallback provider to use when a requested model is not found in the custom provider.\n * @returns {Provider} A Provider object with languageModel and textEmbeddingModel methods.\n *\n * @throws {NoSuchModelError} Throws when a requested model is not found and no fallback provider is available.\n */\nexport function experimental_customProvider({\n languageModels,\n textEmbeddingModels,\n fallbackProvider,\n}: {\n languageModels?: Record<string, LanguageModelV1>;\n textEmbeddingModels?: Record<string, EmbeddingModelV1<string>>;\n fallbackProvider?: Provider;\n}): Provider {\n return {\n languageModel(modelId: string): LanguageModelV1 {\n if (languageModels != null && modelId in languageModels) {\n return languageModels[modelId];\n }\n\n if (fallbackProvider) {\n return fallbackProvider.languageModel(modelId);\n }\n\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' });\n },\n\n textEmbeddingModel(modelId: string): EmbeddingModelV1<string> {\n if (textEmbeddingModels != null && modelId in textEmbeddingModels) {\n return textEmbeddingModels[modelId];\n }\n\n if (fallbackProvider) {\n return fallbackProvider.textEmbeddingModel(modelId);\n }\n\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });\n },\n };\n}\n","import { AISDKError, NoSuchModelError } from '@ai-sdk/provider';\n\nconst name = 'AI_NoSuchProviderError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class NoSuchProviderError extends NoSuchModelError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly providerId: string;\n readonly availableProviders: string[];\n\n constructor({\n modelId,\n modelType,\n providerId,\n availableProviders,\n message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`,\n }: {\n modelId: string;\n modelType: 'languageModel' | 'textEmbeddingModel';\n providerId: string;\n availableProviders: string[];\n message?: string;\n }) {\n super({ errorName: name, modelId, modelType, message });\n\n this.providerId = providerId;\n this.availableProviders = availableProviders;\n }\n\n static isInstance(error: unknown): error is NoSuchProviderError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { NoSuchModelError } from '@ai-sdk/provider';\nimport { EmbeddingModel, LanguageModel, Provider } from '../types';\nimport { NoSuchProviderError } from './no-such-provider-error';\n\n/**\n * Creates a registry for the given providers.\n */\nexport function experimental_createProviderRegistry(\n providers: Record<string, Provider>,\n): Provider {\n const registry = new DefaultProviderRegistry();\n\n 
for (const [id, provider] of Object.entries(providers)) {\n registry.registerProvider({ id, provider });\n }\n\n return registry;\n}\n\nclass DefaultProviderRegistry implements Provider {\n private providers: Record<string, Provider> = {};\n\n registerProvider({ id, provider }: { id: string; provider: Provider }): void {\n this.providers[id] = provider;\n }\n\n private getProvider(id: string): Provider {\n const provider = this.providers[id];\n\n if (provider == null) {\n throw new NoSuchProviderError({\n modelId: id,\n modelType: 'languageModel',\n providerId: id,\n availableProviders: Object.keys(this.providers),\n });\n }\n\n return provider;\n }\n\n private splitId(\n id: string,\n modelType: 'languageModel' | 'textEmbeddingModel',\n ): [string, string] {\n const index = id.indexOf(':');\n\n if (index === -1) {\n throw new NoSuchModelError({\n modelId: id,\n modelType,\n message:\n `Invalid ${modelType} id for registry: ${id} ` +\n `(must be in the format \"providerId:modelId\")`,\n });\n }\n\n return [id.slice(0, index), id.slice(index + 1)];\n }\n\n languageModel(id: string): LanguageModel {\n const [providerId, modelId] = this.splitId(id, 'languageModel');\n const model = this.getProvider(providerId).languageModel?.(modelId);\n\n if (model == null) {\n throw new NoSuchModelError({ modelId: id, modelType: 'languageModel' });\n }\n\n return model;\n }\n\n textEmbeddingModel(id: string): EmbeddingModel<string> {\n const [providerId, modelId] = this.splitId(id, 'textEmbeddingModel');\n const provider = this.getProvider(providerId);\n\n const model = provider.textEmbeddingModel?.(modelId);\n\n if (model == null) {\n throw new NoSuchModelError({\n modelId: id,\n modelType: 'textEmbeddingModel',\n });\n }\n\n return model;\n }\n\n /**\n * @deprecated Use `textEmbeddingModel` instead.\n */\n textEmbedding(id: string): EmbeddingModel<string> {\n return this.textEmbeddingModel(id);\n }\n}\n","import { Schema } from '@ai-sdk/ui-utils';\nimport { z } from 'zod';\nimport { ToolResultContent } from '../prompt/tool-result-content';\nimport { CoreMessage } from '../prompt/message';\n\ntype Parameters = z.ZodTypeAny | Schema<any>;\n\nexport type inferParameters<PARAMETERS extends Parameters> =\n PARAMETERS extends Schema<any>\n ? PARAMETERS['_type']\n : PARAMETERS extends z.ZodTypeAny\n ? z.infer<PARAMETERS>\n : never;\n\nexport interface ToolExecutionOptions {\n /**\n * The ID of the tool call. You can use it e.g. when sending tool-call related information with stream data.\n */\n toolCallId: string;\n\n /**\n * Messages that were sent to the language model to initiate the response that contained the tool call.\n * The messages **do not** include the system prompt nor the assistant response that contained the tool call.\n */\n messages: CoreMessage[];\n\n /**\n * An optional abort signal that indicates that the overall operation should be aborted.\n */\n abortSignal?: AbortSignal;\n}\n\n/**\nA tool contains the description and the schema of the input that the tool expects.\nThis enables the language model to generate the input.\n\nThe tool can also contain an optional execute function for the actual execution function of the tool.\n */\nexport type CoreTool<PARAMETERS extends Parameters = any, RESULT = any> = {\n /**\nThe schema of the input that the tool expects. 
The language model will use this to generate the input.\nIt is also used to validate the output of the language model.\nUse descriptions to make the input understandable for the language model.\n */\n parameters: PARAMETERS;\n\n /**\nOptional conversion function that maps the tool result to multi-part tool content for LLMs.\n */\n experimental_toToolResultContent?: (result: RESULT) => ToolResultContent;\n\n /**\nAn async function that is called with the arguments from the tool call and produces a result.\nIf not provided, the tool will not be executed automatically.\n\n@args is the input of the tool call.\n@options.abortSignal is a signal that can be used to abort the tool call.\n */\n execute?: (\n args: inferParameters<PARAMETERS>,\n options: ToolExecutionOptions,\n ) => PromiseLike<RESULT>;\n} & (\n | {\n /**\nFunction tool.\n */\n type?: undefined | 'function';\n\n /**\nAn optional description of what the tool does. Will be used by the language model to decide whether to use the tool.\n */\n description?: string;\n }\n | {\n /**\nProvider-defined tool.\n */\n type: 'provider-defined';\n\n /**\nThe ID of the tool. Should follow the format `<provider-name>.<tool-name>`.\n */\n id: `${string}.${string}`;\n\n /**\nThe arguments for configuring the tool. Must match the expected arguments defined by the provider for this tool.\n */\n args: Record<string, unknown>;\n }\n);\n\n/**\nHelper function for inferring the execute args of a tool.\n */\n// Note: special type inference is needed for the execute function args to make sure they are inferred correctly.\nexport function tool<PARAMETERS extends Parameters, RESULT>(\n tool: CoreTool<PARAMETERS, RESULT> & {\n execute: (\n args: inferParameters<PARAMETERS>,\n options: ToolExecutionOptions,\n ) => PromiseLike<RESULT>;\n },\n): CoreTool<PARAMETERS, RESULT> & {\n execute: (\n args: inferParameters<PARAMETERS>,\n options: ToolExecutionOptions,\n ) => PromiseLike<RESULT>;\n};\nexport function tool<PARAMETERS extends Parameters, RESULT>(\n tool: CoreTool<PARAMETERS, RESULT> & {\n execute?: undefined;\n },\n): CoreTool<PARAMETERS, RESULT> & {\n execute: undefined;\n};\nexport function tool<PARAMETERS extends Parameters, RESULT = any>(\n tool: CoreTool<PARAMETERS, RESULT>,\n): CoreTool<PARAMETERS, RESULT> {\n return tool;\n}\n","/**\n * Calculates the cosine similarity between two vectors. 
This is a useful metric for\n * comparing the similarity of two vectors such as embeddings.\n *\n * @param vector1 - The first vector.\n * @param vector2 - The second vector.\n *\n * @returns The cosine similarity between vector1 and vector2.\n * @throws {Error} If the vectors do not have the same length.\n */\nexport function cosineSimilarity(vector1: number[], vector2: number[]) {\n if (vector1.length !== vector2.length) {\n throw new Error(\n `Vectors must have the same length (vector1: ${vector1.length} elements, vector2: ${vector2.length} elements)`,\n );\n }\n\n return (\n dotProduct(vector1, vector2) / (magnitude(vector1) * magnitude(vector2))\n );\n}\n\n/**\n * Calculates the dot product of two vectors.\n * @param vector1 - The first vector.\n * @param vector2 - The second vector.\n * @returns The dot product of vector1 and vector2.\n */\nfunction dotProduct(vector1: number[], vector2: number[]) {\n return vector1.reduce(\n (accumulator: number, value: number, index: number) =>\n accumulator + value * vector2[index]!,\n 0,\n );\n}\n\n/**\n * Calculates the magnitude of a vector.\n * @param vector - The vector.\n * @returns The magnitude of the vector.\n */\nfunction magnitude(vector: number[]) {\n return Math.sqrt(dotProduct(vector, vector));\n}\n","import {\n AssistantMessage,\n DataMessage,\n formatAssistantStreamPart,\n} from '@ai-sdk/ui-utils';\n\n/**\nYou can pass the thread and the latest message into the `AssistantResponse`. This establishes the context for the response.\n */\ntype AssistantResponseSettings = {\n /**\nThe thread ID that the response is associated with.\n */\n threadId: string;\n\n /**\nThe ID of the latest message that the response is associated with.\n */\n messageId: string;\n};\n\n/**\nThe process parameter is a callback in which you can run the assistant on threads, and send messages and data messages to the client.\n */\ntype AssistantResponseCallback = (options: {\n /**\nForwards an assistant message (non-streaming) to the client.\n */\n sendMessage: (message: AssistantMessage) => void;\n\n /**\nSend a data message to the client. You can use this to provide information for rendering custom UIs while the assistant is processing the thread.\n */\n sendDataMessage: (message: DataMessage) => void;\n\n /**\nForwards the assistant response stream to the client. 
Returns the `Run` object after it completes, or when it requires an action.\n */\n forwardStream: (stream: any) => Promise<any | undefined>;\n}) => Promise<void>;\n\n/**\nThe `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.\nIt is designed to facilitate streaming assistant responses to the `useAssistant` hook.\nIt receives an assistant thread and a current message, and can send messages and data messages to the client.\n */\nexport function AssistantResponse(\n { threadId, messageId }: AssistantResponseSettings,\n process: AssistantResponseCallback,\n): Response {\n const stream = new ReadableStream({\n async start(controller) {\n const textEncoder = new TextEncoder();\n\n const sendMessage = (message: AssistantMessage) => {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('assistant_message', message),\n ),\n );\n };\n\n const sendDataMessage = (message: DataMessage) => {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('data_message', message),\n ),\n );\n };\n\n const sendError = (errorMessage: string) => {\n controller.enqueue(\n textEncoder.encode(formatAssistantStreamPart('error', errorMessage)),\n );\n };\n\n const forwardStream = async (stream: any) => {\n let result: any | undefined = undefined;\n\n for await (const value of stream) {\n switch (value.event) {\n case 'thread.message.created': {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('assistant_message', {\n id: value.data.id,\n role: 'assistant',\n content: [{ type: 'text', text: { value: '' } }],\n }),\n ),\n );\n break;\n }\n\n case 'thread.message.delta': {\n const content = value.data.delta.content?.[0];\n\n if (content?.type === 'text' && content.text?.value != null) {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('text', content.text.value),\n ),\n );\n }\n\n break;\n }\n\n case 'thread.run.completed':\n case 'thread.run.requires_action': {\n result = value.data;\n break;\n }\n }\n }\n\n return result;\n };\n\n // send the threadId and messageId as the first message:\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart('assistant_control_data', {\n threadId,\n messageId,\n }),\n ),\n );\n\n try {\n await process({\n sendMessage,\n sendDataMessage,\n forwardStream,\n });\n } catch (error) {\n sendError((error as any).message ?? 
`${error}`);\n } finally {\n controller.close();\n }\n },\n pull(controller) {},\n cancel() {},\n });\n\n return new Response(stream, {\n status: 200,\n headers: {\n 'Content-Type': 'text/plain; charset=utf-8',\n },\n });\n}\n","import { formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { DataStreamWriter } from '../core/data-stream/data-stream-writer';\nimport { mergeStreams } from '../core/util/merge-streams';\nimport { prepareResponseHeaders } from '../core/util/prepare-response-headers';\nimport {\n createCallbacksTransformer,\n StreamCallbacks,\n} from './stream-callbacks';\nimport { StreamData } from './stream-data';\n\ntype LangChainImageDetail = 'auto' | 'low' | 'high';\n\ntype LangChainMessageContentText = {\n type: 'text';\n text: string;\n};\n\ntype LangChainMessageContentImageUrl = {\n type: 'image_url';\n image_url:\n | string\n | {\n url: string;\n detail?: LangChainImageDetail;\n };\n};\n\ntype LangChainMessageContentComplex =\n | LangChainMessageContentText\n | LangChainMessageContentImageUrl\n | (Record<string, any> & {\n type?: 'text' | 'image_url' | string;\n })\n | (Record<string, any> & {\n type?: never;\n });\n\ntype LangChainMessageContent = string | LangChainMessageContentComplex[];\n\ntype LangChainAIMessageChunk = {\n content: LangChainMessageContent;\n};\n\n// LC stream event v2\ntype LangChainStreamEvent = {\n event: string;\n data: any;\n};\n\nfunction toDataStreamInternal(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n callbacks?: StreamCallbacks,\n) {\n return stream\n .pipeThrough(\n new TransformStream<\n LangChainStreamEvent | LangChainAIMessageChunk | string\n >({\n transform: async (value, controller) => {\n // text stream:\n if (typeof value === 'string') {\n controller.enqueue(value);\n return;\n }\n\n // LC stream events v2:\n if ('event' in value) {\n // chunk is AIMessage Chunk for on_chat_model_stream event:\n if (value.event === 'on_chat_model_stream') {\n forwardAIMessageChunk(\n value.data?.chunk as LangChainAIMessageChunk,\n controller,\n );\n }\n return;\n }\n\n // AI Message chunk stream:\n forwardAIMessageChunk(value, controller);\n },\n }),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(new TextDecoderStream())\n .pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(formatDataStreamPart('text', chunk));\n },\n }),\n );\n}\n\n/**\nConverts LangChain output streams to an AI SDK Data Stream.\n\nThe following streams are supported:\n- `LangChainAIMessageChunk` streams (LangChain `model.stream` output)\n- `string` streams (LangChain `StringOutputParser` output)\n */\nexport function toDataStream(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n callbacks?: StreamCallbacks,\n) {\n return toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream(),\n );\n}\n\nexport function toDataStreamResponse(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n options?: {\n init?: ResponseInit;\n data?: StreamData;\n callbacks?: StreamCallbacks;\n },\n) {\n const dataStream = toDataStreamInternal(\n stream,\n options?.callbacks,\n ).pipeThrough(new TextEncoderStream());\n const data = options?.data;\n const init = options?.init;\n\n const responseStream = data\n ? 
mergeStreams(data.stream, dataStream)\n : dataStream;\n\n return new Response(responseStream, {\n status: init?.status ?? 200,\n statusText: init?.statusText,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n });\n}\n\nexport function mergeIntoDataStream(\n stream:\n | ReadableStream<LangChainStreamEvent>\n | ReadableStream<LangChainAIMessageChunk>\n | ReadableStream<string>,\n options: { dataStream: DataStreamWriter; callbacks?: StreamCallbacks },\n) {\n options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));\n}\n\nfunction forwardAIMessageChunk(\n chunk: LangChainAIMessageChunk,\n controller: TransformStreamDefaultController<any>,\n) {\n if (typeof chunk.content === 'string') {\n controller.enqueue(chunk.content);\n } else {\n const content: LangChainMessageContentComplex[] = chunk.content;\n for (const item of content) {\n if (item.type === 'text') {\n controller.enqueue(item.text);\n }\n }\n }\n}\n","/**\n * Configuration options and helper callback methods for stream lifecycle events.\n */\nexport interface StreamCallbacks {\n /** `onStart`: Called once when the stream is initialized. */\n onStart?: () => Promise<void> | void;\n\n /**\n * `onCompletion`: Called for each tokenized message.\n *\n * @deprecated Use `onFinal` instead.\n */\n onCompletion?: (completion: string) => Promise<void> | void;\n\n /** `onFinal`: Called once when the stream is closed with the final completion message. */\n onFinal?: (completion: string) => Promise<void> | void;\n\n /** `onToken`: Called for each tokenized message. */\n onToken?: (token: string) => Promise<void> | void;\n\n /** `onText`: Called for each text chunk. */\n onText?: (text: string) => Promise<void> | void;\n}\n\n/**\n * Creates a transform stream that encodes input messages and invokes optional callback functions.\n * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.\n * - `onStart`: Called once when the stream is initialized.\n * - `onToken`: Called for each tokenized message.\n * - `onCompletion`: Called every time a completion message is received. This can occur multiple times when using e.g. 
OpenAI functions\n * - `onFinal`: Called once when the stream is closed with the final completion message.\n *\n * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.\n *\n * @param {StreamCallbacks} [callbacks] - An object containing the callback functions.\n * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.\n *\n * @example\n * const callbacks = {\n * onStart: async () => console.log('Stream started'),\n * onToken: async (token) => console.log(`Token: ${token}`),\n * onCompletion: async (completion) => console.log(`Completion: ${completion}`)\n * onFinal: async () => data.close()\n * };\n * const transformer = createCallbacksTransformer(callbacks);\n */\nexport function createCallbacksTransformer(\n callbacks: StreamCallbacks | undefined = {},\n): TransformStream<string, Uint8Array> {\n const textEncoder = new TextEncoder();\n let aggregatedResponse = '';\n\n return new TransformStream({\n async start(): Promise<void> {\n if (callbacks.onStart) await callbacks.onStart();\n },\n\n async transform(message, controller): Promise<void> {\n controller.enqueue(textEncoder.encode(message));\n\n aggregatedResponse += message;\n\n if (callbacks.onToken) await callbacks.onToken(message);\n if (callbacks.onText && typeof message === 'string') {\n await callbacks.onText(message);\n }\n },\n\n async flush(): Promise<void> {\n if (callbacks.onCompletion) {\n await callbacks.onCompletion(aggregatedResponse);\n }\n if (callbacks.onFinal) {\n await callbacks.onFinal(aggregatedResponse);\n }\n },\n });\n}\n","import { convertAsyncIteratorToReadableStream } from '@ai-sdk/provider-utils';\nimport { formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { DataStreamWriter } from '../core/data-stream/data-stream-writer';\nimport { mergeStreams } from '../core/util/merge-streams';\nimport { prepareResponseHeaders } from '../core/util/prepare-response-headers';\nimport {\n createCallbacksTransformer,\n StreamCallbacks,\n} from './stream-callbacks';\nimport { StreamData } from './stream-data';\n\ntype EngineResponse = {\n delta: string;\n};\n\nfunction toDataStreamInternal(\n stream: AsyncIterable<EngineResponse>,\n callbacks?: StreamCallbacks,\n) {\n const trimStart = trimStartOfStream();\n\n return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]())\n .pipeThrough(\n new TransformStream({\n async transform(message, controller): Promise<void> {\n controller.enqueue(trimStart(message.delta));\n },\n }),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(new TextDecoderStream())\n .pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(formatDataStreamPart('text', chunk));\n },\n }),\n );\n}\n\nexport function toDataStream(\n stream: AsyncIterable<EngineResponse>,\n callbacks?: StreamCallbacks,\n) {\n return toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream(),\n );\n}\n\nexport function toDataStreamResponse(\n stream: AsyncIterable<EngineResponse>,\n options: {\n init?: ResponseInit;\n data?: StreamData;\n callbacks?: StreamCallbacks;\n } = {},\n) {\n const { init, data, callbacks } = options;\n const dataStream = toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream(),\n );\n const responseStream = data\n ? 
mergeStreams(data.stream, dataStream)\n : dataStream;\n\n return new Response(responseStream, {\n status: init?.status ?? 200,\n statusText: init?.statusText,\n headers: prepareResponseHeaders(init?.headers, {\n contentType: 'text/plain; charset=utf-8',\n dataStreamVersion: 'v1',\n }),\n });\n}\n\nexport function mergeIntoDataStream(\n stream: AsyncIterable<EngineResponse>,\n options: {\n dataStream: DataStreamWriter;\n callbacks?: StreamCallbacks;\n },\n) {\n options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));\n}\n\nfunction trimStartOfStream(): (text: string) => string {\n let isStreamStart = true;\n\n return (text: string): string => {\n if (isStreamStart) {\n text = text.trimStart();\n if (text) isStreamStart = false;\n }\n return text;\n };\n}\n","import { JSONValue, formatDataStreamPart } from '@ai-sdk/ui-utils';\nimport { HANGING_STREAM_WARNING_TIME_MS } from '../util/constants';\n\n/**\n * A stream wrapper to send custom JSON-encoded data back to the client.\n *\n * @deprecated Please use `createDataStream`, `createDataStreamResponse`, and `pipeDataStreamToResponse` instead.\n */\nexport class StreamData {\n private encoder = new TextEncoder();\n\n private controller: ReadableStreamController<Uint8Array> | null = null;\n public stream: ReadableStream<Uint8Array>;\n\n private isClosed: boolean = false;\n private warningTimeout: NodeJS.Timeout | null = null;\n\n constructor() {\n const self = this;\n\n this.stream = new ReadableStream({\n start: async controller => {\n self.controller = controller;\n\n // Set a timeout to show a warning if the stream is not closed within 3 seconds\n if (process.env.NODE_ENV === 'development') {\n self.warningTimeout = setTimeout(() => {\n console.warn(\n 'The data stream is hanging. Did you forget to close it with `data.close()`?',\n );\n }, HANGING_STREAM_WARNING_TIME_MS);\n }\n },\n pull: controller => {\n // No-op: we don't need to do anything special on pull\n },\n cancel: reason => {\n this.isClosed = true;\n },\n });\n }\n\n async close(): Promise<void> {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.controller.close();\n this.isClosed = true;\n\n // Clear the warning timeout if the stream is closed\n if (this.warningTimeout) {\n clearTimeout(this.warningTimeout);\n }\n }\n\n append(value: JSONValue): void {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.controller.enqueue(\n this.encoder.encode(formatDataStreamPart('data', [value])),\n );\n }\n\n appendMessageAnnotation(value: JSONValue): void {\n if (this.isClosed) {\n throw new Error('Data Stream has already been closed.');\n }\n\n if (!this.controller) {\n throw new Error('Stream controller is not initialized.');\n }\n\n this.controller.enqueue(\n this.encoder.encode(formatDataStreamPart('message_annotations', [value])),\n );\n }\n}\n","/**\n * Warning time for notifying developers that a stream is hanging in dev mode\n * using a console.warn.\n */\nexport const HANGING_STREAM_WARNING_TIME_MS = 15 * 
1000;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,IAAAA,oBAOO;AAkBP,IAAAC,0BAA2B;;;AC1B3B,IAAAC,mBAA2B;;;ACA3B,sBAAuD;AAGhD,SAAS,iBAAiB;AAAA,EAC/B;AAAA,EACA,UAAU,MAAM;AAAA;AAClB,GAGqC;AACnC,MAAI;AAEJ,QAAM,wBAAyC,CAAC;AAEhD,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,eAAe;AACnB,mBAAa;AAAA,IACf;AAAA,EACF,CAAC;AAED,WAAS,YAAY,MAAwB;AAC3C,QAAI;AACF,iBAAW,QAAQ,IAAI;AAAA,IACzB,SAAS,OAAO;AAAA,IAEhB;AAAA,EACF;AAEA,MAAI;AACF,UAAM,SAAS,QAAQ;AAAA,MACrB,UAAU,MAAM;AACd,wBAAY,sCAAqB,QAAQ,CAAC,IAAI,CAAC,CAAC;AAAA,MAClD;AAAA,MACA,uBAAuB,YAAY;AACjC,wBAAY,sCAAqB,uBAAuB,CAAC,UAAU,CAAC,CAAC;AAAA,MACvE;AAAA,MACA,MAAM,WAAW;AACf,8BAAsB;AAAA,WACnB,YAAY;AACX,kBAAM,SAAS,UAAU,UAAU;AACnC,mBAAO,MAAM;AACX,oBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,kBAAI;AAAM;AACV,0BAAY,KAAK;AAAA,YACnB;AAAA,UACF,GAAG,EAAE,MAAM,WAAS;AAClB,4BAAY,sCAAqB,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,UAC3D,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,QAAQ;AACV,4BAAsB;AAAA,QACpB,OAAO,MAAM,WAAS;AACpB,0BAAY,sCAAqB,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,QAC3D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,oBAAY,sCAAqB,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,EAC3D;AAMA,QAAM,iBAAgC,IAAI,QAAQ,OAAM,YAAW;AACjE,WAAO,sBAAsB,SAAS,GAAG;AACvC,YAAM,sBAAsB,MAAM;AAAA,IACpC;AACA,YAAQ;AAAA,EACV,CAAC;AAED,iBAAe,QAAQ,MAAM;AAC3B,QAAI;AACF,iBAAW,MAAM;AAAA,IACnB,SAAS,OAAO;AAAA,IAEhB;AAAA,EACF,CAAC;AAED,SAAO;AACT;;;ACpFO,SAAS,uBACd,SACA;AAAA,EACE;AAAA,EACA;AACF,GACA;AACA,QAAM,kBAAkB,IAAI,QAAQ,4BAAW,CAAC,CAAC;AAEjD,MAAI,CAAC,gBAAgB,IAAI,cAAc,GAAG;AACxC,oBAAgB,IAAI,gBAAgB,WAAW;AAAA,EACjD;AAEA,MAAI,sBAAsB,QAAW;AACnC,oBAAgB,IAAI,2BAA2B,iBAAiB;AAAA,EAClE;AAEA,SAAO;AACT;;;ACdO,SAAS,yBAAyB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGa;AACX,SAAO,IAAI;AAAA,IACT,iBAAiB,EAAE,SAAS,QAAQ,CAAC,EAAE,YAAY,IAAI,kBAAkB,CAAC;AAAA,IAC1E;AAAA,MACE;AAAA,MACA;AAAA,MACA,SAAS,uBAAuB,SAAS;AAAA,QACvC,aAAa;AAAA,QACb,mBAAmB;AAAA,MACrB,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACzBO,SAAS,2BACd,SACA;AAAA,EACE;AAAA,EACA;AACF,GACA;AACA,QAAM,kBAA8D,CAAC;AAErE,MAAI,WAAW,MAAM;AACnB,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,sBAAgB,GAAG,IAAI;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,gBAAgB,cAAc,KAAK,MAAM;AAC3C,oBAAgB,cAAc,IAAI;AAAA,EACpC;AAEA,MAAI,sBAAsB,QAAW;AACnC,oBAAgB,yBAAyB,IAAI;AAAA,EAC/C;AAEA,SAAO;AACT;;;ACnBO,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAMS;AACP,WAAS,UAAU,0BAAU,KAAK,YAAY,OAAO;AAErD,QAAM,SAAS,OAAO,UAAU;AAChC,QAAM,OAAO,YAAY;AACvB,QAAI;AACF,aAAO,MAAM;AACX,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,YAAI;AAAM;AACV,iBAAS,MAAM,KAAK;AAAA,MACtB;AAAA,IACF,SAAS,OAAO;AACd,YAAM;AAAA,IACR,UAAE;AACA,eAAS,IAAI;AAAA,IACf;AAAA,EACF;AAEA,OAAK;AACP;;;AC9BO,SAAS,yBACd,UACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAIM;AACN,wBAAsB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS,2BAA2B,SAAS;AAAA,MAC3C,aAAa;AAAA,MACb,mBAAmB;AAAA,IACrB,CAAC;AAAA,IACD,QAAQ,iBAAiB,EAAE,SAAS,QAAQ,CAAC,EAAE;AAAA,MAC7C,IAAI,kBAAkB;AAAA,IACxB;AAAA,EACF,CAAC;AACH;;;AC/BA,sBAA2B;AAE3B,IAAM,OAAO;AACb,IAAM,SAAS,mBAAmB,IAAI;AACtC,IAAM,SAAS,OAAO,IAAI,MAAM;AAJhC;AAMO,IAAM,uBAAN,cAAmC,2BAAW;AAAA,EAMnD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAIG;AACD,UAAM;AAAA,MACJ;AAAA,MACA,SAAS,kCAAkC,SAAS,KAAK,OAAO;AAAA,IAClE,CAAC;AAjBH,SAAkB,MAAU;AAmB1B,SAAK,YAAY;AACjB,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,OAAO,WAAW,OAA+C;AAC/D,WAAO,2BAAW,UAAU,OAAO,MAAM;A
AAA,EAC3C;AACF;AA1BoB;;;ACPpB,IAAAC,mBAA6B;AAC7B,4BAA8C;;;ACD9C,eAAsB,MAAM,WAAmC;AAC7D,SAAO,cAAc,SACjB,QAAQ,QAAQ,IAChB,IAAI,QAAQ,aAAW,WAAW,SAAS,SAAS,CAAC;AAC3D;;;ACJA,IAAAC,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAWO,IAAM,aAAN,cAAyB,4BAAW;AAAA,EAQzC,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAIG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAhBzB,SAAkBG,OAAU;AAkB1B,SAAK,SAAS;AACd,SAAK,SAAS;AAGd,SAAK,YAAY,OAAO,OAAO,SAAS,CAAC;AAAA,EAC3C;AAAA,EAEA,OAAO,WAAW,OAAqC;AACrD,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA5BoBE,MAAAD;;;AFCb,IAAM,8BACX,CAAC;AAAA,EACC,aAAa;AAAA,EACb,mBAAmB;AAAA,EACnB,gBAAgB;AAClB,IAAI,CAAC,MACL,OAAe,MACb,6BAA6B,GAAG;AAAA,EAC9B;AAAA,EACA,WAAW;AAAA,EACX;AACF,CAAC;AAEL,eAAe,6BACb,GACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GACA,SAAoB,CAAC,GACJ;AACjB,MAAI;AACF,WAAO,MAAM,EAAE;AAAA,EACjB,SAAS,OAAO;AACd,YAAI,oCAAa,KAAK,GAAG;AACvB,YAAM;AAAA,IACR;AAEA,QAAI,eAAe,GAAG;AACpB,YAAM;AAAA,IACR;AAEA,UAAM,mBAAe,uCAAgB,KAAK;AAC1C,UAAM,YAAY,CAAC,GAAG,QAAQ,KAAK;AACnC,UAAM,YAAY,UAAU;AAE5B,QAAI,YAAY,YAAY;AAC1B,YAAM,IAAI,WAAW;AAAA,QACnB,SAAS,gBAAgB,SAAS,0BAA0B,YAAY;AAAA,QACxE,QAAQ;AAAA,QACR,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,QACE,iBAAiB,SACjB,8BAAa,WAAW,KAAK,KAC7B,MAAM,gBAAgB,QACtB,aAAa,YACb;AACA,YAAM,MAAM,SAAS;AACrB,aAAO;AAAA,QACL;AAAA,QACA,EAAE,YAAY,WAAW,gBAAgB,WAAW,cAAc;AAAA,QAClE;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,GAAG;AACnB,YAAM;AAAA,IACR;AAEA,UAAM,IAAI,WAAW;AAAA,MACnB,SAAS,gBAAgB,SAAS,wCAAwC,YAAY;AAAA,MACtF,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AACF;;;AGzEO,SAAS,eAAe;AAAA,EAC7B;AACF,GAKE;AACA,MAAI,cAAc,MAAM;AACtB,QAAI,CAAC,OAAO,UAAU,UAAU,GAAG;AACjC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,aAAa,GAAG;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,mBAAmB,kCAAc;AAEvC,SAAO;AAAA,IACL,YAAY;AAAA,IACZ,OAAO,4BAA4B,EAAE,YAAY,iBAAiB,CAAC;AAAA,EACrE;AACF;;;ACvCO,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AACF,GAGG;AACD,SAAO;AAAA;AAAA,IAEL,kBAAkB,GAAG,WAAW,IAC9B,uCAAW,eAAc,OAAO,IAAI,UAAU,UAAU,KAAK,EAC/D;AAAA,IACA,iBAAiB,uCAAW;AAAA;AAAA,IAG5B,kBAAkB;AAAA,IAClB,2BAA2B,uCAAW;AAAA,EACxC;AACF;;;AChBO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAKe;AAdf,MAAAE;AAeE,SAAO;AAAA,IACL,qBAAqB,MAAM;AAAA,IAC3B,eAAe,MAAM;AAAA;AAAA,IAGrB,GAAG,OAAO,QAAQ,QAAQ,EAAE,OAAO,CAAC,YAAY,CAAC,KAAK,KAAK,MAAM;AAC/D,iBAAW,eAAe,GAAG,EAAE,IAAI;AACnC,aAAO;AAAA,IACT,GAAG,CAAC,CAAe;AAAA;AAAA,IAGnB,GAAG,OAAO,SAAQA,OAAA,uCAAW,aAAX,OAAAA,OAAuB,CAAC,CAAC,EAAE;AAAA,MAC3C,CAAC,YAAY,CAAC,KAAK,KAAK,MAAM;AAC5B,mBAAW,yBAAyB,GAAG,EAAE,IAAI;AAC7C,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA;AAAA,IAGA,GAAG,OAAO,QAAQ,4BAAW,CAAC,CAAC,EAAE,OAAO,CAAC,YAAY,CAAC,KAAK,KAAK,MAAM;AACpE,UAAI,UAAU,QAAW;AACvB,mBAAW,sBAAsB,GAAG,EAAE,IAAI;AAAA,MAC5C;AACA,aAAO;AAAA,IACT,GAAG,CAAC,CAAe;AAAA,EACrB;AACF;;;AC1CA,iBAA8B;;;ACKvB,IAAM,aAAqB;AAAA,EAChC,YAAkB;AAChB,WAAO;AAAA,EACT;AAAA,EAEA,gBACEC,QACA,MACA,MACA,MACiB;AACjB,QAAI,OAAO,SAAS,YAAY;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AACA,QAAI,OAAO,SAAS,YAAY;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AACA,QAAI,OAAO,SAAS,YAAY;AAC9B,aAAO,KAAK,QAAQ;AAAA,IACtB;AAAA,EACF;AACF;AAEA,IAAM,WAAiB;AAAA,EACrB,cAAc;AACZ,WAAO;AAAA,EACT;AAAA,EACA,eAAe;AACb,WAAO;AAAA,EACT;AAAA,EACA,gBAAgB;AACd,WAAO;AAAA,EACT;AAAA,EACA,WAAW;AACT,WAAO;AAAA,EACT;AAAA,EACA,UAAU;AACR,WAAO;AAAA,EACT;AAAA,EACA,WAAW;AACT,WAAO;AAAA,EACT;AAAA,EACA,YAAY;AACV,WAAO;AAAA,EACT;AAAA,EACA,aAAa;AACX,WAAO;AAAA,EACT;AAAA,EACA,MAAM;AACJ,WAAO;AAAA,EACT;AAAA,EACA,cAAc;AACZ,WAAO;AAAA,EACT;AAAA,EACA,kBAAkB;AAChB,WAAO;AA
AA,EACT;AACF;AAEA,IAAM,kBAA+B;AAAA,EACnC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AACd;;;ADjEO,SAAS,UAAU;AAAA,EACxB,YAAY;AAAA,EACZ;AACF,IAGI,CAAC,GAAW;AACd,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ;AACV,WAAO;AAAA,EACT;AAEA,SAAO,iBAAM,UAAU,IAAI;AAC7B;;;AEnBA,IAAAC,cAAyD;AAElD,SAAS,WAAc;AAAA,EAC5B,MAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,GAMG;AACD,SAAO,OAAO,gBAAgBA,QAAM,EAAE,WAAW,GAAG,OAAM,SAAQ;AAChE,QAAI;AACF,YAAM,SAAS,MAAM,GAAG,IAAI;AAE5B,UAAI,aAAa;AACf,aAAK,IAAI;AAAA,MACX;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI;AACF,YAAI,iBAAiB,OAAO;AAC1B,eAAK,gBAAgB;AAAA,YACnB,MAAM,MAAM;AAAA,YACZ,SAAS,MAAM;AAAA,YACf,OAAO,MAAM;AAAA,UACf,CAAC;AACD,eAAK,UAAU;AAAA,YACb,MAAM,2BAAe;AAAA,YACrB,SAAS,MAAM;AAAA,UACjB,CAAC;AAAA,QACH,OAAO;AACL,eAAK,UAAU,EAAE,MAAM,2BAAe,MAAM,CAAC;AAAA,QAC/C;AAAA,MACF,UAAE;AAEA,aAAK,IAAI;AAAA,MACX;AAEA,YAAM;AAAA,IACR;AAAA,EACF,CAAC;AACH;;;AC5CO,SAAS,0BAA0B;AAAA,EACxC;AAAA,EACA;AACF,GASe;AAEb,OAAI,uCAAW,eAAc,MAAM;AACjC,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,OAAO,QAAQ,UAAU,EAAE,OAAO,CAACC,aAAY,CAAC,KAAK,KAAK,MAAM;AACrE,QAAI,UAAU,QAAW;AACvB,aAAOA;AAAA,IACT;AAGA,QACE,OAAO,UAAU,YACjB,WAAW,SACX,OAAO,MAAM,UAAU,YACvB;AAEA,WAAI,uCAAW,kBAAiB,OAAO;AACrC,eAAOA;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,MAAM;AAE3B,aAAO,WAAW,SACdA,cACA,EAAE,GAAGA,aAAY,CAAC,GAAG,GAAG,OAAO;AAAA,IACrC;AAGA,QACE,OAAO,UAAU,YACjB,YAAY,SACZ,OAAO,MAAM,WAAW,YACxB;AAEA,WAAI,uCAAW,mBAAkB,OAAO;AACtC,eAAOA;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,OAAO;AAE5B,aAAO,WAAW,SACdA,cACA,EAAE,GAAGA,aAAY,CAAC,GAAG,GAAG,OAAO;AAAA,IACrC;AAGA,WAAO,EAAE,GAAGA,aAAY,CAAC,GAAG,GAAG,MAAM;AAAA,EACvC,GAAG,CAAC,CAAC;AACP;;;AC3CA,eAAsB,MAAa;AAAA,EACjC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,wBAAwB;AAC1B,GAiCgC;AAC9B,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE1E,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,WAAW;AAAA,EACzB,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB,EAAE,aAAa,YAAY,UAAU,CAAC;AAAA,QAC/D,GAAG;AAAA,QACH,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE;AAAA,MACnD;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAChB,YAAM,EAAE,WAAW,OAAO,YAAY,IAAI,MAAM;AAAA,QAAM;AAAA;AAAA,UAEpD,WAAW;AAAA,YACT,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA;AAAA,gBAEH,aAAa,EAAE,OAAO,MAAM,CAAC,KAAK,UAAU,KAAK,CAAC,EAAE;AAAA,cACtD;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAM,gBAAe;AAvGnC,kBAAAC;AAwGY,oBAAM,gBAAgB,MAAM,MAAM,QAAQ;AAAA,gBACxC,QAAQ,CAAC,KAAK;AAAA,gBACd;AAAA,gBACA;AAAA,cACF,CAAC;AAED,oBAAMC,aAAY,cAAc,WAAW,CAAC;AAC5C,oBAAMC,UAAQF,OAAA,cAAc,UAAd,OAAAA,OAAuB,EAAE,QAAQ,IAAI;AAEnD,0BAAY;AAAA,gBACV,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,iBAAiB;AAAA,sBACf,QAAQ,MACN,cAAc,WAAW;AAAA,wBAAI,CAAAC,eAC3B,KAAK,UAAUA,UAAS;AAAA,sBAC1B;AAAA,oBACJ;AAAA,oBACA,mBAAmBC,OAAM;AAAA,kBAC3B;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO;AAAA,gBACL,WAAAD;AAAA,gBACA,OAAAC;AAAA,gBACA,aAAa,cAAc;AAAA,cAC7B;AAAA,YACF;AAAA,UACF,CAAC;AAAA;AAAA,MACH;AAEA,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,gBAAgB,EAAE,QAAQ,MAAM,KAAK,UAAU,SAAS,EAAE;AAAA,YAC1D,mBAAmB,MAAM;AAAA,UAC3B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,mBAAmB,EAAE,OAAO,WAAW,OAAO,YAAY,CAAC;AAAA,IACxE;AAAA,EACF,CAAC;AACH;AAEA,IAAM,qBAAN,MAA8D;AAAA,EAM5D,YAAY,SAKT;AACD,SAAK,QAAQ,QAAQ;AACrB,SAAK,YAAY,QAAQ;AACzB,SAAK,QAAQ,QAAQ;AACrB,SAAK,cAAc,QAAQ;AAAA,EAC7B;AACF;;;ACjKO,SAAS,WAAc,OAAY,WAA0B;AAClE,MAAI,aAAa,GAAG;AAClB,UAAM,IAAI,MAAM,kCAAkC;AAAA,EACpD;AAEA,QAAM,SAAS,CAAC;AAChB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AA
ChD,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC3C;AAEA,SAAO;AACT;;;ACQA,eAAsB,UAAiB;AAAA,EACrC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,wBAAwB;AAC1B,GAiCoC;AAClC,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE1E,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,WAAW;AAAA,EACzB,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB,EAAE,aAAa,gBAAgB,UAAU,CAAC;AAAA,QACnE,GAAG;AAAA;AAAA,QAEH,aAAa;AAAA,UACX,OAAO,MAAM,OAAO,IAAI,WAAS,KAAK,UAAU,KAAK,CAAC;AAAA,QACxD;AAAA,MACF;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAChB,YAAM,uBAAuB,MAAM;AAInC,UAAI,wBAAwB,MAAM;AAChC,cAAM,EAAE,YAAAC,aAAY,MAAM,IAAI,MAAM,MAAM,MAAM;AAE9C,iBAAO,WAAW;AAAA,YAChB,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA;AAAA,gBAEH,aAAa;AAAA,kBACX,OAAO,MAAM,OAAO,IAAI,WAAS,KAAK,UAAU,KAAK,CAAC;AAAA,gBACxD;AAAA,cACF;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAM,gBAAe;AAtHrC,kBAAAC;AAuHc,oBAAM,gBAAgB,MAAM,MAAM,QAAQ;AAAA,gBACxC;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,CAAC;AAED,oBAAMD,cAAa,cAAc;AACjC,oBAAME,UAAQD,OAAA,cAAc,UAAd,OAAAA,OAAuB,EAAE,QAAQ,IAAI;AAEnD,0BAAY;AAAA,gBACV,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,iBAAiB;AAAA,sBACf,QAAQ,MACND,YAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,oBACzD;AAAA,oBACA,mBAAmBE,OAAM;AAAA,kBAC3B;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO,EAAE,YAAAF,aAAY,OAAAE,OAAM;AAAA,YAC7B;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED,aAAK;AAAA,UACH,0BAA0B;AAAA,YACxB;AAAA,YACA,YAAY;AAAA,cACV,iBAAiB;AAAA,gBACf,QAAQ,MACNF,YAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,cACzD;AAAA,cACA,mBAAmB,MAAM;AAAA,YAC3B;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO,IAAI,uBAAuB,EAAE,QAAQ,YAAAA,aAAY,MAAM,CAAC;AAAA,MACjE;AAGA,YAAM,cAAc,WAAW,QAAQ,oBAAoB;AAG3D,YAAM,aAA+B,CAAC;AACtC,UAAI,SAAS;AAEb,iBAAW,SAAS,aAAa;AAC/B,cAAM,EAAE,YAAY,oBAAoB,MAAM,IAAI,MAAM,MAAM,MAAM;AAElE,iBAAO,WAAW;AAAA,YAChB,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA;AAAA,gBAEH,aAAa;AAAA,kBACX,OAAO,MAAM,MAAM,IAAI,WAAS,KAAK,UAAU,KAAK,CAAC;AAAA,gBACvD;AAAA,cACF;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAM,gBAAe;AA7LrC,kBAAAC;AA8Lc,oBAAM,gBAAgB,MAAM,MAAM,QAAQ;AAAA,gBACxC,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF,CAAC;AAED,oBAAMD,cAAa,cAAc;AACjC,oBAAME,UAAQD,OAAA,cAAc,UAAd,OAAAA,OAAuB,EAAE,QAAQ,IAAI;AAEnD,0BAAY;AAAA,gBACV,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,iBAAiB;AAAA,sBACf,QAAQ,MACND,YAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,oBACzD;AAAA,oBACA,mBAAmBE,OAAM;AAAA,kBAC3B;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO,EAAE,YAAAF,aAAY,OAAAE,OAAM;AAAA,YAC7B;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED,mBAAW,KAAK,GAAG,kBAAkB;AACrC,kBAAU,MAAM;AAAA,MAClB;AAEA,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,iBAAiB;AAAA,cACf,QAAQ,MACN,WAAW,IAAI,eAAa,KAAK,UAAU,SAAS,CAAC;AAAA,YACzD;AAAA,YACA,mBAAmB;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,uBAAuB;AAAA,QAChC;AAAA,QACA;AAAA,QACA,OAAO,EAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAEA,IAAM,yBAAN,MAAsE;AAAA,EAKpE,YAAY,SAIT;AACD,SAAK,SAAS,QAAQ;AACtB,SAAK,aAAa,QAAQ;AAC1B,SAAK,QAAQ,QAAQ;AAAA,EACvB;AACF;;;AChQA,IAAAC,yBAA0C;AAmB1C,eAAsB,cAAc;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AACF,GAsDiC;AAC/B,QAAM,EAAE,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE9D,QAAM,EAAE,OAAO,IAAI,MAAM;AAAA,IAAM,MAC7B,MAAM,WAAW;AAAA,MACf;AAAA,MACA,GAAG,gBAAK;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA,iBAAiB,4CAAmB,CAAC;AAAA,IACvC,CAAC;AAAA,EACH;AAE
A,SAAO,IAAI,2BAA2B,EAAE,cAAc,OAAO,CAAC;AAChE;AAEA,IAAM,6BAAN,MAAgE;AAAA,EAG9D,YAAY,SAA0C;AACpD,SAAK,SAAS,QAAQ,aAAa,IAAI,aAAW;AAAA,MAChD;AAAA,MACA,IAAI,aAAa;AACf,mBAAO,kDAA0B,KAAK,MAAM;AAAA,MAC9C;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,OAAO,CAAC;AAAA,EACtB;AACF;;;AClHA,IAAAC,yBAAiD;;;ACDjD,IAAAC,mBAA2B;AAI3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AANhC,IAAAE;AAmBO,IAAM,yBAAN,cAAqC,4BAAW;AAAA,EAkBrD,YAAY;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,MAAAC;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAMG;AACD,UAAM,EAAE,MAAAJ,OAAM,SAAS,MAAM,CAAC;AA9BhC,SAAkBG,OAAU;AAgC1B,SAAK,OAAOC;AACZ,SAAK,WAAW;AAChB,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,OAAO,WAAW,OAAiD;AACjE,WAAO,4BAAW,UAAU,OAAOH,OAAM;AAAA,EAC3C;AACF;AAxCoBE,MAAAD;;;ACpBpB,IAAAG,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,gBAAN,cAA4B,4BAAW;AAAA,EAO5C,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,SAAS,OACf,sBAAsB,GAAG,KAAK,UAAU,IAAI,UAAU,KACtD,sBAAsB,GAAG,KAAK,KAAK;AAAA,EACzC,GAMG;AACD,UAAM,EAAE,MAAAH,OAAM,SAAS,MAAM,CAAC;AArBhC,SAAkBG,OAAU;AAuB1B,SAAK,MAAM;AACX,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,OAAO,WAAW,OAAwC;AACxD,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA/BoBE,MAAAD;;;ACLpB,eAAsB,SAAS;AAAA,EAC7B;AAAA,EACA,sBAAsB;AACxB,GAMG;AAXH,MAAAE;AAYE,QAAM,UAAU,IAAI,SAAS;AAC7B,MAAI;AACF,UAAM,WAAW,MAAM,oBAAoB,OAAO;AAElD,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,cAAc;AAAA,QACtB,KAAK;AAAA,QACL,YAAY,SAAS;AAAA,QACrB,YAAY,SAAS;AAAA,MACvB,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,MAAM,IAAI,WAAW,MAAM,SAAS,YAAY,CAAC;AAAA,MACjD,WAAUA,OAAA,SAAS,QAAQ,IAAI,cAAc,MAAnC,OAAAA,OAAwC;AAAA,IACpD;AAAA,EACF,SAAS,OAAO;AACd,QAAI,cAAc,WAAW,KAAK,GAAG;AACnC,YAAM;AAAA,IACR;AAEA,UAAM,IAAI,cAAc,EAAE,KAAK,SAAS,OAAO,MAAM,CAAC;AAAA,EACxD;AACF;;;ACnCA,IAAM,qBAAqB;AAAA,EACzB,EAAE,UAAU,aAAsB,OAAO,CAAC,IAAM,IAAM,EAAI,EAAE;AAAA,EAC5D,EAAE,UAAU,aAAsB,OAAO,CAAC,KAAM,IAAM,IAAM,EAAI,EAAE;AAAA,EAClE,EAAE,UAAU,cAAuB,OAAO,CAAC,KAAM,GAAI,EAAE;AAAA,EACvD,EAAE,UAAU,cAAuB,OAAO,CAAC,IAAM,IAAM,IAAM,EAAI,EAAE;AACrE;AAEO,SAAS,oBACd,OACqE;AACrE,aAAW,EAAE,OAAO,SAAS,KAAK,oBAAoB;AACpD,QACE,MAAM,UAAU,MAAM,UACtB,MAAM,MAAM,CAAC,MAAM,UAAU,MAAM,KAAK,MAAM,IAAI,GAClD;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;;;ACpBA,IAAAC,yBAGO;;;ACHP,IAAAC,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,0BAAN,cAAsC,4BAAW;AAAA,EAKtD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA,UAAU,+FAA+F,OAAO,OAAO;AAAA,EACzH,GAIG;AACD,UAAM,EAAE,MAAAH,OAAM,SAAS,MAAM,CAAC;AAbhC,SAAkBG,OAAU;AAe1B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,OAAO,WAAW,OAAkD;AAClE,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AArBoBE,MAAAD;;;ADFpB,iBAAkB;AAUX,IAAM,oBAA4C,aAAE,MAAM;AAAA,EAC/D,aAAE,OAAO;AAAA,EACT,aAAE,WAAW,UAAU;AAAA,EACvB,aAAE,WAAW,WAAW;AAAA,EACxB,aAAE;AAAA;AAAA,IAEA,CAAC,UAAiC;AArBtC,UAAAE,MAAA;AAsBM,oBAAAA,OAAA,WAAW,WAAX,gBAAAA,KAAmB,SAAS,WAA5B,YAAsC;AAAA;AAAA,IACxC,EAAE,SAAS,mBAAmB;AAAA,EAChC;AACF,CAAC;AAQM,SAAS,iCAAiC,SAA8B;AAC7E,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,mBAAmB,aAAa;AAClC,eAAO,kDAA0B,IAAI,WAAW,OAAO,CAAC;AAAA,EAC1D;AAEA,aAAO,kDAA0B,OAAO;AAC1C;AAQO,SAAS,+BACd,SACY;AACZ,MAAI,mBAAmB,YAAY;AACjC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,YAAY,UAAU;AAC/B,QAAI;AACF,iBAAO,kDAA0B,OAAO;AAAA,IAC1C,SAAS,OAAO;AACd,YAAM,IAAI,wBAAwB;AAAA,QAChC,SACE;AAAA,QACF;AAAA,QACA,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,mBAAmB,aAAa;AAClC,WAAO,IAAI,WAAW,OAAO;AAAA,EAC/B;AAEA,QAAM,IAAI,wBAAwB,EAAE,QAAQ,CAAC;AAC/C;AAQO,SAAS,wBAAwB,YAAgC;AACtE,MAAI;AACF,WAAO,IAAI,YAAY,EAAE,OAAO,UAAU;AAAA,EAC5C,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,m
CAAmC;AAAA,EACrD;AACF;;;AE1FA,IAAAC,mBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,0BAAN,cAAsC,4BAAW;AAAA,EAKtD,YAAY;AAAA,IACV;AAAA,IACA,UAAU,0BAA0B,IAAI;AAAA,EAC1C,GAGG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAXzB,SAAkBG,OAAU;AAa1B,SAAK,OAAO;AAAA,EACd;AAAA,EAEA,OAAO,WAAW,OAAkD;AAClE,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AAnBoBE,MAAAD;;;ACPb,SAAS,aAAa,SAG3B;AACA,MAAI;AACF,UAAM,CAAC,QAAQ,aAAa,IAAI,QAAQ,MAAM,GAAG;AACjD,WAAO;AAAA,MACL,UAAU,OAAO,MAAM,GAAG,EAAE,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC3C;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,WAAO;AAAA,MACL,UAAU;AAAA,MACV,eAAe;AAAA,IACjB;AAAA,EACF;AACF;;;ACIA,eAAsB,6BAA6B;AAAA,EACjD;AAAA,EACA,yBAAyB;AAAA,EACzB,mBAAmB,MAAM;AAAA,EACzB,yBAAyB;AAC3B,GAKmC;AACjC,QAAM,mBAAmB,MAAM;AAAA,IAC7B,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAI,OAAO,UAAU,OACjB,CAAC,EAAE,MAAM,UAAmB,SAAS,OAAO,OAAO,CAAC,IACpD,CAAC;AAAA,IACL,GAAG,OAAO,SAAS;AAAA,MAAI,aACrB,8BAA8B,SAAS,gBAAgB;AAAA,IACzD;AAAA,EACF;AACF;AASO,SAAS,8BACd,SACA,kBAIwB;AACxB,QAAM,OAAO,QAAQ;AACrB,UAAQ,MAAM;AAAA,IACZ,KAAK,UAAU;AACb,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ;AAAA,QACjB,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AACX,UAAI,OAAO,QAAQ,YAAY,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AAAA,UACjD,kBAAkB,QAAQ;AAAA,QAC5B;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QACd,IAAI,UAAQ,+BAA+B,MAAM,gBAAgB,CAAC,EAElE,OAAO,UAAQ,KAAK,SAAS,UAAU,KAAK,SAAS,EAAE;AAAA,QAC1D,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,KAAK,aAAa;AAChB,UAAI,OAAO,QAAQ,YAAY,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AAAA,UACjD,kBAAkB,QAAQ;AAAA,QAC5B;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QACd;AAAA;AAAA,UAEC,UAAQ,KAAK,SAAS,UAAU,KAAK,SAAS;AAAA,QAChD,EACC,IAAI,UAAQ;AACX,gBAAM,EAAE,+BAA+B,GAAG,KAAK,IAAI;AACnD,iBAAO;AAAA,YACL,GAAG;AAAA,YACH,kBAAkB;AAAA,UACpB;AAAA,QACF,CAAC;AAAA,QACH,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AACX,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ,IAAI,WAAS;AAAA,UACpC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA,UACb,SAAS,KAAK;AAAA,UACd,SAAS,KAAK;AAAA,UACd,kBAAkB,KAAK;AAAA,QACzB,EAAE;AAAA,QACF,kBAAkB,QAAQ;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,wBAAwB,EAAE,MAAM,iBAAiB,CAAC;AAAA,IAC9D;AAAA,EACF;AACF;AAKA,eAAe,eACb,UACA,wBACA,wBACA,kBAC6E;AAC7E,QAAM,OAAO,SACV,OAAO,aAAW,QAAQ,SAAS,MAAM,EACzC,IAAI,aAAW,QAAQ,OAAO,EAC9B;AAAA,IAAO,CAAC,YACP,MAAM,QAAQ,OAAO;AAAA,EACvB,EACC,KAAK,EACL;AAAA,IACC,CAAC,SACC,KAAK,SAAS,WAAW,KAAK,SAAS;AAAA,EAC3C,EAKC;AAAA,IACC,CAAC,SACC,EAAE,KAAK,SAAS,WAAW,2BAA2B;AAAA,EAC1D,EACC,IAAI,UAAS,KAAK,SAAS,UAAU,KAAK,QAAQ,KAAK,IAAK,EAC5D;AAAA,IAAI;AAAA;AAAA,MAEH,OAAO,SAAS,aACf,KAAK,WAAW,OAAO,KAAK,KAAK,WAAW,QAAQ,KACjD,IAAI,IAAI,IAAI,IACZ;AAAA;AAAA,EACN,EACC,OAAO,CAAC,UAAwB,iBAAiB,GAAG,EAIpD,OAAO,SAAO,CAAC,iBAAiB,GAAG,CAAC;AAGvC,QAAM,mBAAmB,MAAM,QAAQ;AAAA,IACrC,KAAK,IAAI,OAAM,SAAQ;AAAA,MACrB;AAAA,MACA,MAAM,MAAM,uBAAuB,EAAE,IAAI,CAAC;AAAA,IAC5C,EAAE;AAAA,EACJ;AAEA,SAAO,OAAO;AAAA,IACZ,iBAAiB,IAAI,CAAC,EAAE,KAAK,KAAK,MAAM,CAAC,IAAI,SAAS,GAAG,IAAI,CAAC;AAAA,EAChE;AACF;AAUA,SAAS,+BACP,MACA,kBAO0B;AArN5B,MAAAE;AAsNE,MAAI,KAAK,SAAS,QAAQ;AACxB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAM,KAAK;AAAA,MACX,kBAAkB,KAAK;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,WAA+B,KAAK;AACxC,MAAI;AACJ,MAAI;AACJ,MAAI;AAEJ,QAAM,OAAO,KAAK;AAClB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,KAAK;AACZ;AAAA,IACF,KAAK;AACH,aAAO,KAAK;AACZ;AAAA,IACF;AACE,YAAM,IAAI,MAAM,0BAA0B,IAAI,EAAE;AAAA,EACpD;AAIA,MAAI;AACF,cAAU,OAAO,
SAAS,WAAW,IAAI,IAAI,IAAI,IAAI;AAAA,EACvD,SAAS,OAAO;AACd,cAAU;AAAA,EACZ;AAKA,MAAI,mBAAmB,KAAK;AAE1B,QAAI,QAAQ,aAAa,SAAS;AAChC,YAAM,EAAE,UAAU,iBAAiB,cAAc,IAAI;AAAA,QACnD,QAAQ,SAAS;AAAA,MACnB;AAEA,UAAI,mBAAmB,QAAQ,iBAAiB,MAAM;AACpD,cAAM,IAAI,MAAM,mCAAmC,IAAI,EAAE;AAAA,MAC3D;AAEA,iBAAW;AACX,uBAAiB,+BAA+B,aAAa;AAAA,IAC/D,OAAO;AAML,YAAM,iBAAiB,iBAAiB,QAAQ,SAAS,CAAC;AAC1D,UAAI,gBAAgB;AAClB,yBAAiB,eAAe;AAChC,iDAAa,eAAe;AAAA,MAC9B,OAAO;AACL,yBAAiB;AAAA,MACnB;AAAA,IACF;AAAA,EACF,OAAO;AAGL,qBAAiB,+BAA+B,OAAO;AAAA,EACzD;AAIA,UAAQ,MAAM;AAAA,IACZ,KAAK,SAAS;AAKZ,UAAI,0BAA0B,YAAY;AACxC,oBAAWA,OAAA,oBAAoB,cAAc,MAAlC,OAAAA,OAAuC;AAAA,MACpD;AACA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,OAAO;AAAA,QACP;AAAA,QACA,kBAAkB,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AAEX,UAAI,YAAY,MAAM;AACpB,cAAM,IAAI,MAAM,oCAAoC;AAAA,MACtD;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MACE,0BAA0B,aACtB,iCAAiC,cAAc,IAC/C;AAAA,QACN;AAAA,QACA,kBAAkB,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AACF;;;ACzTO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAGE;AACA,MAAI,aAAa,MAAM;AACrB,QAAI,CAAC,OAAO,UAAU,SAAS,GAAG;AAChC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,YAAY,GAAG;AACjB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,eAAe,MAAM;AACvB,QAAI,OAAO,gBAAgB,UAAU;AACnC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,mBAAmB,MAAM;AAC3B,QAAI,OAAO,oBAAoB,UAAU;AACvC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,oBAAoB,MAAM;AAC5B,QAAI,OAAO,qBAAqB,UAAU;AACxC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,QAAI,CAAC,OAAO,UAAU,IAAI,GAAG;AAC3B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,aAAa,oCAAe;AAAA,IAC5B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eACE,iBAAiB,QAAQ,cAAc,SAAS,IAC5C,gBACA;AAAA,IACN;AAAA,EACF;AACF;;;AC9GA,IAAAC,mBAAmC;AACnC,IAAAC,yBAAkC;AAClC,IAAAC,cAAkB;;;ACFlB,IAAAC,cAAkB;;;ACClB,IAAAC,cAAkB;;;ACAlB,IAAAC,cAAkB;AAEX,IAAM,kBAAwC,cAAE;AAAA,EAAK,MAC1D,cAAE,MAAM;AAAA,IACN,cAAE,KAAK;AAAA,IACP,cAAE,OAAO;AAAA,IACT,cAAE,OAAO;AAAA,IACT,cAAE,QAAQ;AAAA,IACV,cAAE,OAAO,cAAE,OAAO,GAAG,eAAe;AAAA,IACpC,cAAE,MAAM,eAAe;AAAA,EACzB,CAAC;AACH;;;ADDO,IAAM,yBAAsD,cAAE;AAAA,EACnE,cAAE,OAAO;AAAA,EACT,cAAE,OAAO,cAAE,OAAO,GAAG,eAAe;AACtC;;;AEdA,IAAAC,cAAkB;;;ACAlB,IAAAC,cAAkB;AAcX,IAAM,0BAAwD,cAAE;AAAA,EACrE,cAAE,MAAM;AAAA,IACN,cAAE,OAAO,EAAE,MAAM,cAAE,QAAQ,MAAM,GAAG,MAAM,cAAE,OAAO,EAAE,CAAC;AAAA,IACtD,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,QAAQ,OAAO;AAAA,MACvB,MAAM,cAAE,OAAO;AAAA,MACf,UAAU,cAAE,OAAO,EAAE,SAAS;AAAA,IAChC,CAAC;AAAA,EACH,CAAC;AACH;;;ADOO,IAAM,iBAAsC,cAAE,OAAO;AAAA,EAC1D,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,MAAM,cAAE,OAAO;AAAA,EACf,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AA6BM,IAAM,kBAAwC,cAAE,OAAO;AAAA,EAC5D,MAAM,cAAE,QAAQ,OAAO;AAAA,EACvB,OAAO,cAAE,MAAM,CAAC,mBAAmB,cAAE,WAAW,GAAG,CAAC,CAAC;AAAA,EACrD,UAAU,cAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AA6BM,IAAM,iBAAsC,cAAE,OAAO;AAAA,EAC1D,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,MAAM,cAAE,MAAM,CAAC,mBAAmB,cA
AE,WAAW,GAAG,CAAC,CAAC;AAAA,EACpD,UAAU,cAAE,OAAO;AAAA,EACnB,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AA+BM,IAAM,qBAA8C,cAAE,OAAO;AAAA,EAClE,MAAM,cAAE,QAAQ,WAAW;AAAA,EAC3B,YAAY,cAAE,OAAO;AAAA,EACrB,UAAU,cAAE,OAAO;AAAA,EACnB,MAAM,cAAE,QAAQ;AAClB,CAAC;AAyCM,IAAM,uBAAkD,cAAE,OAAO;AAAA,EACtE,MAAM,cAAE,QAAQ,aAAa;AAAA,EAC7B,YAAY,cAAE,OAAO;AAAA,EACrB,UAAU,cAAE,OAAO;AAAA,EACnB,QAAQ,cAAE,QAAQ;AAAA,EAClB,SAAS,wBAAwB,SAAS;AAAA,EAC1C,SAAS,cAAE,QAAQ,EAAE,SAAS;AAAA,EAC9B,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;;;AHxJM,IAAM,0BAAwD,cAAE,OAAO;AAAA,EAC5E,MAAM,cAAE,QAAQ,QAAQ;AAAA,EACxB,SAAS,cAAE,OAAO;AAAA,EAClB,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAiBM,IAAM,wBAAoD,cAAE,OAAO;AAAA,EACxE,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,SAAS,cAAE,MAAM;AAAA,IACf,cAAE,OAAO;AAAA,IACT,cAAE,MAAM,cAAE,MAAM,CAAC,gBAAgB,iBAAiB,cAAc,CAAC,CAAC;AAAA,EACpE,CAAC;AAAA,EACD,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAsBM,IAAM,6BACX,cAAE,OAAO;AAAA,EACP,MAAM,cAAE,QAAQ,WAAW;AAAA,EAC3B,SAAS,cAAE,MAAM;AAAA,IACf,cAAE,OAAO;AAAA,IACT,cAAE,MAAM,cAAE,MAAM,CAAC,gBAAgB,kBAAkB,CAAC,CAAC;AAAA,EACvD,CAAC;AAAA,EACD,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAsBI,IAAM,wBAAoD,cAAE,OAAO;AAAA,EACxE,MAAM,cAAE,QAAQ,MAAM;AAAA,EACtB,SAAS,cAAE,MAAM,oBAAoB;AAAA,EACrC,+BAA+B,uBAAuB,SAAS;AACjE,CAAC;AAiBM,IAAM,oBAA4C,cAAE,MAAM;AAAA,EAC/D;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;;;AK7IM,SAAS,iBACd,QACsC;AACtC,MAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AAC1B,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,IAAI,kCAAkC;AAErE,MAAI,gBAAgB,KAAK,OAAK,MAAM,uBAAuB,GAAG;AAC5D,WAAO;AAAA,EACT,WACE,gBAAgB;AAAA,IACd,OAAK,MAAM,6BAA6B,MAAM;AAAA,EAChD,GACA;AACA,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,mCACP,SAC2E;AAC3E,MACE,OAAO,YAAY,YACnB,YAAY,SACX,QAAQ,SAAS;AAAA,EAChB,QAAQ,SAAS;AAAA,EACjB,qBAAqB;AAAA,EACrB,8BAA8B,UAChC;AACA,WAAO;AAAA,EACT,WACE,OAAO,YAAY,YACnB,YAAY,QACZ,aAAa,YACZ,MAAM,QAAQ,QAAQ,OAAO;AAAA,EAC5B,mCAAmC,UACrC;AACA,WAAO;AAAA,EACT,WACE,OAAO,YAAY,YACnB,YAAY,QACZ,UAAU,WACV,aAAa,WACb,OAAO,QAAQ,YAAY,YAC3B,CAAC,UAAU,QAAQ,aAAa,MAAM,EAAE,SAAS,QAAQ,IAAI,GAC7D;AACA,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;;;AC5CO,SAAS,mBAAmB,aAA0C;AAd7E,MAAAC,MAAA;AAeE,QAAM,QAAuB,CAAC;AAE9B,aAAW,cAAc,aAAa;AACpC,QAAI;AAEJ,QAAI;AACF,YAAM,IAAI,IAAI,WAAW,GAAG;AAAA,IAC9B,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,gBAAgB,WAAW,GAAG,EAAE;AAAA,IAClD;AAEA,YAAQ,IAAI,UAAU;AAAA,MACpB,KAAK;AAAA,MACL,KAAK,UAAU;AACb,aAAIA,OAAA,WAAW,gBAAX,gBAAAA,KAAwB,WAAW,WAAW;AAChD,gBAAM,KAAK,EAAE,MAAM,SAAS,OAAO,IAAI,CAAC;AAAA,QAC1C,OAAO;AACL,cAAI,CAAC,WAAW,aAAa;AAC3B,kBAAM,IAAI;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,UAAU,WAAW;AAAA,UACvB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,KAAK,SAAS;AACZ,YAAI;AACJ,YAAI;AACJ,YAAI;AAEJ,YAAI;AACF,WAAC,QAAQ,aAAa,IAAI,WAAW,IAAI,MAAM,GAAG;AAClD,qBAAW,OAAO,MAAM,GAAG,EAAE,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,QAC9C,SAAS,OAAO;AACd,gBAAM,IAAI,MAAM,8BAA8B,WAAW,GAAG,EAAE;AAAA,QAChE;AAEA,YAAI,YAAY,QAAQ,iBAAiB,MAAM;AAC7C,gBAAM,IAAI,MAAM,4BAA4B,WAAW,GAAG,EAAE;AAAA,QAC9D;AAEA,aAAI,gBAAW,gBAAX,mBAAwB,WAAW,WAAW;AAChD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,OAAO,+BAA+B,aAAa;AAAA,UACrD,CAAC;AAAA,QACH,YAAW,gBAAW,gBAAX,mBAAwB,WAAW,UAAU;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,cACJ,+BAA+B,aAAa;AAAA,YAC9C;AAAA,UACF,CAAC;AAAA,QACH,OAAO;AACL,cAAI,CAAC,WAAW,aAAa;AAC3B,kBAAM,IAAI;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,UAAU,WAAW;AAAA,UACvB,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,IAAI,MAAM,6BAA6B,IAAI,QAAQ,EAAE;AAAA,MAC7D;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACnGA,IAAAC,mBAA2B;AAG3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAA
O,IAAID,OAAM;AALhC,IAAAE;AAOO,IAAM,yBAAN,cAAqC,4BAAW;AAAA,EAKrD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,EACF,GAGG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAXzB,SAAkBG,OAAU;AAa1B,SAAK,kBAAkB;AAAA,EACzB;AAAA,EAEA,OAAO,WAAW,OAAiD;AACjE,WAAO,4BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AAnBoBE,MAAAD;;;ACEb,SAAS,sBAEd,UAA4B,SAA6B;AAZ3D,MAAAE;AAaE,QAAM,SAAQA,OAAA,mCAAS,UAAT,OAAAA,OAAmB,CAAC;AAClC,QAAM,eAA8B,CAAC;AAErC,aAAW,WAAW,UAAU;AAC9B,UAAM,EAAE,MAAM,SAAS,iBAAiB,yBAAyB,IAC/D;AAEF,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,SAAS,2BACL;AAAA,YACE,EAAE,MAAM,QAAQ,MAAM,QAAQ;AAAA,YAC9B,GAAG,mBAAmB,wBAAwB;AAAA,UAChD,IACA;AAAA,QACN,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,mBAAmB,MAAM;AAC3B,uBAAa,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAChD;AAAA,QACF;AAGA,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,SAAS;AAAA,YACP,EAAE,MAAM,QAAQ,MAAM,QAAQ;AAAA,YAC9B,GAAG,gBAAgB;AAAA,cACjB,CAAC,EAAE,YAAY,UAAU,KAAK,OAAqB;AAAA,gBACjD,MAAM;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAGD,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,SAAS,gBAAgB,IAAI,CAAC,mBAAmC;AAC/D,gBAAI,EAAE,YAAY,iBAAiB;AACjC,oBAAM,IAAI,uBAAuB;AAAA,gBAC/B,iBAAiB;AAAA,gBACjB,SACE,wCACA,KAAK,UAAU,cAAc;AAAA,cACjC,CAAC;AAAA,YACH;AAEA,kBAAM,EAAE,YAAY,UAAU,OAAO,IAAI;AAEzC,kBAAMC,QAAO,MAAM,QAAQ;AAC3B,oBAAOA,SAAA,gBAAAA,MAAM,qCAAoC,OAC7C;AAAA,cACE,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,QAAQA,MAAK,iCAAiC,MAAM;AAAA,cACpD,sBACEA,MAAK,iCAAiC,MAAM;AAAA,YAChD,IACA;AAAA,cACE,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACN,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AAEX;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,uBAAuB;AAAA,UAC/B,iBAAiB;AAAA,UACjB,SAAS,qBAAqB,gBAAgB;AAAA,QAChD,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ATzFO,SAAS,kBAA0D;AAAA,EACxE;AAAA,EACA;AACF,GAGuB;AACrB,MAAI,OAAO,UAAU,QAAQ,OAAO,YAAY,MAAM;AACpD,UAAM,IAAI,oCAAmB;AAAA,MAC3B;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,UAAU,QAAQ,OAAO,YAAY,MAAM;AACpD,UAAM,IAAI,oCAAmB;AAAA,MAC3B;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,MAAI,OAAO,UAAU,QAAQ,OAAO,OAAO,WAAW,UAAU;AAC9D,UAAM,IAAI,oCAAmB;AAAA,MAC3B;AAAA,MACA,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,MAAI,OAAO,UAAU,MAAM;AAEzB,QAAI,OAAO,OAAO,WAAW,UAAU;AACrC,YAAM,IAAI,oCAAmB;AAAA,QAC3B;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ,OAAO;AAAA,MACf,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,SAAS,OAAO;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,YAAY,MAAM;AAC3B,UAAM,aAAa,iBAAiB,OAAO,QAAQ;AAEnD,QAAI,eAAe,SAAS;AAC1B,YAAM,IAAI,oCAAmB;AAAA,QAC3B;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WACJ,eAAe,gBACX,sBAAsB,OAAO,UAAyB;AAAA,MACpD;AAAA,IACF,CAAC,IACA,OAAO;AAEd,UAAM,uBAAmB,0CAAkB;AAAA,MACzC,OAAO;AAAA,MACP,QAAQ,cAAE,MAAM,iBAAiB;AAAA,IACnC,CAAC;AAED,QAAI,CAAC,iBAAiB,SAAS;AAC7B,YAAM,IAAI,oCAAmB;AAAA,QAC3B;AAAA,QACA,SAAS;AAAA,QACT,OAAO,iBAAiB;AAAA,MAC1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,aAAa;AAC/B;;;AUxFO,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA;AACF,GAGuB;AACrB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa,eAAe;AAAA,EAC9B;AACF;AAEO,SAAS,sBACd,QACA,QACoB;AACpB,SAAO;AAAA,IACL,cAAc,OAAO,eAAe,OAAO;AAAA,IAC3C,kBAAkB,OAAO,mBAAmB,OAAO;AAAA,IACnD,aAAa,OAAO,cAAc,OAAO;AAAA,EAC3C;AACF;;;ACnDA,IAAM,wBAAwB;AAC9B,IAAM,wBACJ;AACF,IAAM,yBAAyB;AAExB,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA,eAAe,UAAU,OAAO,wBAAwB;AAAA,EACxD,eAAe,UAAU,OACrB,wBACA;AACN,GAKW;AACT,SAAO;AAAA,IACL,UAAU,QAAQ,OAAO,SAAS,IAAI,SAAS;AAAA,IAC/C,UAAU,QA
AQ,OAAO,SAAS,IAAI,KAAK;AAAA;AAAA,IAC3C;AAAA,IACA,UAAU,OAAO,KAAK,UAAU,MAAM,IAAI;AAAA,IAC1C;AAAA,EACF,EACG,OAAO,UAAQ,QAAQ,IAAI,EAC3B,KAAK,IAAI;AACd;;;AC7BA,IAAAC,oBAQO;AACP,IAAAC,yBAAoD;AACpD,IAAAC,mBAA8C;;;ACRvC,SAAS,0BACd,QACwB;AACxB,QAAM,SAAS,OAAO,YAAY,IAAI,gBAAsB,CAAC;AAE7D,EAAC,OAAkC,OAAO,aAAa,IAAI,MAAM;AAC/D,UAAM,SAAS,OAAO,UAAU;AAChC,WAAO;AAAA,MACL,MAAM,OAAmC;AACvC,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,eAAO,OAAO,EAAE,MAAM,MAAM,OAAO,OAAU,IAAI,EAAE,MAAM,OAAO,MAAM;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ADkCA,IAAM,yBAAsE;AAAA,EAC1E,MAAM;AAAA,EACN,YAAY;AAAA,EAEZ,sBAAsB,EAAE,OAAO,UAAU,GAAG;AAC1C,WAAO,EAAE,SAAS,MAAM,OAAO,EAAE,SAAS,OAAO,UAAU,EAAE;AAAA,EAC/D;AAAA,EAEA,oBACE,OACA,SAK6B;AAC7B,WAAO,UAAU,SACb;AAAA,MACE,SAAS;AAAA,MACT,OAAO,IAAI,uBAAuB;AAAA,QAChC,SAAS;AAAA,QACT,MAAM,QAAQ;AAAA,QACd,UAAU,QAAQ;AAAA,QAClB,OAAO,QAAQ;AAAA,MACjB,CAAC;AAAA,IACH,IACA,EAAE,SAAS,MAAM,MAAM;AAAA,EAC7B;AAAA,EAEA,sBAAsB;AACpB,UAAM,IAAI,gDAA8B;AAAA,MACtC,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,uBAAuB,CAC3B,YACwD;AAAA,EACxD,MAAM;AAAA,EACN,YAAY,OAAO;AAAA,EAEnB,sBAAsB,EAAE,OAAO,UAAU,GAAG;AAC1C,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA;AAAA,QAEL,SAAS;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,oBAAoB,OAAwD;AAC1E,eAAO,0CAAkB,EAAE,OAAO,OAAO,CAAC;AAAA,EAC5C;AAAA,EAEA,sBAAsB;AACpB,UAAM,IAAI,gDAA8B;AAAA,MACtC,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,sBAAsB,CAC1B,WACuE;AAEvE,QAAM,EAAE,SAAS,GAAG,WAAW,IAAI,OAAO;AAE1C,SAAO;AAAA,IACL,MAAM;AAAA;AAAA;AAAA;AAAA,IAKN,YAAY;AAAA,MACV,SAAS;AAAA,MACT,MAAM;AAAA,MACN,YAAY;AAAA,QACV,UAAU,EAAE,MAAM,SAAS,OAAO,WAAW;AAAA,MAC/C;AAAA,MACA,UAAU,CAAC,UAAU;AAAA,MACrB,sBAAsB;AAAA,IACxB;AAAA,IAEA,sBAAsB,EAAE,OAAO,cAAc,cAAc,aAAa,GAAG;AA1I/E,UAAAC;AA4IM,UAAI,KAAC,gCAAa,KAAK,KAAK,KAAC,+BAAY,MAAM,QAAQ,GAAG;AACxD,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,IAAI,sCAAoB;AAAA,YAC7B;AAAA,YACA,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,aAAa,MAAM;AACzB,YAAM,cAA8B,CAAC;AAErC,eAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,cAAM,UAAU,WAAW,CAAC;AAC5B,cAAM,aAAS,0CAAkB,EAAE,OAAO,SAAS,OAAO,CAAC;AAM3D,YAAI,MAAM,WAAW,SAAS,KAAK,CAAC,cAAc;AAChD;AAAA,QACF;AAEA,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO;AAAA,QACT;AAEA,oBAAY,KAAK,OAAO,KAAK;AAAA,MAC/B;AAGA,YAAM,yBAAwBA,OAAA,6CAAc,WAAd,OAAAA,OAAwB;AAEtD,UAAI,YAAY;AAEhB,UAAI,cAAc;AAChB,qBAAa;AAAA,MACf;AAEA,UAAI,wBAAwB,GAAG;AAC7B,qBAAa;AAAA,MACf;AAEA,mBAAa,YACV,MAAM,qBAAqB,EAC3B,IAAI,aAAW,KAAK,UAAU,OAAO,CAAC,EACtC,KAAK,GAAG;AAEX,UAAI,cAAc;AAChB,qBAAa;AAAA,MACf;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,oBACE,OACkC;AAElC,UAAI,KAAC,gCAAa,KAAK,KAAK,KAAC,+BAAY,MAAM,QAAQ,GAAG;AACxD,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,IAAI,sCAAoB;AAAA,YAC7B;AAAA,YACA,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,aAAa,MAAM;AAGzB,iBAAW,WAAW,YAAY;AAChC,cAAM,aAAS,0CAAkB,EAAE,OAAO,SAAS,OAAO,CAAC;AAC3D,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO,EAAE,SAAS,MAAM,OAAO,WAA6B;AAAA,IAC9D;AAAA,IAEA,oBACE,gBACA;AACA,UAAI,oBAAoB;AAExB,aAAO;AAAA,QACL,eAAe;AAAA,UACb,IAAI,gBAAsD;AAAA,YACxD,UAAU,OAAO,YAAY;AAC3B,sBAAQ,MAAM,MAAM;AAAA,gBAClB,KAAK,UAAU;AACb,wBAAM,QAAQ,MAAM;AAGpB,yBAEE,oBAAoB,MAAM,QAC1B,qBACA;AACA,+BAAW,QAAQ,MAAM,iBAAiB,CAAC;AAAA,kBAC7C;AAEA;AAAA,gBACF;AAAA,gBAEA,KAAK;AAAA,gBACL,KAAK;AACH;AAAA,gBAEF,KAAK;AACH,6BAAW,MAAM,MAAM,KAAK;AAC5B;AAAA,gBAEF,SAAS;AACP,wBAAM,mBAA0B;AAChC,wBAAM,IAAI;AAAA,oBACR,2BAA2B,gBAAgB;AAAA,kBAC7C;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,qBAAqB,CACzB,eACsC;AACtC,SAAO;AAAA,IACL,MAAM;AAAA;AAAA;AAAA;AAAA,IAKN,YAAY;AAAA,MACV,SAAS;AAAA,MA
CT,MAAM;AAAA,MACN,YAAY;AAAA,QACV,QAAQ,EAAE,MAAM,UAAU,MAAM,WAAW;AAAA,MAC7C;AAAA,MACA,UAAU,CAAC,QAAQ;AAAA,MACnB,sBAAsB;AAAA,IACxB;AAAA,IAEA,oBAAoB,OAAsD;AAExE,UAAI,KAAC,gCAAa,KAAK,KAAK,OAAO,MAAM,WAAW,UAAU;AAC5D,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,IAAI,sCAAoB;AAAA,YAC7B;AAAA,YACA,OACE;AAAA,UACJ,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,SAAS,MAAM;AAErB,aAAO,WAAW,SAAS,MAAc,IACrC,EAAE,SAAS,MAAM,OAAO,OAAe,IACvC;AAAA,QACE,SAAS;AAAA,QACT,OAAO,IAAI,sCAAoB;AAAA,UAC7B;AAAA,UACA,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACN;AAAA,IAEA,wBAAwB;AAEtB,YAAM,IAAI,gDAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,IAEA,sBAAsB;AAEpB,YAAM,IAAI,gDAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEO,SAAS,kBAA0B;AAAA,EACxC;AAAA,EACA;AAAA,EACA;AACF,GAIkC;AAChC,UAAQ,QAAQ;AAAA,IACd,KAAK;AACH,aAAO,yBAAqB,2BAAS,MAAO,CAAC;AAAA,IAC/C,KAAK;AACH,aAAO,wBAAoB,2BAAS,MAAO,CAAC;AAAA,IAC9C,KAAK;AACH,aAAO,mBAAmB,UAA4B;AAAA,IACxD,KAAK;AACH,aAAO;AAAA,IACT,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,MAAM,uBAAuB,gBAAgB,EAAE;AAAA,IAC3D;AAAA,EACF;AACF;;;AEtWO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAOG;AACD,MACE,UAAU,QACV,WAAW,YACX,WAAW,WACX,WAAW,UACX,WAAW,aACX;AACA,UAAM,IAAI,qBAAqB;AAAA,MAC7B,WAAW;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,MAAI,WAAW,aAAa;AAC1B,QAAI,SAAS,UAAU,SAAS,QAAQ;AACtC,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,qBAAqB,MAAM;AAC7B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,WAAW,UAAU;AACvB,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,WAAW,SAAS;AACtB,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,WAAW,QAAQ;AACrB,QAAI,UAAU,MAAM;AAClB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,qBAAqB,MAAM;AAC7B,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,cAAc,MAAM;AACtB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,eAAW,SAAS,YAAY;AAC9B,UAAI,OAAO,UAAU,UAAU;AAC7B,cAAM,IAAI,qBAAqB;AAAA,UAC7B,WAAW;AAAA,UACX;AAAA,UACA,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;AzBzHA,IAAM,yBAAqB,0CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AA4P1E,eAAsB,eAA+B;AAAA,EACnD;AAAA,EACA,MAAM;AAAA;AAAA,EACN,QAAQ;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA,SAAS;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B,WAAW;AAAA,IACT,YAAAC,cAAa;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,EAC/B,IAAI,CAAC;AAAA,EACL,GAAG;AACL,GA6B4C;AAC1C,gCAA8B;AAAA,IAC5B;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,c
AAc,CAAC;AAE1E,QAAM,iBAAiB,kBAAkB;AAAA,IACvC;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF,CAAC;AAGD,MAAI,eAAe,SAAS,eAAe,SAAS,QAAW;AAC7D,WAAO;AAAA,EACT;AAEA,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,GAAG,UAAU,WAAW;AAAA,EACtC,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB;AAAA,UACvB,aAAa;AAAA,UACb;AAAA,QACF,CAAC;AAAA,QACD,GAAG;AAAA;AAAA,QAEH,aAAa;AAAA,UACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,QAC1D;AAAA,QACA,aACE,eAAe,cAAc,OACzB,EAAE,OAAO,MAAM,KAAK,UAAU,eAAe,UAAU,EAAE,IACzD;AAAA,QACN,kBAAkB;AAAA,QAClB,yBAAyB;AAAA,QACzB,sBAAsB,eAAe;AAAA,QACrC,oBAAoB;AAAA,MACtB;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAxYtB,UAAAC,MAAA;AA0YM,UAAI,SAAS,UAAU,QAAQ,MAAM;AACnC,eAAO,MAAM;AAAA,MACf;AAEA,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AACJ,UAAI;AAEJ,cAAQ,MAAM;AAAA,QACZ,KAAK,QAAQ;AACX,gBAAM,qBAAqB,kBAAkB;AAAA,YAC3C,QAAQ;AAAA,cACN,QACE,eAAe,cAAc,OACzB,sBAAsB,EAAE,QAAQ,OAAO,CAAC,IACxC,MAAM,4BACN,SACA,sBAAsB;AAAA,gBACpB,QAAQ;AAAA,gBACR,QAAQ,eAAe;AAAA,cACzB,CAAC;AAAA,cACP;AAAA,cACA;AAAA,YACF;AAAA,YACA,OAAO;AAAA,UACT,CAAC;AAED,gBAAM,iBAAiB,MAAM,6BAA6B;AAAA,YACxD,QAAQ;AAAA,YACR,wBAAwB,MAAM;AAAA,YAC9B,kBAAkB,MAAM;AAAA,UAC1B,CAAC;AAED,gBAAM,iBAAiB,MAAM;AAAA,YAAM,MACjC,WAAW;AAAA,cACT,MAAM;AAAA,cACN,YAAY,0BAA0B;AAAA,gBACpC;AAAA,gBACA,YAAY;AAAA,kBACV,GAAG,sBAAsB;AAAA,oBACvB,aAAa;AAAA,oBACb;AAAA,kBACF,CAAC;AAAA,kBACD,GAAG;AAAA,kBACH,oBAAoB;AAAA,oBAClB,OAAO,MAAM,mBAAmB;AAAA,kBAClC;AAAA,kBACA,sBAAsB;AAAA,oBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,kBAC5C;AAAA,kBACA,oBAAoB;AAAA;AAAA,kBAGpB,iBAAiB,MAAM;AAAA,kBACvB,wBAAwB,MAAM;AAAA,kBAC9B,oCAAoC,SAAS;AAAA,kBAC7C,6BAA6B,SAAS;AAAA,kBACtC,mCAAmC,SAAS;AAAA,kBAC5C,8BAA8B,SAAS;AAAA,kBACvC,wBAAwB,SAAS;AAAA,kBACjC,wBAAwB,SAAS;AAAA,gBACnC;AAAA,cACF,CAAC;AAAA,cACD;AAAA,cACA,IAAI,OAAMC,UAAQ;AAhdhC,oBAAAD,MAAAE,KAAA;AAidgB,sBAAMC,UAAS,MAAM,MAAM,WAAW;AAAA,kBACpC,MAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,QAAQ,eAAe;AAAA,oBACvB,MAAM;AAAA,oBACN,aAAa;AAAA,kBACf;AAAA,kBACA,GAAG,oBAAoB,QAAQ;AAAA,kBAC/B,aAAa,mBAAmB;AAAA,kBAChC,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAED,sBAAM,eAAe;AAAA,kBACnB,KAAID,OAAAF,OAAAG,QAAO,aAAP,gBAAAH,KAAiB,OAAjB,OAAAE,MAAuBH,YAAW;AAAA,kBACtC,YAAW,WAAAI,QAAO,aAAP,mBAAiB,cAAjB,YAA8B,YAAY;AAAA,kBACrD,UAAS,WAAAA,QAAO,aAAP,mBAAiB,YAAjB,YAA4B,MAAM;AAAA,gBAC7C;AAEA,oBAAIA,QAAO,SAAS,QAAW;AAC7B,wBAAM,IAAI,uBAAuB;AAAA,oBAC/B,SACE;AAAA,oBACF,UAAU;AAAA,oBACV,OAAO,4BAA4BA,QAAO,KAAK;AAAA,kBACjD,CAAC;AAAA,gBACH;AAGA,gBAAAF,MAAK;AAAA,kBACH,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,4BAA4BE,QAAO;AAAA,sBACnC,sBAAsB,EAAE,QAAQ,MAAMA,QAAO,KAAK;AAAA,sBAClD,kBAAkB,aAAa;AAAA,sBAC/B,qBAAqB,aAAa;AAAA,sBAClC,yBACE,aAAa,UAAU,YAAY;AAAA,sBAErC,yBAAyBA,QAAO,MAAM;AAAA,sBACtC,6BACEA,QAAO,MAAM;AAAA;AAAA,sBAGf,kCAAkC,CAACA,QAAO,YAAY;AAAA,sBACtD,sBAAsB,aAAa;AAAA,sBACnC,yBAAyB,aAAa;AAAA,sBACtC,8BAA8BA,QAAO,MAAM;AAAA,sBAC3C,kCACEA,QAAO,MAAM;AAAA,oBACjB;AAAA,kBACF,CAAC;AAAA,gBACH;AAEA,uBAAO,EAAE,GAAGA,SAAQ,YAAYA,QAAO,MAAM,aAAa;AAAA,cAC5D;AAAA,YACF,CAAC;AAAA,UACH;AAEA,mBAAS,eAAe;AACxB,yBAAe,eAAe;AAC9B,kBAAQ,eAAe;AACvB,qBAAW,eAAe;AAC1B,wBAAc,eAAe;AAC7B,qBAAW,eAAe;AAC1B,mCAAyB,eAAe;AACxC,qBAAUH,OAAA,eAAe,YAAf,OAAAA,OAA0B,CAAC;AACrC,qBAAW,eAAe;AAE1B;AAAA,QACF;AAAA,QAEA,KAAK,QAAQ;AACX,gBAAM,qBAAqB,kBAAkB;AAAA,YAC3C,QAAQ,EAAE,QAAQ,QAAQ,SAAS;AAAA,YACnC,OAAO;AAAA,UACT,CAAC;AAED,gBAAM,iBAAiB,MAAM,6BAA6B;AAAA,YACxD,QAAQ;AAAA,YACR,wBAAwB,MAAM;AAAA,YAC9B,kBAAkB,MAAM;AAAA,UAC1B,CAAC;AACD,gBAAM,cAAc,mBAAmB;AAEvC,gBAAM,iBAAiB,MAAM;AAAA,YAAM,MACjC,WAAW;AAAA,cACT,MAAM;
AAAA,cACN,YAAY,0BAA0B;AAAA,gBACpC;AAAA,gBACA,YAAY;AAAA,kBACV,GAAG,sBAAsB;AAAA,oBACvB,aAAa;AAAA,oBACb;AAAA,kBACF,CAAC;AAAA,kBACD,GAAG;AAAA,kBACH,oBAAoB;AAAA,oBAClB,OAAO,MAAM;AAAA,kBACf;AAAA,kBACA,sBAAsB;AAAA,oBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,kBAC5C;AAAA,kBACA,oBAAoB;AAAA;AAAA,kBAGpB,iBAAiB,MAAM;AAAA,kBACvB,wBAAwB,MAAM;AAAA,kBAC9B,oCAAoC,SAAS;AAAA,kBAC7C,6BAA6B,SAAS;AAAA,kBACtC,mCAAmC,SAAS;AAAA,kBAC5C,8BAA8B,SAAS;AAAA,kBACvC,wBAAwB,SAAS;AAAA,kBACjC,wBAAwB,SAAS;AAAA,gBACnC;AAAA,cACF,CAAC;AAAA,cACD;AAAA,cACA,IAAI,OAAMC,UAAQ;AAxkBhC,oBAAAD,MAAAE,KAAA;AAykBgB,sBAAMC,UAAS,MAAM,MAAM,WAAW;AAAA,kBACpC,MAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,MAAM;AAAA,sBACJ,MAAM;AAAA,sBACN,MAAM,kCAAc;AAAA,sBACpB,aACE,gDAAqB;AAAA,sBACvB,YAAY,eAAe;AAAA,oBAC7B;AAAA,kBACF;AAAA,kBACA,GAAG,oBAAoB,QAAQ;AAAA,kBAC/B;AAAA,kBACA,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAED,sBAAM,cAAaD,OAAAF,OAAAG,QAAO,cAAP,gBAAAH,KAAmB,OAAnB,gBAAAE,IAAuB;AAE1C,sBAAM,eAAe;AAAA,kBACnB,KAAI,WAAAC,QAAO,aAAP,mBAAiB,OAAjB,YAAuBJ,YAAW;AAAA,kBACtC,YAAW,WAAAI,QAAO,aAAP,mBAAiB,cAAjB,YAA8B,YAAY;AAAA,kBACrD,UAAS,WAAAA,QAAO,aAAP,mBAAiB,YAAjB,YAA4B,MAAM;AAAA,gBAC7C;AAEA,oBAAI,eAAe,QAAW;AAC5B,wBAAM,IAAI,uBAAuB;AAAA,oBAC/B,SAAS;AAAA,oBACT,UAAU;AAAA,oBACV,OAAO,4BAA4BA,QAAO,KAAK;AAAA,kBACjD,CAAC;AAAA,gBACH;AAGA,gBAAAF,MAAK;AAAA,kBACH,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,4BAA4BE,QAAO;AAAA,sBACnC,sBAAsB,EAAE,QAAQ,MAAM,WAAW;AAAA,sBACjD,kBAAkB,aAAa;AAAA,sBAC/B,qBAAqB,aAAa;AAAA,sBAClC,yBACE,aAAa,UAAU,YAAY;AAAA,sBAErC,yBAAyBA,QAAO,MAAM;AAAA,sBACtC,6BACEA,QAAO,MAAM;AAAA;AAAA,sBAGf,kCAAkC,CAACA,QAAO,YAAY;AAAA,sBACtD,sBAAsB,aAAa;AAAA,sBACnC,yBAAyB,aAAa;AAAA,sBACtC,6BAA6BA,QAAO,MAAM;AAAA,sBAC1C,8BACEA,QAAO,MAAM;AAAA,oBACjB;AAAA,kBACF,CAAC;AAAA,gBACH;AAEA,uBAAO,EAAE,GAAGA,SAAQ,YAAY,aAAa;AAAA,cAC/C;AAAA,YACF,CAAC;AAAA,UACH;AAEA,mBAAS,eAAe;AACxB,yBAAe,eAAe;AAC9B,kBAAQ,eAAe;AACvB,qBAAW,eAAe;AAC1B,wBAAc,eAAe;AAC7B,qBAAW,eAAe;AAC1B,mCAAyB,eAAe;AACxC,qBAAU,oBAAe,YAAf,YAA0B,CAAC;AACrC,qBAAW,eAAe;AAE1B;AAAA,QACF;AAAA,QAEA,KAAK,QAAW;AACd,gBAAM,IAAI;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QAEA,SAAS;AACP,gBAAM,mBAA0B;AAChC,gBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,QACzD;AAAA,MACF;AAEA,YAAM,kBAAc,sCAAc,EAAE,MAAM,OAAO,CAAC;AAElD,UAAI,CAAC,YAAY,SAAS;AACxB,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,YAAY;AAAA,UACnB,MAAM;AAAA,UACN;AAAA,UACA,OAAO,4BAA4B,KAAK;AAAA,QAC1C,CAAC;AAAA,MACH;AAEA,YAAM,mBAAmB,eAAe;AAAA,QACtC,YAAY;AAAA,QACZ;AAAA,UACE,MAAM;AAAA,UACN;AAAA,UACA,OAAO,4BAA4B,KAAK;AAAA,QAC1C;AAAA,MACF;AAEA,UAAI,CAAC,iBAAiB,SAAS;AAC7B,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,iBAAiB;AAAA,UACxB,MAAM;AAAA,UACN;AAAA,UACA,OAAO,4BAA4B,KAAK;AAAA,QAC1C,CAAC;AAAA,MACH;AAGA,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,4BAA4B;AAAA,YAC5B,sBAAsB;AAAA,cACpB,QAAQ,MAAM,KAAK,UAAU,iBAAiB,KAAK;AAAA,YACrD;AAAA,YAEA,yBAAyB,MAAM;AAAA,YAC/B,6BAA6B,MAAM;AAAA,UACrC;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,4BAA4B;AAAA,QACrC,QAAQ,iBAAiB;AAAA,QACzB;AAAA,QACA,OAAO,4BAA4B,KAAK;AAAA,QACxC;AAAA,QACA;AAAA,QACA,UAAU;AAAA,UACR,GAAG;AAAA,UACH,SAAS,2CAAa;AAAA,QACxB;AAAA,QACA;AAAA,QACA,kBAAkB;AAAA,MACpB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAEA,IAAM,8BAAN,MAAwE;AAAA,EAUtE,YAAY,SAST;AACD,SAAK,SAAS,QAAQ;AACtB,SAAK,eAAe,QAAQ;AAC5B,SAAK,QAAQ,QAAQ;AACrB,SAAK,WAAW,QAAQ;AACxB,SAAK,gCAAgC,QAAQ;AAC7C,SAAK,WAAW,QAAQ;AACxB,SAAK,UAAU,QAAQ;AACvB,SAAK,WAAW,QAAQ;AAAA,EAC1B;AAAA,EAEA,eAAe,MAA+B;AAnwBhD,QAAAH;AAowBI,WAAO,IAAI,SAAS,KAAK,UAAU,KAAK,MAAM,GAAG;AAAA,MAC/C,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,MACxB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,QAC7C,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;;;A0BrwBA,IAAAI,yBAA
kC;AAClC,IAAAC,mBAKO;;;ACPA,IAAM,iBAAN,MAAwB;AAAA,EAAxB;AACL,SAAQ,SAGmC,EAAE,MAAM,UAAU;AAE7D,SAAQ,WAA6C;AACrD,SAAQ,UAAkD;AAAA;AAAA,EAE1D,IAAI,QAAoB;AACtB,QAAI,KAAK,SAAS;AAChB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,UAAU,IAAI,QAAW,CAAC,SAAS,WAAW;AACjD,UAAI,KAAK,OAAO,SAAS,YAAY;AACnC,gBAAQ,KAAK,OAAO,KAAK;AAAA,MAC3B,WAAW,KAAK,OAAO,SAAS,YAAY;AAC1C,eAAO,KAAK,OAAO,KAAK;AAAA,MAC1B;AAEA,WAAK,WAAW;AAChB,WAAK,UAAU;AAAA,IACjB,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,QAAQ,OAAgB;AAjC1B,QAAAC;AAkCI,SAAK,SAAS,EAAE,MAAM,YAAY,MAAM;AAExC,QAAI,KAAK,SAAS;AAChB,OAAAA,OAAA,KAAK,aAAL,gBAAAA,KAAA,WAAgB;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,OAAO,OAAsB;AAzC/B,QAAAA;AA0CI,SAAK,SAAS,EAAE,MAAM,YAAY,MAAM;AAExC,QAAI,KAAK,SAAS;AAChB,OAAAA,OAAA,KAAK,YAAL,gBAAAA,KAAA,WAAe;AAAA,IACjB;AAAA,EACF;AACF;;;ACvCO,SAAS,0BAId;AACA,MAAI;AACJ,MAAI;AAEJ,QAAM,UAAU,IAAI,QAAW,CAAC,KAAK,QAAQ;AAC3C,cAAU;AACV,aAAS;AAAA,EACX,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACnBO,SAAS,yBAId;AACA,MAAI,qBAAuD,CAAC;AAC5D,MAAI,aAAwD;AAC5D,MAAI,WAAW;AACf,MAAI,mBAAmB,wBAA8B;AAErD,QAAM,cAAc,YAAY;AAE9B,QAAI,YAAY,mBAAmB,WAAW,GAAG;AAC/C,+CAAY;AACZ;AAAA,IACF;AAIA,QAAI,mBAAmB,WAAW,GAAG;AACnC,yBAAmB,wBAA8B;AACjD,YAAM,iBAAiB;AACvB,aAAO,YAAY;AAAA,IACrB;AAEA,QAAI;AACF,YAAM,EAAE,OAAO,KAAK,IAAI,MAAM,mBAAmB,CAAC,EAAE,KAAK;AAEzD,UAAI,MAAM;AAER,2BAAmB,MAAM;AAGzB,YAAI,mBAAmB,SAAS,GAAG;AACjC,gBAAM,YAAY;AAAA,QACpB,WAAW,UAAU;AACnB,mDAAY;AAAA,QACd;AAAA,MACF,OAAO;AAEL,iDAAY,QAAQ;AAAA,MACtB;AAAA,IACF,SAAS,OAAO;AAEd,+CAAY,MAAM;AAClB,yBAAmB,MAAM;AAEzB,UAAI,YAAY,mBAAmB,WAAW,GAAG;AAC/C,iDAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,IAAI,eAAkB;AAAA,MAC5B,MAAM,iBAAiB;AACrB,qBAAa;AAAA,MACf;AAAA,MACA,MAAM;AAAA,MACN,MAAM,SAAS;AACb,mBAAW,UAAU,oBAAoB;AACvC,gBAAM,OAAO,OAAO;AAAA,QACtB;AACA,6BAAqB,CAAC;AACtB,mBAAW;AAAA,MACb;AAAA,IACF,CAAC;AAAA,IACD,WAAW,CAAC,gBAAmC;AAC7C,UAAI,UAAU;AACZ,cAAM,IAAI,MAAM,iDAAiD;AAAA,MACnE;AAEA,yBAAmB,KAAK,YAAY,UAAU,CAAC;AAC/C,uBAAiB,QAAQ;AAAA,IAC3B;AAAA,IACA,OAAO,MAAM;AACX,iBAAW;AACX,uBAAiB,QAAQ;AAEzB,UAAI,mBAAmB,WAAW,GAAG;AACnC,iDAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACF;;;AC3FO,SAAS,MAAc;AAD9B,MAAAC,MAAA;AAEE,UAAO,MAAAA,OAAA,yCAAY,gBAAZ,gBAAAA,KAAyB,UAAzB,YAAkC,KAAK,IAAI;AACpD;;;AJqDA,IAAMC,0BAAqB,0CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AAwPnE,SAAS,aAAsD;AAAA,EACpE;AAAA,EACA,QAAQ;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA,SAAS;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B;AAAA,EACA,WAAW;AAAA,IACT,YAAAC,cAAaD;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,IAC7B,KAAAE,OAAM;AAAA,EACR,IAAI,CAAC;AAAA,EACL,GAAG;AACL,GA0B0D;AACxD,gCAA8B;AAAA,IAC5B;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB,kBAAkB,EAAE,QAAQ,QAAQ,YAAY,CAAC;AAGxE,MAAI,eAAe,SAAS,eAAe,SAAS,QAAW;AAC7D,WAAO;AAAA,EACT;AAEA,SAAO,IAAI,0BAA0B;AAAA,IACnC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,uBAAuB;AAAA,IACvB;AAAA,IACA;AAAA,IACA,YAAAD;AAAA,IACA;AAAA,IACA,KAAAC;AAAA,EACF,CAAC;AACH;AAEA,IAAM,4BAAN,MAEA;AAAA,EAuBE,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAAD;AAAA,IACA;AAAA,IACA,KAAAC;AAAA,EACF,GAmBG;AA5DH,SAAiB,gBAAgB,IAAI,eAAuB;AAC5D,SAAiB,eAAe,IAAI,eAAmC;AACvE,SAAiB,0BAA0B,IAAI,eAE7C;AACF,SAAiB,kBAAkB,IAAI,eAErC;AACF,SAAiB,iBACf,IAAI,eAA6C;AACnD,SAAiB,kBACf,IAAI,eAA8C;AAEpD,SAAiB,mBACf,uBAAkD;AA+ClD,UAAM,EAAE,YAAY,MAAM,IAAI,eAAe;AAAA,MAC3C,YAAY;AAAA,IACd,CAAC;AAED,UAAM,0BAA0B,2BAA2B;AAAA,MACzD;AAAA,MACA;AAAA,MACA;AAAA
,MACA,UAAU,EAAE,GAAG,UAAU,WAAW;AAAA,IACtC,CAAC;AAED,UAAM,SAAS,UAAU,SAAS;AAClC,UAAM,OAAO;AAEb,eAAW;AAAA,MACT,MAAM;AAAA,MACN,YAAY,0BAA0B;AAAA,QACpC;AAAA,QACA,YAAY;AAAA,UACV,GAAG,sBAAsB;AAAA,YACvB,aAAa;AAAA,YACb;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA;AAAA,UAEH,aAAa;AAAA,YACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,UAC1D;AAAA,UACA,aACE,eAAe,cAAc,OACzB,EAAE,OAAO,MAAM,KAAK,UAAU,eAAe,UAAU,EAAE,IACzD;AAAA,UACN,kBAAkB;AAAA,UAClB,yBAAyB;AAAA,UACzB,sBAAsB,eAAe;AAAA,UACrC,oBAAoB;AAAA,QACtB;AAAA,MACF,CAAC;AAAA,MACD;AAAA,MACA,aAAa;AAAA,MACb,IAAI,OAAM,aAAY;AAEpB,YAAI,SAAS,UAAU,QAAQ,MAAM;AACnC,iBAAO,MAAM;AAAA,QACf;AAEA,YAAI;AACJ,YAAI;AAKJ,gBAAQ,MAAM;AAAA,UACZ,KAAK,QAAQ;AACX,kBAAM,qBAAqB,kBAAkB;AAAA,cAC3C,QAAQ;AAAA,gBACN,QACE,eAAe,cAAc,OACzB,sBAAsB,EAAE,QAAQ,OAAO,CAAC,IACxC,MAAM,4BACN,SACA,sBAAsB;AAAA,kBACpB,QAAQ;AAAA,kBACR,QAAQ,eAAe;AAAA,gBACzB,CAAC;AAAA,gBACP;AAAA,gBACA;AAAA,cACF;AAAA,cACA,OAAO;AAAA,YACT,CAAC;AAED,0BAAc;AAAA,cACZ,MAAM;AAAA,gBACJ,MAAM;AAAA,gBACN,QAAQ,eAAe;AAAA,gBACvB,MAAM;AAAA,gBACN,aAAa;AAAA,cACf;AAAA,cACA,GAAG,oBAAoB,QAAQ;AAAA,cAC/B,aAAa,mBAAmB;AAAA,cAChC,QAAQ,MAAM,6BAA6B;AAAA,gBACzC,QAAQ;AAAA,gBACR,wBAAwB,MAAM;AAAA,gBAC9B,kBAAkB,MAAM;AAAA,cAC1B,CAAC;AAAA,cACD,kBAAkB;AAAA,cAClB;AAAA,cACA;AAAA,YACF;AAEA,0BAAc;AAAA,cACZ,WAAW,CAAC,OAAO,eAAe;AAChC,wBAAQ,MAAM,MAAM;AAAA,kBAClB,KAAK;AACH,+BAAW,QAAQ,MAAM,SAAS;AAClC;AAAA,kBACF,KAAK;AAAA,kBACL,KAAK;AAAA,kBACL,KAAK;AACH,+BAAW,QAAQ,KAAK;AACxB;AAAA,gBACJ;AAAA,cACF;AAAA,YACF;AAEA;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,kBAAM,qBAAqB,kBAAkB;AAAA,cAC3C,QAAQ,EAAE,QAAQ,QAAQ,SAAS;AAAA,cACnC,OAAO;AAAA,YACT,CAAC;AAED,0BAAc;AAAA,cACZ,MAAM;AAAA,gBACJ,MAAM;AAAA,gBACN,MAAM;AAAA,kBACJ,MAAM;AAAA,kBACN,MAAM,kCAAc;AAAA,kBACpB,aACE,gDAAqB;AAAA,kBACvB,YAAY,eAAe;AAAA,gBAC7B;AAAA,cACF;AAAA,cACA,GAAG,oBAAoB,QAAQ;AAAA,cAC/B,aAAa,mBAAmB;AAAA,cAChC,QAAQ,MAAM,6BAA6B;AAAA,gBACzC,QAAQ;AAAA,gBACR,wBAAwB,MAAM;AAAA,gBAC9B,kBAAkB,MAAM;AAAA,cAC1B,CAAC;AAAA,cACD,kBAAkB;AAAA,cAClB;AAAA,cACA;AAAA,YACF;AAEA,0BAAc;AAAA,cACZ,UAAU,OAAO,YAAY;AAC3B,wBAAQ,MAAM,MAAM;AAAA,kBAClB,KAAK;AACH,+BAAW,QAAQ,MAAM,aAAa;AACtC;AAAA,kBACF,KAAK;AAAA,kBACL,KAAK;AAAA,kBACL,KAAK;AACH,+BAAW,QAAQ,KAAK;AACxB;AAAA,gBACJ;AAAA,cACF;AAAA,YACF;AAEA;AAAA,UACF;AAAA,UAEA,KAAK,QAAW;AACd,kBAAM,IAAI;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAAA,UAEA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,UACzD;AAAA,QACF;AAEA,cAAM;AAAA,UACJ,QAAQ,EAAE,QAAQ,UAAU,aAAa,QAAQ;AAAA,UACjD;AAAA,UACA;AAAA,QACF,IAAI,MAAM;AAAA,UAAM,MACd,WAAW;AAAA,YACT,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA,gBACH,oBAAoB;AAAA,kBAClB,OAAO,MAAM,YAAY;AAAA,gBAC3B;AAAA,gBACA,sBAAsB;AAAA,kBACpB,OAAO,MAAM,KAAK,UAAU,YAAY,MAAM;AAAA,gBAChD;AAAA,gBACA,oBAAoB;AAAA;AAAA,gBAGpB,iBAAiB,MAAM;AAAA,gBACvB,wBAAwB,MAAM;AAAA,gBAC9B,oCAAoC,SAAS;AAAA,gBAC7C,6BAA6B,SAAS;AAAA,gBACtC,mCAAmC,SAAS;AAAA,gBAC5C,8BAA8B,SAAS;AAAA,gBACvC,wBAAwB,SAAS;AAAA,gBACjC,wBAAwB,SAAS;AAAA,cACnC;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,aAAa;AAAA,YACb,IAAI,OAAMC,mBAAiB;AAAA,cACzB,kBAAkBD,KAAI;AAAA,cACtB,cAAAC;AAAA,cACA,QAAQ,MAAM,MAAM,SAAS,WAAW;AAAA,YAC1C;AAAA,UACF,CAAC;AAAA,QACH;AAEA,aAAK,eAAe,QAAQ,4BAAW,CAAC,CAAC;AAGzC,YAAI;AACJ,YAAI;AACJ,YAAI;AACJ,YAAIC;AACJ,YAAI;AAGJ,YAAI,kBAAkB;AACtB,YAAI,YAAY;AAChB,YAAI,WAIA;AAAA,UACF,IAAIH,YAAW;AAAA,UACf,WAAW,YAAY;AAAA,UACvB,SAAS,MAAM;AAAA,QACjB;AAIA,YAAI,mBAA0C;AAC9C,YAAI,eAAoC;AACxC,YAAI,eAAe;AACnB,YAAI,eAAe;AAEnB,cAAM,oBAAoB,OACvB,YAAY,IAAI,gBAAgB,WAAW,CAAC,EAC5C;AAAA,UACC,IAAI,gBAGF;AAAA,YACA,MAAM,UAAU,OAAO,YAA2B;AA7rBhE,kBAAAI,MAAA;AA+rBgB,kBAAI,cAAc;AAChB,sBA
AM,iBAAiBH,KAAI,IAAI;AAE/B,+BAAe;AAEf,6BAAa,SAAS,wBAAwB;AAAA,kBAC5C,4BAA4B;AAAA,gBAC9B,CAAC;AAED,6BAAa,cAAc;AAAA,kBACzB,4BAA4B;AAAA,gBAC9B,CAAC;AAAA,cACH;AAGA,kBAAI,OAAO,UAAU,UAAU;AAC7B,mCAAmB;AACnB,6BAAa;AAEb,sBAAM,EAAE,OAAO,mBAAmB,OAAO,WAAW,QAClD,mCAAiB,eAAe;AAElC,oBACE,sBAAsB,UACtB,KAAC,kCAAgB,kBAAkB,iBAAiB,GACpD;AACA,wBAAM,mBACJ,eAAe,sBAAsB;AAAA,oBACnC,OAAO;AAAA,oBACP;AAAA,oBACA;AAAA,oBACA;AAAA,oBACA,cAAc,eAAe;AAAA,kBAC/B,CAAC;AAEH,sBACE,iBAAiB,WACjB,KAAC;AAAA,oBACC;AAAA,oBACA,iBAAiB,MAAM;AAAA,kBACzB,GACA;AAEA,uCAAmB;AACnB,mCAAe,iBAAiB,MAAM;AAEtC,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,QAAQ;AAAA,oBACV,CAAC;AAED,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,WAAW,iBAAiB,MAAM;AAAA,oBACpC,CAAC;AAED,gCAAY;AACZ,mCAAe;AAAA,kBACjB;AAAA,gBACF;AAEA;AAAA,cACF;AAEA,sBAAQ,MAAM,MAAM;AAAA,gBAClB,KAAK,qBAAqB;AACxB,6BAAW;AAAA,oBACT,KAAIG,OAAA,MAAM,OAAN,OAAAA,OAAY,SAAS;AAAA,oBACzB,YAAW,WAAM,cAAN,YAAmB,SAAS;AAAA,oBACvC,UAAS,WAAM,YAAN,YAAiB,SAAS;AAAA,kBACrC;AACA;AAAA,gBACF;AAAA,gBAEA,KAAK,UAAU;AAEb,sBAAI,cAAc,IAAI;AACpB,+BAAW,QAAQ,EAAE,MAAM,cAAc,UAAU,CAAC;AAAA,kBACtD;AAGA,iCAAe,MAAM;AAGrB,0BAAQ,4BAA4B,MAAM,KAAK;AAC/C,qCAAmB,MAAM;AAEzB,6BAAW,QAAQ,EAAE,GAAG,OAAO,OAAO,SAAS,CAAC;AAGhD,uBAAK,aAAa,QAAQ,KAAK;AAC/B,uBAAK,wBAAwB,QAAQ,gBAAgB;AACrD,uBAAK,gBAAgB,QAAQ;AAAA,oBAC3B,GAAG;AAAA,oBACH,SAAS,2CAAa;AAAA,kBACxB,CAAC;AAGD,wBAAM,mBAAmB,eAAe;AAAA,oBACtC;AAAA,oBACA;AAAA,sBACE,MAAM;AAAA,sBACN;AAAA,sBACA;AAAA,oBACF;AAAA,kBACF;AAEA,sBAAI,iBAAiB,SAAS;AAC5B,oBAAAD,UAAS,iBAAiB;AAC1B,yBAAK,cAAc,QAAQA,OAAM;AAAA,kBACnC,OAAO;AACL,4BAAQ,IAAI,uBAAuB;AAAA,sBACjC,SACE;AAAA,sBACF,OAAO,iBAAiB;AAAA,sBACxB,MAAM;AAAA,sBACN;AAAA,sBACA;AAAA,oBACF,CAAC;AACD,yBAAK,cAAc,OAAO,KAAK;AAAA,kBACjC;AAEA;AAAA,gBACF;AAAA,gBAEA,SAAS;AACP,6BAAW,QAAQ,KAAK;AACxB;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA;AAAA,YAGA,MAAM,MAAM,YAAY;AACtB,kBAAI;AACF,sBAAM,aAAa,wBAAS;AAAA,kBAC1B,cAAc;AAAA,kBACd,kBAAkB;AAAA,kBAClB,aAAa;AAAA,gBACf;AAEA,6BAAa;AAAA,kBACX,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,4BAA4B;AAAA,sBAC5B,sBAAsB;AAAA,wBACpB,QAAQ,MAAM,KAAK,UAAUA,OAAM;AAAA,sBACrC;AAAA,sBACA,kBAAkB,SAAS;AAAA,sBAC3B,qBAAqB,SAAS;AAAA,sBAC9B,yBACE,SAAS,UAAU,YAAY;AAAA,sBAEjC,yBAAyB,WAAW;AAAA,sBACpC,6BACE,WAAW;AAAA;AAAA,sBAGb,kCAAkC,CAAC,YAAY;AAAA,sBAC/C,sBAAsB,SAAS;AAAA,sBAC/B,yBAAyB,SAAS;AAAA,sBAClC,6BAA6B,WAAW;AAAA,sBACxC,8BACE,WAAW;AAAA,oBACf;AAAA,kBACF,CAAC;AAAA,gBACH;AAGA,6BAAa,IAAI;AAGjB,yBAAS;AAAA,kBACP,0BAA0B;AAAA,oBACxB;AAAA,oBACA,YAAY;AAAA,sBACV,yBAAyB,WAAW;AAAA,sBACpC,6BACE,WAAW;AAAA,sBACb,sBAAsB;AAAA,wBACpB,QAAQ,MAAM,KAAK,UAAUA,OAAM;AAAA,sBACrC;AAAA,oBACF;AAAA,kBACF,CAAC;AAAA,gBACH;AAGA,uBAAM,qCAAW;AAAA,kBACf,OAAO;AAAA,kBACP,QAAAA;AAAA,kBACA;AAAA,kBACA,UAAU;AAAA,oBACR,GAAG;AAAA,oBACH,SAAS,2CAAa;AAAA,kBACxB;AAAA,kBACA;AAAA,kBACA,+BAA+B;AAAA,gBACjC;AAAA,cACF,SAASE,QAAO;AACd,2BAAW,MAAMA,MAAK;AAAA,cACxB,UAAE;AACA,yBAAS,IAAI;AAAA,cACf;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH;AAEF,aAAK,iBAAiB,UAAU,iBAAiB;AAAA,MACnD;AAAA,IACF,CAAC,EACE,MAAM,WAAS;AAEd,WAAK,iBAAiB;AAAA,QACpB,IAAI,eAAe;AAAA,UACjB,MAAM,YAAY;AAChB,uBAAW,MAAM,KAAK;AAAA,UACxB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC,EACA,QAAQ,MAAM;AACb,WAAK,iBAAiB,MAAM;AAAA,IAC9B,CAAC;AAEH,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEA,IAAI,SAAS;AACX,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEA,IAAI,gCAAgC;AAClC,WAAO,KAAK,wBAAwB;AAAA,EACtC;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,sBAAoD;AACtD,WAAO;AAAA,MACL,KAAK,iBAAiB,OAAO;AAAA,QAC3B,IAAI,gBAAoD;AAAA,UACtD,UAAU,OAAO,YAAY;AAC3B,oBAAQ,MAAM,
MAAM;AAAA,cAClB,KAAK;AACH,2BAAW,QAAQ,MAAM,MAAM;AAC/B;AAAA,cAEF,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cAEF,KAAK;AACH,2BAAW,MAAM,MAAM,KAAK;AAC5B;AAAA,cAEF,SAAS;AACP,sBAAM,mBAA0B;AAChC,sBAAM,IAAI,MAAM,2BAA2B,gBAAgB,EAAE;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,gBAAgC;AAClC,WAAO,KAAK,eAAe;AAAA,MACzB,KAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,IAAI,aAA0C;AAC5C,WAAO;AAAA,MACL,KAAK,iBAAiB,OAAO;AAAA,QAC3B,IAAI,gBAAmD;AAAA,UACrD,UAAU,OAAO,YAAY;AAC3B,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AACH,2BAAW,QAAQ,MAAM,SAAS;AAClC;AAAA,cAEF,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cAEF,KAAK;AACH,2BAAW,MAAM,MAAM,KAAK;AAC5B;AAAA,cAEF,SAAS;AACP,sBAAM,mBAA0B;AAChC,sBAAM,IAAI,MAAM,2BAA2B,gBAAgB,EAAE;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,aAA6D;AAC/D,WAAO,0BAA0B,KAAK,iBAAiB,MAAM;AAAA,EAC/D;AAAA,EAEA,yBAAyB,UAA0B,MAAqB;AACtE,0BAAsB;AAAA,MACpB;AAAA,MACA,QAAQ,6BAAM;AAAA,MACd,YAAY,6BAAM;AAAA,MAClB,SAAS,2BAA2B,6BAAM,SAAS;AAAA,QACjD,aAAa;AAAA,MACf,CAAC;AAAA,MACD,QAAQ,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC;AAAA,IAC7D,CAAC;AAAA,EACH;AAAA,EAEA,qBAAqB,MAA+B;AA3gCtD,QAAAD;AA4gCI,WAAO,IAAI,SAAS,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC,GAAG;AAAA,MACxE,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,MACxB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,QAC7C,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;;;AKnhCA,IAAAE,yBAAkC;;;ACAlC,IAAAC,oBAYO;;;ACZP,IAAAC,oBAA4C;AAE5C,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,4BAAN,cAAwC,6BAAW;AAAA,EAMxD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,8BAA8B,QAAQ,SAAK;AAAA,MACnD;AAAA,IACF,CAAC;AAAA,EACH,GAKG;AACD,UAAM,EAAE,MAAAH,OAAM,SAAS,MAAM,CAAC;AAlBhC,SAAkBG,OAAU;AAoB1B,SAAK,WAAW;AAChB,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,OAAO,WAAW,OAAoD;AACpE,WAAO,6BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA3BoBE,MAAAD;;;ACPpB,IAAAE,oBAA2B;AAE3B,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAMO,IAAM,kBAAN,cAA8B,6BAAW;AAAA,EAM9C,YAAY;AAAA,IACV;AAAA,IACA,iBAAiB;AAAA,IACjB,UAAU,yCAAyC,QAAQ,MACzD,mBAAmB,SACf,4BACA,oBAAoB,eAAe,KAAK,IAAI,CAAC,GACnD;AAAA,EACF,GAIG;AACD,UAAM,EAAE,MAAAH,OAAM,QAAQ,CAAC;AAlBzB,SAAkBG,OAAU;AAoB1B,SAAK,WAAW;AAChB,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEA,OAAO,WAAW,OAA0C;AAC1D,WAAO,6BAAW,UAAU,OAAOF,OAAM;AAAA,EAC3C;AACF;AA3BoBE,MAAAD;;;ACPpB,IAAAE,oBAA4C;AAI5C,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AANhC,IAAAE;AAQO,IAAM,sBAAN,cAAkC,6BAAW;AAAA,EAKlD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA,UAAU,kCAA8B,mCAAgB,KAAK,CAAC;AAAA,EAChE,GAIG;AACD,UAAM,EAAE,MAAAH,QAAM,SAAS,MAAM,CAAC;AAbhC,SAAkBG,QAAU;AAc1B,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,OAAO,WAAW,OAA8C;AAC9D,WAAO,6BAAW,UAAU,OAAOF,QAAM;AAAA,EAC3C;AACF;AApBoBE,OAAAD;;;ACTpB,IAAAE,oBAAuD;AAEvD,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AAJhC,IAAAE;AAMO,IAAM,qBAAN,cAAiC,6BAAW;AAAA,EAMjD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,wBAAwB,QAAQ,SAAK,mCAAgB,KAAK,CAAC;AAAA,EACvE,GAKG;AACD,UAAM,EAAE,MAAAH,QAAM,SAAS,MAAM,CAAC;AAhBhC,SAAkBG,QAAU;AAkB1B,SAAK,WAAW;AAChB,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,OAAO,WAAW,OAA6C;AAC7D,WAAO,6BAAW,UAAU,OAAOF,QAAM;AAAA,EAC3C;AACF;AAzBoBE,OAAAD;;;ACFpB,IAAAE,mBAAyB;;;ACLlB,SAAS,iBACdC,SACmC;AACnC,SAAOA,WAAU,QAAQ,OAAO,KAAKA,OAAM,EAAE,SAAS;AACxD;;;ADMO,SAAS,0BAEd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GASE;AACA,MAAI,CAAC,iBAAiB,KAAK,GAAG;AAC5B,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY;AAAA,IACd;AAAA,EACF;AAGA,QAAM,gBACJ,eAAe,OACX,OAAO,QAAQ,KAAK,EAAE;AAAA,IAAO,CAAC,CAACC,MAAI,MACjC,YAAY,SAASA,MAAmB;AAAA,EAC1C,IACA,OAAO,QAAQ,KAAK;AAE1B,SAAO;AAAA,IACL,OAAO,cAAc,IAAI,CAAC,
CAACA,QAAMC,KAAI,MAAM;AACzC,YAAM,WAAWA,MAAK;AACtB,cAAQ,UAAU;AAAA,QAChB,KAAK;AAAA,QACL,KAAK;AACH,iBAAO;AAAA,YACL,MAAM;AAAA,YACN,MAAAD;AAAA,YACA,aAAaC,MAAK;AAAA,YAClB,gBAAY,2BAASA,MAAK,UAAU,EAAE;AAAA,UACxC;AAAA,QACF,KAAK;AACH,iBAAO;AAAA,YACL,MAAM;AAAA,YACN,MAAAD;AAAA,YACA,IAAIC,MAAK;AAAA,YACT,MAAMA,MAAK;AAAA,UACb;AAAA,QACF,SAAS;AACP,gBAAM,kBAAyB;AAC/B,gBAAM,IAAI,MAAM,0BAA0B,eAAe,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF,CAAC;AAAA,IACD,YACE,cAAc,OACV,EAAE,MAAM,OAAO,IACf,OAAO,eAAe,WACtB,EAAE,MAAM,WAAW,IACnB,EAAE,MAAM,QAAiB,UAAU,WAAW,SAAmB;AAAA,EACzE;AACF;;;AEzEA,IAAM,uBAAuB;AAWtB,SAAS,sBAAsBC,OAMxB;AACZ,QAAM,QAAQA,MAAK,MAAM,oBAAoB;AAC7C,SAAO,QACH,EAAE,QAAQ,MAAM,CAAC,GAAG,YAAY,MAAM,CAAC,GAAG,QAAQ,MAAM,CAAC,EAAE,IAC3D;AACN;;;ACpBO,SAAS,8BAA8BC,OAAsB;AAClE,QAAM,QAAQ,sBAAsBA,KAAI;AACxC,SAAO,QAAQ,MAAM,SAAS,MAAM,aAAaA;AACnD;;;ACJA,IAAAC,yBAAiD;AACjD,IAAAC,mBAAiC;AAUjC,eAAsB,cAAsD;AAAA,EAC1E;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAMkC;AAChC,MAAI,SAAS,MAAM;AACjB,UAAM,IAAI,gBAAgB,EAAE,UAAU,SAAS,SAAS,CAAC;AAAA,EAC3D;AAEA,MAAI;AACF,WAAO,MAAM,gBAAgB,EAAE,UAAU,MAAM,CAAC;AAAA,EAClD,SAAS,OAAO;AACd,QACE,kBAAkB,QAClB,EACE,gBAAgB,WAAW,KAAK,KAChC,0BAA0B,WAAW,KAAK,IAE5C;AACA,YAAM;AAAA,IACR;AAEA,QAAI,mBAA2D;AAE/D,QAAI;AACF,yBAAmB,MAAM,eAAe;AAAA,QACtC;AAAA,QACA;AAAA,QACA,iBAAiB,CAAC,EAAE,SAAS,UAC3B,2BAAS,MAAM,QAAQ,EAAE,UAAU,EAAE;AAAA,QACvC;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,aAAa;AACpB,YAAM,IAAI,oBAAoB;AAAA,QAC5B,OAAO;AAAA,QACP,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAGA,QAAI,oBAAoB,MAAM;AAC5B,YAAM;AAAA,IACR;AAEA,WAAO,MAAM,gBAAgB,EAAE,UAAU,kBAAkB,MAAM,CAAC;AAAA,EACpE;AACF;AAEA,eAAe,gBAAwD;AAAA,EACrE;AAAA,EACA;AACF,GAGkC;AAChC,QAAM,WAAW,SAAS;AAE1B,QAAMC,QAAO,MAAM,QAAQ;AAE3B,MAAIA,SAAQ,MAAM;AAChB,UAAM,IAAI,gBAAgB;AAAA,MACxB,UAAU,SAAS;AAAA,MACnB,gBAAgB,OAAO,KAAK,KAAK;AAAA,IACnC,CAAC;AAAA,EACH;AAEA,QAAM,aAAS,2BAASA,MAAK,UAAU;AAMvC,QAAM,cACJ,SAAS,KAAK,KAAK,MAAM,SACrB,0CAAkB,EAAE,OAAO,CAAC,GAAG,OAAO,CAAC,QACvC,sCAAc,EAAE,MAAM,SAAS,MAAM,OAAO,CAAC;AAEnD,MAAI,YAAY,YAAY,OAAO;AACjC,UAAM,IAAI,0BAA0B;AAAA,MAClC;AAAA,MACA,UAAU,SAAS;AAAA,MACnB,OAAO,YAAY;AAAA,IACrB,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,YAAY,SAAS;AAAA,IACrB;AAAA,IACA,MAAM,YAAY;AAAA,EACpB;AACF;;;ACrGO,SAAS,mBAA2D;AAAA,EACzE,MAAAC,QAAO;AAAA,EACP;AAAA,EACA;AAAA,EACA;AACF,GAKkD;AAChD,QAAM,mBAAkE,CAAC;AAEzE,mBAAiB,KAAK;AAAA,IACpB,MAAM;AAAA,IACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAAA,MAAK,GAAG,GAAG,SAAS;AAAA,EAChD,CAAC;AAED,MAAI,YAAY,SAAS,GAAG;AAC1B,qBAAiB,KAAK;AAAA,MACpB,MAAM;AAAA,MACN,SAAS,YAAY,IAAI,CAAC,eAA+B;AACvD,cAAMC,QAAO,MAAM,WAAW,QAAQ;AACtC,gBAAOA,SAAA,gBAAAA,MAAM,qCAAoC,OAC7C;AAAA,UACE,MAAM;AAAA,UACN,YAAY,WAAW;AAAA,UACvB,UAAU,WAAW;AAAA,UACrB,QAAQA,MAAK,iCAAiC,WAAW,MAAM;AAAA,UAC/D,sBAAsBA,MAAK;AAAA,YACzB,WAAW;AAAA,UACb;AAAA,QACF,IACA;AAAA,UACE,MAAM;AAAA,UACN,YAAY,WAAW;AAAA,UACvB,UAAU,WAAW;AAAA,UACrB,QAAQ,WAAW;AAAA,QACrB;AAAA,MACN,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,SAAO;AACT;;;AXtBA,IAAMC,0BAAqB,0CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AAgD1E,eAAsB,aAGpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,qBAAqB;AAAA,EACrB,4BAA4B,gBAAgB;AAAA,EAC5C,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B,0BAA0B;AAAA,EAC1B,6BAA6B;AAAA,EAC7B,WAAW;AAAA,IACT,YAAAC,cAAaD;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,EAC/B,IAAI,CAAC;AAAA,EACL;AAAA,EACA,GAAG;AACL,GAsEiD;AAlLjD,MAAAE;AAmLE,MAAI,WAAW,GAAG;AAChB,UAAM,IAAI,qBAAqB;AAAA,MAC7B,WAAW;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,QAAM,EAAE,YAAY,MAAM,IAAI,eAAe,EAAE,YAAY,cAAc,CAAC;AAE1E,QAAM,0BAA0B,2BAA2B;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,EAAE,GAAG
,UAAU,WAAW;AAAA,EACtC,CAAC;AAED,QAAM,gBAAgB,kBAAkB;AAAA,IACtC,QAAQ;AAAA,MACN,SAAQA,OAAA,iCAAQ,uBAAuB,EAAE,QAAQ,MAAM,OAA/C,OAAAA,OAAqD;AAAA,MAC7D;AAAA,MACA;AAAA,IACF;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SAAS,UAAU,SAAS;AAElC,SAAO,WAAW;AAAA,IAChB,MAAM;AAAA,IACN,YAAY,0BAA0B;AAAA,MACpC;AAAA,MACA,YAAY;AAAA,QACV,GAAG,sBAAsB;AAAA,UACvB,aAAa;AAAA,UACb;AAAA,QACF,CAAC;AAAA,QACD,GAAG;AAAA;AAAA,QAEH,aAAa;AAAA,UACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,QAC1D;AAAA,QACA,wBAAwB;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,IACD;AAAA,IACA,IAAI,OAAM,SAAQ;AAjOtB,UAAAA,MAAA;AAkOM,YAAM,OAAO;AAAA,QACX,MAAM;AAAA,QACN,GAAG,0BAA0B,EAAE,OAAO,YAAY,YAAY,CAAC;AAAA,MACjE;AAEA,YAAM,eAAe,oBAAoB,QAAQ;AAEjD,UAAI;AAGJ,UAAI,mBAAyC,CAAC;AAC9C,UAAI,qBAA6C,CAAC;AAClD,UAAI,YAAY;AAChB,YAAM,mBACJ,CAAC;AACH,UAAIC,QAAO;AACX,YAAM,QAAoD,CAAC;AAC3D,UAAI,QAA4B;AAAA,QAC9B,kBAAkB;AAAA,QAClB,cAAc;AAAA,QACd,aAAa;AAAA,MACf;AAEA,UAAI,WAA4D;AAEhE,SAAG;AAED,cAAM,eAAe,cAAc,IAAI,cAAc,OAAO;AAE5D,cAAM,oBAAoB;AAAA,UACxB,GAAG,cAAc;AAAA,UACjB,GAAG;AAAA,QACL;AAEA,cAAM,iBAAiB,MAAM,6BAA6B;AAAA,UACxD,QAAQ;AAAA,YACN,MAAM;AAAA,YACN,QAAQ,cAAc;AAAA,YACtB,UAAU;AAAA,UACZ;AAAA,UACA,wBAAwB,MAAM;AAAA,UAC9B,kBAAkB,MAAM;AAAA,QAC1B,CAAC;AAED,+BAAuB,MAAM;AAAA,UAAM,MACjC,WAAW;AAAA,YACT,MAAM;AAAA,YACN,YAAY,0BAA0B;AAAA,cACpC;AAAA,cACA,YAAY;AAAA,gBACV,GAAG,sBAAsB;AAAA,kBACvB,aAAa;AAAA,kBACb;AAAA,gBACF,CAAC;AAAA,gBACD,GAAG;AAAA,gBACH,oBAAoB,EAAE,OAAO,MAAM,aAAa;AAAA,gBAChD,sBAAsB;AAAA,kBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,gBAC5C;AAAA,gBACA,mBAAmB;AAAA;AAAA,kBAEjB,OAAO,MAAG;AA/R5B,wBAAAD;AA+R+B,4BAAAA,OAAA,KAAK,UAAL,gBAAAA,KAAY,IAAI,CAAAE,UAAQ,KAAK,UAAUA,KAAI;AAAA;AAAA,gBAC1D;AAAA,gBACA,wBAAwB;AAAA,kBACtB,OAAO,MACL,KAAK,cAAc,OACf,KAAK,UAAU,KAAK,UAAU,IAC9B;AAAA,gBACR;AAAA;AAAA,gBAGA,iBAAiB,MAAM;AAAA,gBACvB,wBAAwB,MAAM;AAAA,gBAC9B,oCAAoC,SAAS;AAAA,gBAC7C,6BAA6B,SAAS;AAAA,gBACtC,mCAAmC,SAAS;AAAA,gBAC5C,iCAAiC,SAAS;AAAA,gBAC1C,8BAA8B,SAAS;AAAA,gBACvC,wBAAwB,SAAS;AAAA,gBACjC,wBAAwB,SAAS;AAAA,cACnC;AAAA,YACF,CAAC;AAAA,YACD;AAAA,YACA,IAAI,OAAMC,UAAQ;AArT9B,kBAAAH,MAAAI,KAAAC,KAAAC,KAAAC,KAAAC;AAsTc,oBAAM,SAAS,MAAM,MAAM,WAAW;AAAA,gBACpC;AAAA,gBACA,GAAG;AAAA,gBACH,aAAa;AAAA,gBACb,gBAAgB,iCAAQ,eAAe,EAAE,MAAM;AAAA,gBAC/C,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,CAAC;AAGD,oBAAM,eAAe;AAAA,gBACnB,KAAIJ,OAAAJ,OAAA,OAAO,aAAP,gBAAAA,KAAiB,OAAjB,OAAAI,MAAuBL,YAAW;AAAA,gBACtC,YAAWO,OAAAD,MAAA,OAAO,aAAP,gBAAAA,IAAiB,cAAjB,OAAAC,MAA8B,YAAY;AAAA,gBACrD,UAASE,OAAAD,MAAA,OAAO,aAAP,gBAAAA,IAAiB,YAAjB,OAAAC,MAA4B,MAAM;AAAA,cAC7C;AAGA,cAAAL,MAAK;AAAA,gBACH,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,4BAA4B,OAAO;AAAA,oBACnC,oBAAoB;AAAA,sBAClB,QAAQ,MAAM,OAAO;AAAA,oBACvB;AAAA,oBACA,yBAAyB;AAAA,sBACvB,QAAQ,MAAM,KAAK,UAAU,OAAO,SAAS;AAAA,oBAC/C;AAAA,oBACA,kBAAkB,aAAa;AAAA,oBAC/B,qBAAqB,aAAa;AAAA,oBAClC,yBACE,aAAa,UAAU,YAAY;AAAA,oBAErC,yBAAyB,OAAO,MAAM;AAAA,oBACtC,6BAA6B,OAAO,MAAM;AAAA;AAAA,oBAG1C,kCAAkC,CAAC,OAAO,YAAY;AAAA,oBACtD,sBAAsB,aAAa;AAAA,oBACnC,yBAAyB,aAAa;AAAA,oBACtC,6BAA6B,OAAO,MAAM;AAAA,oBAC1C,8BAA8B,OAAO,MAAM;AAAA,kBAC7C;AAAA,gBACF,CAAC;AAAA,cACH;AAEA,qBAAO,EAAE,GAAG,QAAQ,UAAU,aAAa;AAAA,YAC7C;AAAA,UACF,CAAC;AAAA,QACH;AAGA,2BAAmB,MAAM,QAAQ;AAAA,YAC9BH,OAAA,qBAAqB,cAArB,OAAAA,OAAkC,CAAC,GAAG;AAAA,YAAI,cACzC,cAAc;AAAA,cACZ;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA,UAAU;AAAA,YACZ,CAAC;AAAA,UACH;AAAA,QACF;AAGA,6BACE,SAAS,OACL,CAAC,IACD,MAAM,aAAa;AAAA,UACjB,WAAW;AAAA,UACX;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAU;AAAA,UACV;AAAA,QACF,CAAC;AAGP,cAAM,eAAe;AAAA,UACnB,qBAAqB;AAAA,QACvB;AACA,gBAAQ,sBAAsB,OAAO,YAAY;AAGjD,YAAI,eAAoD;AACxD,YAAI,EAAE,YAAY,UAAU;AAC1B,cACE,iBACA,qBAAqB,iBAAiB;AAAA,UAEtC,iBAAiB,WAAW,GAC5
B;AACA,2BAAe;AAAA,UACjB;AAAA;AAAA,YAEE,iBAAiB,SAAS;AAAA,YAE1B,mBAAmB,WAAW,iBAAiB;AAAA,YAC/C;AACA,2BAAe;AAAA,UACjB;AAAA,QACF;AAGA,cAAM,gBAAe,0BAAqB,SAArB,YAA6B;AAClD,cAAM,mCACJ,aAAa;AAAA,QACbC,MAAK,QAAQ,MAAMA,QACf,aAAa,UAAU,IACvB;AACN,cAAM,WACJ,iBAAiB,aACb,8BAA8B,gCAAgC,IAC9D;AAEN,QAAAA,QACE,iBAAiB,cAAc,aAAa,aACxCA,QAAO,WACP;AAGN,YAAI,aAAa,YAAY;AAI3B,gBAAM,cAAc,iBAClB,iBAAiB,SAAS,CAC5B;AAEA,cAAI,OAAO,YAAY,YAAY,UAAU;AAC3C,wBAAY,WAAW;AAAA,UACzB,OAAO;AACL,wBAAY,QAAQ,KAAK;AAAA,cACvB,MAAM;AAAA,cACN,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF,OAAO;AACL,2BAAiB;AAAA,YACf,GAAG,mBAAmB;AAAA,cACpB,MAAAA;AAAA,cACA,OAAO,wBAAU,CAAC;AAAA,cAClB,WAAW;AAAA,cACX,aAAa;AAAA,YACf,CAAC;AAAA,UACH;AAAA,QACF;AAGA,cAAM,oBAAuC;AAAA,UAC3C;AAAA,UACA,MAAM;AAAA,UACN,WAAW;AAAA,UACX,aAAa;AAAA,UACb,cAAc,qBAAqB;AAAA,UACnC,OAAO;AAAA,UACP,UAAU,qBAAqB;AAAA,UAC/B,UAAU,qBAAqB;AAAA,UAC/B,UAAS,0BAAqB,YAArB,YAAgC,CAAC;AAAA,UAC1C,UAAU;AAAA,YACR,GAAG,qBAAqB;AAAA,YACxB,UAAS,0BAAqB,gBAArB,mBAAkC;AAAA;AAAA,YAG3C,UAAU,KAAK,MAAM,KAAK,UAAU,gBAAgB,CAAC;AAAA,UACvD;AAAA,UACA,+BAA+B,qBAAqB;AAAA,UACpD,aAAa,iBAAiB;AAAA,QAChC;AACA,cAAM,KAAK,iBAAiB;AAC5B,eAAM,6CAAe;AAErB,mBAAW;AAAA,MACb,SAAS,aAAa;AAGtB,WAAK;AAAA,QACH,0BAA0B;AAAA,UACxB;AAAA,UACA,YAAY;AAAA,YACV,4BAA4B,qBAAqB;AAAA,YACjD,oBAAoB;AAAA,cAClB,QAAQ,MAAM,qBAAqB;AAAA,YACrC;AAAA,YACA,yBAAyB;AAAA,cACvB,QAAQ,MAAM,KAAK,UAAU,qBAAqB,SAAS;AAAA,YAC7D;AAAA,YAEA,yBAAyB,qBAAqB,MAAM;AAAA,YACpD,6BACE,qBAAqB,MAAM;AAAA,UAC/B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,IAAI,0BAA0B;AAAA,QACnC,MAAAA;AAAA,QACA,QACE,UAAU,OACL,SACD,OAAO;AAAA,UACL,EAAE,MAAAA,MAAK;AAAA,UACP;AAAA,YACE,UAAU,qBAAqB;AAAA,YAC/B;AAAA,UACF;AAAA,QACF;AAAA,QACN,WAAW;AAAA,QACX,aAAa;AAAA,QACb,cAAc,qBAAqB;AAAA,QACnC;AAAA,QACA,UAAU,qBAAqB;AAAA,QAC/B,UAAS,0BAAqB,YAArB,YAAgC,CAAC;AAAA,QAC1C,UAAU;AAAA,UACR,GAAG,qBAAqB;AAAA,UACxB,UAAS,0BAAqB,gBAArB,mBAAkC;AAAA,UAC3C,UAAU;AAAA,QACZ;AAAA,QACA,UAAU,qBAAqB;AAAA,QAC/B;AAAA,QACA,kBAAkB,qBAAqB;AAAA,MACzC,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAEA,eAAe,aAAqD;AAAA,EAClE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAOoC;AAClC,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,UAAU,IAAI,OAAO,EAAE,YAAY,UAAU,KAAK,MAAM;AACtD,YAAMC,QAAO,MAAM,QAAQ;AAE3B,WAAIA,SAAA,gBAAAA,MAAM,YAAW,MAAM;AACzB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,WAAW;AAAA,QAC9B,MAAM;AAAA,QACN,YAAY,0BAA0B;AAAA,UACpC;AAAA,UACA,YAAY;AAAA,YACV,GAAG,sBAAsB;AAAA,cACvB,aAAa;AAAA,cACb;AAAA,YACF,CAAC;AAAA,YACD,oBAAoB;AAAA,YACpB,kBAAkB;AAAA,YAClB,oBAAoB;AAAA,cAClB,QAAQ,MAAM,KAAK,UAAU,IAAI;AAAA,YACnC;AAAA,UACF;AAAA,QACF,CAAC;AAAA,QACD;AAAA,QACA,IAAI,OAAM,SAAQ;AAChB,cAAI;AACF,kBAAMO,UAAS,MAAMP,MAAK,QAAS,MAAM;AAAA,cACvC;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAED,gBAAI;AACF,mBAAK;AAAA,gBACH,0BAA0B;AAAA,kBACxB;AAAA,kBACA,YAAY;AAAA,oBACV,sBAAsB;AAAA,sBACpB,QAAQ,MAAM,KAAK,UAAUO,OAAM;AAAA,oBACrC;AAAA,kBACF;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF,SAAS,SAAS;AAAA,YAKlB;AAEA,mBAAOA;AAAA,UACT,SAAS,OAAO;AACd,kBAAM,IAAI,mBAAmB;AAAA,cAC3B;AAAA,cACA,UAAU;AAAA,cACV,OAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,YAAY;AAAA,IACjB,CAAC,WAAiD,UAAU;AAAA,EAC9D;AACF;AAEA,IAAM,4BAAN,MAEA;AAAA,EAoBE,YAAY,SAgBT;AACD,SAAK,OAAO,QAAQ;AACpB,SAAK,YAAY,QAAQ;AACzB,SAAK,cAAc,QAAQ;AAC3B,SAAK,eAAe,QAAQ;AAC5B,SAAK,QAAQ,QAAQ;AACrB,SAAK,WAAW,QAAQ;AACxB,SAAK,UAAU,QAAQ;AACvB,SAAK,WAAW,QAAQ;AACxB,SAAK,QAAQ,QAAQ;AACrB,SAAK,gCAAgC,QAAQ;AAC7C,SAAK,WAAW,QAAQ;AACxB,SAAK,sBAAsB,QAAQ;AAAA,EACrC;AACF;;;AY1qBA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAC,0BAAiD;AACjD,IAAAC,mBAAiC;AA6B1B,IAAM,OAAO,OAAuB;AAAA,EACzC,MAAM;AAAA,EACN,gBAAgB,OAAO,EAAE,
MAAM,OAAO;AAAA,EACtC,uBAAuB,EAAE,OAAO,GAAmC;AACjE,WAAO;AAAA,EACT;AAAA,EACA,YAAY,EAAE,MAAAC,MAAK,GAAqB;AACtC,WAAOA;AAAA,EACT;AACF;AAEO,IAAM,SAAS,CAAS;AAAA,EAC7B,QAAQ;AACV,MAEsB;AACpB,QAAM,aAAS,2BAAS,WAAW;AAEnC,SAAO;AAAA,IACL,MAAM;AAAA,IACN,gBAAgB,CAAC,EAAE,MAAM,OAAO;AAAA,MAC9B,MAAM;AAAA,MACN,QAAQ,MAAM,4BAA4B,OAAO,aAAa;AAAA,IAChE;AAAA,IACA,uBAAuB,EAAE,QAAQ,MAAM,GAAG;AAGxC,aAAO,MAAM,4BACT,SACA,sBAAsB;AAAA,QACpB,QAAQ;AAAA,QACR,QAAQ,OAAO;AAAA,MACjB,CAAC;AAAA,IACP;AAAA,IACA,YACE,EAAE,MAAAA,MAAK,GACP,SAIA;AACA,YAAM,kBAAc,uCAAc,EAAE,MAAAA,MAAK,CAAC;AAE1C,UAAI,CAAC,YAAY,SAAS;AACxB,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,YAAY;AAAA,UACnB,MAAAA;AAAA,UACA,UAAU,QAAQ;AAAA,UAClB,OAAO,QAAQ;AAAA,QACjB,CAAC;AAAA,MACH;AAEA,YAAM,uBAAmB,2CAAkB;AAAA,QACzC,OAAO,YAAY;AAAA,QACnB;AAAA,MACF,CAAC;AAED,UAAI,CAAC,iBAAiB,SAAS;AAC7B,cAAM,IAAI,uBAAuB;AAAA,UAC/B,SAAS;AAAA,UACT,OAAO,iBAAiB;AAAA,UACxB,MAAAA;AAAA,UACA,UAAU,QAAQ;AAAA,UAClB,OAAO,QAAQ;AAAA,QACjB,CAAC;AAAA,MACH;AAEA,aAAO,iBAAiB;AAAA,IAC1B;AAAA,EACF;AACF;;;ACrGA,IAAAC,0BAAkC;AAClC,IAAAC,mBAAuD;;;ACYhD,SAAS,aACd,SACA,SACiC;AACjC,QAAM,UAAU,QAAQ,UAAU;AAClC,QAAM,UAAU,QAAQ,UAAU;AAElC,MAAI,YACF;AACF,MAAI,YACF;AAEF,MAAI,cAAc;AAClB,MAAI,cAAc;AAGlB,iBAAe,YACb,YACA;AACA,QAAI;AACF,UAAI,aAAa,MAAM;AACrB,oBAAY,QAAQ,KAAK;AAAA,MAC3B;AAEA,YAAM,SAAS,MAAM;AACrB,kBAAY;AAEZ,UAAI,CAAC,OAAO,MAAM;AAChB,mBAAW,QAAQ,OAAO,KAAK;AAAA,MACjC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,SAAS,OAAO;AACd,iBAAW,MAAM,KAAK;AAAA,IACxB;AAAA,EACF;AAGA,iBAAe,YACb,YACA;AACA,QAAI;AACF,UAAI,aAAa,MAAM;AACrB,oBAAY,QAAQ,KAAK;AAAA,MAC3B;AAEA,YAAM,SAAS,MAAM;AACrB,kBAAY;AAEZ,UAAI,CAAC,OAAO,MAAM;AAChB,mBAAW,QAAQ,OAAO,KAAK;AAAA,MACjC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,SAAS,OAAO;AACd,iBAAW,MAAM,KAAK;AAAA,IACxB;AAAA,EACF;AAEA,SAAO,IAAI,eAAgC;AAAA,IACzC,MAAM,KAAK,YAAY;AACrB,UAAI;AAEF,YAAI,aAAa;AACf,gBAAM,YAAY,UAAU;AAC5B;AAAA,QACF;AAGA,YAAI,aAAa;AACf,gBAAM,YAAY,UAAU;AAC5B;AAAA,QACF;AAGA,YAAI,aAAa,MAAM;AACrB,sBAAY,QAAQ,KAAK;AAAA,QAC3B;AACA,YAAI,aAAa,MAAM;AACrB,sBAAY,QAAQ,KAAK;AAAA,QAC3B;AAKA,cAAM,EAAE,QAAQ,OAAO,IAAI,MAAM,QAAQ,KAAK;AAAA,UAC5C,UAAU,KAAK,CAAAC,aAAW,EAAE,QAAAA,SAAQ,QAAQ,QAAQ,EAAE;AAAA,UACtD,UAAU,KAAK,CAAAA,aAAW,EAAE,QAAAA,SAAQ,QAAQ,QAAQ,EAAE;AAAA,QACxD,CAAC;AAED,YAAI,CAAC,OAAO,MAAM;AAChB,qBAAW,QAAQ,OAAO,KAAK;AAAA,QACjC;AAEA,YAAI,WAAW,SAAS;AACtB,sBAAY;AACZ,cAAI,OAAO,MAAM;AAEf,kBAAM,YAAY,UAAU;AAC5B,0BAAc;AAAA,UAChB;AAAA,QACF,OAAO;AACL,sBAAY;AAEZ,cAAI,OAAO,MAAM;AACf,0BAAc;AACd,kBAAM,YAAY,UAAU;AAAA,UAC9B;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,IACA,SAAS;AACP,cAAQ,OAAO;AACf,cAAQ,OAAO;AAAA,IACjB;AAAA,EACF,CAAC;AACH;;;AClIA,IAAAC,mBAA2B;AA+DpB,SAAS,uBAA+D;AAAA,EAC7E;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAUuD;AAErD,MAAI,8BAEO;AACX,QAAM,oBAAoB,IAAI,eAE5B;AAAA,IACA,MAAM,YAAY;AAChB,oCAA8B;AAAA,IAChC;AAAA,EACF,CAAC;AAGD,QAAM,kBAA2C,CAAC;AAGlD,QAAM,yBAAyB,oBAAI,IAAY;AAE/C,MAAI,WAAW;AACf,MAAI,cAEY;AAEhB,WAAS,eAAe;AAEtB,QAAI,YAAY,uBAAuB,SAAS,GAAG;AAIjD,UAAI,eAAe,MAAM;AACvB,oCAA6B,QAAQ,WAAW;AAAA,MAClD;AAEA,kCAA6B,MAAM;AAAA,IACrC;AAAA,EACF;AAGA,QAAM,gBAAgB,IAAI,gBAGxB;AAAA,IACA,MAAM,UACJ,OACA,YAGA;AACA,YAAM,YAAY,MAAM;AAExB,cAAQ,WAAW;AAAA,QAEjB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,SAAS;AACZ,qBAAW,QAAQ,KAAK;AACxB;AAAA,QACF;AAAA,QAGA,KAAK,mBAAmB;AACtB,cAAI,mBAAmB;AACrB,gBAAI,CAAC,gBAAgB,MAAM,UAAU,GAAG;AACtC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM;AAAA,cAClB,CAAC;AAED,8BAAgB,MAAM,UAAU,IAAI;AAAA,YACtC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,YAAY,MAAM;AAAA,cAClB,UAAU,MAAM;AAAA,cAChB,eAAe,MAAM;AAAA,YACvB,CAAC
;AAAA,UACH;AACA;AAAA,QACF;AAAA,QAGA,KAAK,aAAa;AAChB,cAAI;AACF,kBAAM,WAAW,MAAM,cAAc;AAAA,cACnC,UAAU;AAAA,cACV;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAED,uBAAW,QAAQ,QAAQ;AAE3B,kBAAMC,QAAO,MAAO,SAAS,QAAQ;AAErC,gBAAIA,MAAK,WAAW,MAAM;AACxB,oBAAM,sBAAkB,6BAAW;AACnC,qCAAuB,IAAI,eAAe;AAK1C,yBAAW;AAAA,gBACT,MAAM;AAAA,gBACN,YAAY,0BAA0B;AAAA,kBACpC;AAAA,kBACA,YAAY;AAAA,oBACV,GAAG,sBAAsB;AAAA,sBACvB,aAAa;AAAA,sBACb;AAAA,oBACF,CAAC;AAAA,oBACD,oBAAoB,SAAS;AAAA,oBAC7B,kBAAkB,SAAS;AAAA,oBAC3B,oBAAoB;AAAA,sBAClB,QAAQ,MAAM,KAAK,UAAU,SAAS,IAAI;AAAA,oBAC5C;AAAA,kBACF;AAAA,gBACF,CAAC;AAAA,gBACD;AAAA,gBACA,IAAI,OAAM,SACRA,MAAK,QAAS,SAAS,MAAM;AAAA,kBAC3B,YAAY,SAAS;AAAA,kBACrB;AAAA,kBACA;AAAA,gBACF,CAAC,EAAE;AAAA,kBACD,CAAC,WAAgB;AACf,gDAA6B,QAAQ;AAAA,sBACnC,GAAG;AAAA,sBACH,MAAM;AAAA,sBACN;AAAA,oBACF,CAAQ;AAER,2CAAuB,OAAO,eAAe;AAE7C,iCAAa;AAGb,wBAAI;AACF,2BAAK;AAAA,wBACH,0BAA0B;AAAA,0BACxB;AAAA,0BACA,YAAY;AAAA,4BACV,sBAAsB;AAAA,8BACpB,QAAQ,MAAM,KAAK,UAAU,MAAM;AAAA,4BACrC;AAAA,0BACF;AAAA,wBACF,CAAC;AAAA,sBACH;AAAA,oBACF,SAAS,SAAS;AAAA,oBAKlB;AAAA,kBACF;AAAA,kBACA,CAAC,UAAe;AACd,gDAA6B,QAAQ;AAAA,sBACnC,MAAM;AAAA,sBACN,OAAO,IAAI,mBAAmB;AAAA,wBAC5B,UAAU,SAAS;AAAA,wBACnB,UAAU,SAAS;AAAA,wBACnB,OAAO;AAAA,sBACT,CAAC;AAAA,oBACH,CAAC;AAED,2CAAuB,OAAO,eAAe;AAC7C,iCAAa;AAAA,kBACf;AAAA,gBACF;AAAA,cACJ,CAAC;AAAA,YACH;AAAA,UACF,SAAS,OAAO;AACd,wCAA6B,QAAQ;AAAA,cACnC,MAAM;AAAA,cACN;AAAA,YACF,CAAC;AAAA,UACH;AAEA;AAAA,QACF;AAAA,QAEA,KAAK,UAAU;AACb,wBAAc;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,MAAM;AAAA,YACpB,UAAU,MAAM;AAAA,YAChB,OAAO,4BAA4B,MAAM,KAAK;AAAA,YAC9C,+BAA+B,MAAM;AAAA,UACvC;AACA;AAAA,QACF;AAAA,QAEA,SAAS;AACP,gBAAM,mBAA0B;AAChC,gBAAM,IAAI,MAAM,yBAAyB,gBAAgB,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA,IAEA,QAAQ;AACN,iBAAW;AACX,mBAAa;AAAA,IACf;AAAA,EACF,CAAC;AAGD,SAAO,IAAI,eAAmD;AAAA,IAC5D,MAAM,MAAM,YAAY;AAGtB,aAAO,QAAQ,IAAI;AAAA,QACjB,gBAAgB,YAAY,aAAa,EAAE;AAAA,UACzC,IAAI,eAAe;AAAA,YACjB,MAAM,OAAO;AACX,yBAAW,QAAQ,KAAK;AAAA,YAC1B;AAAA,YACA,QAAQ;AAAA,YAER;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,kBAAkB;AAAA,UAChB,IAAI,eAAe;AAAA,YACjB,MAAM,OAAO;AACX,yBAAW,QAAQ,KAAK;AAAA,YAC1B;AAAA,YACA,QAAQ;AACN,yBAAW,MAAM;AAAA,YACnB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;;;AF1QA,IAAMC,0BAAqB,2CAAkB,EAAE,QAAQ,SAAS,MAAM,GAAG,CAAC;AAiDnE,SAAS,WAAmD;AAAA,EACjE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EACX,4BAA4B,gBAAgB;AAAA,EAC5C,wBAAwB;AAAA,EACxB,+BAA+B;AAAA,EAC/B,gCAAgC,oBAAoB;AAAA,EACpD,0BAA0B;AAAA,EAC1B,6BAA6B;AAAA,EAC7B,wBAAwB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW;AAAA,IACT,KAAAC,OAAM;AAAA,IACN,YAAAC,cAAaF;AAAA,IACb,cAAc,MAAM,oBAAI,KAAK;AAAA,EAC/B,IAAI,CAAC;AAAA,EACL,GAAG;AACL,GAiH8B;AAC5B,SAAO,IAAI,wBAAwB;AAAA,IACjC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,uCAAY,EAAE,MAAsB;AAAA,IAC/C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,KAAAC;AAAA,IACA;AAAA,IACA,YAAAC;AAAA,EACF,CAAC;AACH;AAEA,IAAM,0BAAN,MAEA;AAAA,EAwCE,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,KAAAD;AAAA,IACA;AAAA,IACA,YAAAC;AAAA,EACF,GAiDG;AAjHH,SAAiB,kBAAkB,IAAI,eAErC;AACF,SAAiB,eAAe,IAAI,eAElC;AACF,SAAiB,sBAAsB,IAAI,eAEzC;AACF,SAAiB,0BAA0B,IAAI,eAE7C;AACF,SAAiB,cAAc,IAAI,eAEjC;AACF,SAAiB,mBAAmB,IAAI,eAEtC;AACF,SAAiB,qBAAqB,IAAI,eAExC;AACF,SAAiB,iBAAiB,IAAI,eAEpC;AACF,SAAiB,kBAAkB,IAAI,eAErC;AACF
,SAAiB,eAAe,IAAI,eAElC;AAqFA,QAAI,WAAW,GAAG;AAChB,YAAM,IAAI,qBAAqB;AAAA,QAC7B,WAAW;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAKA,QAAI,mBAAmB;AACvB,QAAI,2BAA2B;AAC/B,QAAI,mBAAmB;AACvB,QAAI,kBAA4D;AAChE,UAAM,mBAEF;AAAA,MACF,IAAIA,YAAW;AAAA,MACf,WAAW,YAAY;AAAA,MACvB,SAAS,MAAM;AAAA,MACf,UAAU,CAAC;AAAA,IACb;AACA,QAAI,oBAA4C,CAAC;AACjD,QAAI,sBAAgD,CAAC;AACrD,QAAI,uBAAiD;AACrD,QAAI,gBAAgD;AACpD,QAAI,2BAAyD;AAC7D,QAAI,WAAmD;AACvD,UAAM,gBAAqC,CAAC;AAC5C,QAAI;AAEJ,UAAM,iBAAiB,IAAI,gBAGzB;AAAA,MACA,MAAM,UAAU,OAAO,YAAY;AACjC,mBAAW,QAAQ,KAAK;AAExB,YACE,MAAM,SAAS,gBACf,MAAM,SAAS,eACf,MAAM,SAAS,iBACf,MAAM,SAAS,+BACf,MAAM,SAAS,mBACf;AACA,iBAAM,mCAAU,EAAE,MAAM;AAAA,QAC1B;AAEA,YAAI,MAAM,SAAS,cAAc;AAC/B,8BAAoB,MAAM;AAC1B,sCAA4B,MAAM;AAClC,8BAAoB,MAAM;AAAA,QAC5B;AAEA,YAAI,MAAM,SAAS,aAAa;AAC9B,4BAAkB,KAAK,KAAK;AAAA,QAC9B;AAEA,YAAI,MAAM,SAAS,eAAe;AAChC,8BAAoB,KAAK,KAAK;AAAA,QAChC;AAEA,YAAI,MAAM,SAAS,eAAe;AAChC,gBAAM,eAAe,mBAAmB;AAAA,YACtC,MAAM;AAAA,YACN,OAAO,wBAAU,CAAC;AAAA,YAClB,WAAW;AAAA,YACX,aAAa;AAAA,UACf,CAAC;AAGD,gBAAM,cAAc,cAAc;AAClC,cAAI,eAAoD;AACxD,cAAI,cAAc,IAAI,UAAU;AAC9B,gBACE,iBACA,MAAM,iBAAiB;AAAA,YAEvB,kBAAkB,WAAW,GAC7B;AACA,6BAAe;AAAA,YACjB;AAAA;AAAA,cAEE,kBAAkB,SAAS;AAAA,cAE3B,oBAAoB,WAAW,kBAAkB;AAAA,cACjD;AACA,6BAAe;AAAA,YACjB;AAAA,UACF;AAGA,gBAAM,oBAAuC;AAAA,YAC3C;AAAA,YACA,MAAM;AAAA,YACN,WAAW;AAAA,YACX,aAAa;AAAA,YACb,cAAc,MAAM;AAAA,YACpB,OAAO,MAAM;AAAA,YACb,UAAU,MAAM;AAAA,YAChB,UAAU,MAAM;AAAA,YAChB,SAAS,MAAM;AAAA,YACf,UAAU;AAAA,cACR,GAAG,MAAM;AAAA,cACT,UAAU,CAAC,GAAG,iBAAiB,UAAU,GAAG,YAAY;AAAA,YAC1D;AAAA,YACA,+BAA+B,MAAM;AAAA,YACrC,aAAa,MAAM;AAAA,UACrB;AAEA,iBAAM,6CAAe;AAErB,wBAAc,KAAK,iBAAiB;AAEpC,8BAAoB,CAAC;AACrB,gCAAsB,CAAC;AACvB,6BAAmB;AACnB,4BAAkB,MAAM;AAExB,cAAI,iBAAiB,QAAQ;AAC3B,uBAAW;AAAA,UACb;AAEA,cAAI,iBAAiB,YAAY;AAC/B,6BAAiB,SAAS,KAAK,GAAG,YAAY;AAC9C,uCAA2B;AAAA,UAC7B;AAAA,QACF;AAEA,YAAI,MAAM,SAAS,UAAU;AAC3B,2BAAiB,KAAK,MAAM,SAAS;AACrC,2BAAiB,YAAY,MAAM,SAAS;AAC5C,2BAAiB,UAAU,MAAM,SAAS;AAC1C,2BAAiB,UAAU,MAAM,SAAS;AAC1C,0BAAgB,MAAM;AACtB,iCAAuB,MAAM;AAC7B,qCAA2B,MAAM;AAAA,QACnC;AAAA,MACF;AAAA,MAEA,MAAM,MAAM,YAAY;AAnhB9B,YAAAC;AAohBQ,YAAI;AAEF,gBAAM,WAAW,cAAc,cAAc,SAAS,CAAC;AACvD,cAAI,UAAU;AACZ,iBAAK,gBAAgB,QAAQ,SAAS,QAAQ;AAC9C,iBAAK,eAAe,QAAQ,SAAS,OAAO;AAC5C,iBAAK,gBAAgB,QAAQ,SAAS,QAAQ;AAC9C,iBAAK,iBAAiB,QAAQ,SAAS,SAAS;AAChD,iBAAK,mBAAmB,QAAQ,SAAS,WAAW;AACpD,iBAAK,wBAAwB;AAAA,cAC3B,SAAS;AAAA,YACX;AAAA,UACF;AAGA,gBAAM,eAAe,sDAAwB;AAC7C,gBAAM,QAAQ,wCAAiB;AAAA,YAC7B,kBAAkB;AAAA,YAClB,cAAc;AAAA,YACd,aAAa;AAAA,UACf;AAGA,eAAK,oBAAoB,QAAQ,YAAY;AAC7C,eAAK,aAAa,QAAQ,KAAK;AAG/B,eAAK,YAAY,QAAQ,gBAAgB;AACzC,eAAK,aAAa,QAAQ,aAAa;AAGvC,iBAAM,qCAAW;AAAA,YACf;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA,MAAM;AAAA,YACN,WAAW,SAAS;AAAA,YACpB,aAAa,SAAS;AAAA,YACtB,UAASA,OAAA,SAAS,YAAT,OAAAA,OAAoB,CAAC;AAAA,YAC9B,UAAU,SAAS;AAAA,YACnB,UAAU,SAAS;AAAA,YACnB,+BACE,SAAS;AAAA,YACX,OAAO;AAAA,UACT;AAGA,mBAAS;AAAA,YACP,0BAA0B;AAAA,cACxB;AAAA,cACA,YAAY;AAAA,gBACV,4BAA4B;AAAA,gBAC5B,oBAAoB,EAAE,QAAQ,MAAM,iBAAiB;AAAA,gBACrD,yBAAyB;AAAA,kBACvB,QAAQ,MAAG;AA1kB7B,wBAAAA;AA2kBoB,6BAAAA,OAAA,SAAS,cAAT,gBAAAA,KAAoB,UAChB,KAAK,UAAU,SAAS,SAAS,IACjC;AAAA;AAAA,gBACR;AAAA,gBAEA,yBAAyB,MAAM;AAAA,gBAC/B,6BAA6B,MAAM;AAAA,cACrC;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,SAAS,OAAO;AACd,qBAAW,MAAM,KAAK;AAAA,QACxB,UAAE;AACA,mBAAS,IAAI;AAAA,QACf;AAAA,MACF;AAAA,IACF,CAAC;AAGD,UAAM,mBAAmB,uBAA8C;AACvE,SAAK,YAAY,iBAAiB;AAClC,SAAK,cAAc,iBAAiB;AACpC,SAAK,cACH,YACI,iBAAiB,OAAO,YAAY,SAAS,IAC7C,iBAAiB,QACrB,YAAY,cAAc;AAE5B,UAAM,EAAE,YAAY,MAAM,IAAI,eAAe;AAAA,MAC3C,YAAY;AAAA,IACd,CAAC;AAED,UAAM,SAAS,UAAU,SAAS;AAElC,UAAM,0BAA0B,2BAA2B;AAAA,MACzD;AAAA
,MACA;AAAA,MACA;AAAA,MACA,UAAU,EAAE,GAAG,UAAU,WAAW;AAAA,IACtC,CAAC;AAED,UAAM,gBAAgB,kBAAkB;AAAA,MACtC,QAAQ,EAAE,QAAQ,QAAQ,SAAS;AAAA,MACnC;AAAA,IACF,CAAC;AAED,UAAM,OAAO;AAEb,eAAW;AAAA,MACT,MAAM;AAAA,MACN,YAAY,0BAA0B;AAAA,QACpC;AAAA,QACA,YAAY;AAAA,UACV,GAAG,sBAAsB,EAAE,aAAa,iBAAiB,UAAU,CAAC;AAAA,UACpE,GAAG;AAAA;AAAA,UAEH,aAAa;AAAA,YACX,OAAO,MAAM,KAAK,UAAU,EAAE,QAAQ,QAAQ,SAAS,CAAC;AAAA,UAC1D;AAAA,UACA,wBAAwB;AAAA,QAC1B;AAAA,MACF,CAAC;AAAA,MACD;AAAA,MACA,aAAa;AAAA,MACb,IAAI,OAAM,gBAAe;AACvB,mBAAW;AAEX,uBAAe,WAAW;AAAA,UACxB;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAAC;AAAA,UACA;AAAA,UACA;AAAA,QACF,GAOG;AAED,gBAAM,eACJ,iBAAiB,WAAW,IAAI,cAAc,OAAO;AAEvD,gBAAM,oBAAoB;AAAA,YACxB,GAAG,cAAc;AAAA,YACjB,GAAG;AAAA,UACL;AAEA,gBAAM,iBAAiB,MAAM,6BAA6B;AAAA,YACxD,QAAQ;AAAA,cACN,MAAM;AAAA,cACN,QAAQ,cAAc;AAAA,cACtB,UAAU;AAAA,YACZ;AAAA,YACA,wBAAwB,MAAM;AAAA,YAC9B,kBAAkB,MAAM;AAAA,UAC1B,CAAC;AAED,gBAAM,OAAO;AAAA,YACX,MAAM;AAAA,YACN,GAAG,0BAA0B,EAAE,OAAO,YAAY,YAAY,CAAC;AAAA,UACjE;AAEA,gBAAM;AAAA,YACJ,QAAQ,EAAE,QAAQ,UAAU,aAAa,QAAQ;AAAA,YACjD;AAAA,YACA;AAAA,UACF,IAAI,MAAM;AAAA,YAAM,MACd,WAAW;AAAA,cACT,MAAM;AAAA,cACN,YAAY,0BAA0B;AAAA,gBACpC;AAAA,gBACA,YAAY;AAAA,kBACV,GAAG,sBAAsB;AAAA,oBACvB,aAAa;AAAA,oBACb;AAAA,kBACF,CAAC;AAAA,kBACD,GAAG;AAAA,kBACH,oBAAoB;AAAA,oBAClB,OAAO,MAAM;AAAA,kBACf;AAAA,kBACA,sBAAsB;AAAA,oBACpB,OAAO,MAAM,KAAK,UAAU,cAAc;AAAA,kBAC5C;AAAA,kBACA,mBAAmB;AAAA;AAAA,oBAEjB,OAAO,MAAG;AA5sB9B,0BAAAD;AA4sBiC,8BAAAA,OAAA,KAAK,UAAL,gBAAAA,KAAY,IAAI,CAAAE,UAAQ,KAAK,UAAUA,KAAI;AAAA;AAAA,kBAC1D;AAAA,kBACA,wBAAwB;AAAA,oBACtB,OAAO,MACL,KAAK,cAAc,OACf,KAAK,UAAU,KAAK,UAAU,IAC9B;AAAA,kBACR;AAAA;AAAA,kBAGA,iBAAiB,MAAM;AAAA,kBACvB,wBAAwB,MAAM;AAAA,kBAC9B,oCAAoC,SAAS;AAAA,kBAC7C,6BAA6B,SAAS;AAAA,kBACtC,mCAAmC,SAAS;AAAA,kBAC5C,iCAAiC,SAAS;AAAA,kBAC1C,8BAA8B,SAAS;AAAA,kBACvC,wBAAwB,SAAS;AAAA,kBACjC,wBAAwB,SAAS;AAAA,gBACnC;AAAA,cACF,CAAC;AAAA,cACD;AAAA,cACA,aAAa;AAAA,cACb,IAAI,OAAMC,mBAAiB;AAAA,gBACzB,kBAAkBL,KAAI;AAAA;AAAA,gBACtB,cAAAK;AAAA,gBACA,QAAQ,MAAM,MAAM,SAAS;AAAA,kBAC3B;AAAA,kBACA,GAAG,oBAAoB,QAAQ;AAAA,kBAC/B,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF,CAAC;AAAA,UACH;AAEA,gBAAM,oBAAoB,uBAAuB;AAAA,YAC/C;AAAA,YACA,iBAAiB;AAAA,YACjB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA;AAAA,UACF,CAAC;AAED,gBAAM,cAAc,4BAAW,CAAC;AAChC,gBAAM,gBAAwC,CAAC;AAC/C,gBAAM,kBAA4C,CAAC;AACnD,cAAI,mBAAiC;AACrC,cAAI,YAAgC;AAAA,YAClC,cAAc;AAAA,YACd,kBAAkB;AAAA,YAClB,aAAa;AAAA,UACf;AACA,cAAI;AACJ,cAAI,iBAAiB;AACrB,cAAI,WAAW;AACf,cAAI,eAAeF,cAAa,aAAa,mBAAmB;AAChE,cAAI;AACJ,cAAI,eAAiE;AAAA,YACnE,IAAIF,YAAW;AAAA,YACf,WAAW,YAAY;AAAA,YACvB,SAAS,MAAM;AAAA,UACjB;AAGA,cAAI,cAAc;AAClB,cAAI,qBAAqB;AACzB,cAAI,qBAAqB;AACzB,cAAI,sBAAsB;AAE1B,yBAAe,iBAAiB;AAAA,YAC9B;AAAA,YACA;AAAA,UACF,GAGG;AACD,uBAAW,QAAQ,KAAK;AAExB,wBAAY,MAAM;AAClB,4BAAgB,MAAM;AACtB,iCAAqB;AACrB,kCAAsB,MAAM,UAAU,QAAQ,MAAM,MAAM;AAAA,UAC5D;AAEA,eAAK;AAAA,YACH,kBAAkB;AAAA,cAChB,IAAI,gBAGF;AAAA,gBACA,MAAM,UAAU,OAAO,YAA2B;AA9yBlE,sBAAAC,MAAA;AAgzBkB,sBAAI,gBAAgB;AAClB,0BAAM,iBAAiBF,KAAI,IAAI;AAE/B,qCAAiB;AAEjB,iCAAa,SAAS,wBAAwB;AAAA,sBAC5C,8BAA8B;AAAA,oBAChC,CAAC;AAED,iCAAa,cAAc;AAAA,sBACzB,8BAA8B;AAAA,oBAChC,CAAC;AAAA,kBACH;AAGA,sBACE,MAAM,SAAS,gBACf,MAAM,UAAU,WAAW,GAC3B;AACA;AAAA,kBACF;AAEA,wBAAM,YAAY,MAAM;AACxB,0BAAQ,WAAW;AAAA,oBACjB,KAAK,cAAc;AACjB,0BAAI,eAAe;AAGjB,8BAAM,mBACJ,sBAAsB,uBAClB,MAAM,UAAU,UAAU,IAC1B,MAAM;AAEZ,4BAAI,iBAAiB,WAAW,GAAG;AACjC;AAAA,wBACF;AAEA,6CAAqB;AACrB,uCAAe;AAEf,8BAAM,QAAQ,sBAAsB,WAAW;AAG/C,4BAAI,SAAS,MAAM;AACjB,wCAAc,MAAM;AAEpB,gCAAM,iBAAiB;AAAA,4BACrB;AAAA,4BACA,OAAO;AAAA,8BACL,MAAM;AAAA,8BACN,WAAW,MA
AM,SAAS,MAAM;AAAA,4BAClC;AAAA,0BACF,CAAC;AAAA,wBACH;AAAA,sBACF,OAAO;AACL,8BAAM,iBAAiB,EAAE,YAAY,MAAM,CAAC;AAAA,sBAC9C;AAEA;AAAA,oBACF;AAAA,oBAEA,KAAK,aAAa;AAChB,iCAAW,QAAQ,KAAK;AAExB,oCAAc,KAAK,KAAK;AACxB;AAAA,oBACF;AAAA,oBAEA,KAAK,eAAe;AAClB,iCAAW,QAAQ,KAAK;AAExB,sCAAgB,KAAK,KAAK;AAC1B;AAAA,oBACF;AAAA,oBAEA,KAAK,qBAAqB;AACxB,qCAAe;AAAA,wBACb,KAAIE,OAAA,MAAM,OAAN,OAAAA,OAAY,aAAa;AAAA,wBAC7B,YAAW,WAAM,cAAN,YAAmB,aAAa;AAAA,wBAC3C,UAAS,WAAM,YAAN,YAAiB,aAAa;AAAA,sBACzC;AACA;AAAA,oBACF;AAAA,oBAEA,KAAK,UAAU;AAGb,kCAAY,MAAM;AAClB,yCAAmB,MAAM;AACzB,6CACE,MAAM;AACR,qCAAe,MAAM;AAIrB,4BAAM,aAAaF,KAAI,IAAI;AAC3B,mCAAa,SAAS,kBAAkB;AACxC,mCAAa,cAAc;AAAA,wBACzB,0BAA0B;AAAA,wBAC1B,4CACG,MAAO,UAAU,mBAAoB;AAAA,sBAC1C,CAAC;AAED;AAAA,oBACF;AAAA,oBAEA,KAAK;AAAA,oBACL,KAAK,mBAAmB;AACtB,iCAAW,QAAQ,KAAK;AACxB;AAAA,oBACF;AAAA,oBAEA,KAAK,SAAS;AACZ,iCAAW,QAAQ,KAAK;AACxB,yCAAmB;AACnB;AAAA,oBACF;AAAA,oBAEA,SAAS;AACP,4BAAM,kBAAyB;AAC/B,4BAAM,IAAI,MAAM,uBAAuB,eAAe,EAAE;AAAA,oBAC1D;AAAA,kBACF;AAAA,gBACF;AAAA;AAAA,gBAGA,MAAM,MAAM,YAAY;AACtB,wBAAM,oBACJ,cAAc,SAAS,IACnB,KAAK,UAAU,aAAa,IAC5B;AAGN,sBAAI,eACF;AACF,sBAAI,cAAc,IAAI,UAAU;AAC9B,wBACE,iBACA,qBAAqB;AAAA,oBAErB,cAAc,WAAW,GACzB;AACA,qCAAe;AAAA,oBACjB;AAAA;AAAA,sBAEE,cAAc,SAAS;AAAA,sBAEvB,gBAAgB,WAAW,cAAc;AAAA,sBACzC;AACA,qCAAe;AAAA,oBACjB;AAAA,kBACF;AAIA,sBACE,iBACA,YAAY,SAAS,MACpB,iBAAiB;AAAA,kBACfG,cAAa,cAAc,CAAC,qBAC/B;AACA,0BAAM,iBAAiB;AAAA,sBACrB;AAAA,sBACA,OAAO;AAAA,wBACL,MAAM;AAAA,wBACN,WAAW;AAAA,sBACb;AAAA,oBACF,CAAC;AACD,kCAAc;AAAA,kBAChB;AAGA,sBAAI;AACF,iCAAa;AAAA,sBACX,0BAA0B;AAAA,wBACxB;AAAA,wBACA,YAAY;AAAA,0BACV,4BAA4B;AAAA,0BAC5B,oBAAoB,EAAE,QAAQ,MAAM,SAAS;AAAA,0BAC7C,yBAAyB;AAAA,4BACvB,QAAQ,MAAM;AAAA,0BAChB;AAAA,0BACA,kBAAkB,aAAa;AAAA,0BAC/B,qBAAqB,aAAa;AAAA,0BAClC,yBACE,aAAa,UAAU,YAAY;AAAA,0BAErC,yBAAyB,UAAU;AAAA,0BACnC,6BACE,UAAU;AAAA;AAAA,0BAGZ,kCAAkC,CAAC,gBAAgB;AAAA,0BACnD,sBAAsB,aAAa;AAAA,0BACnC,yBAAyB,aAAa;AAAA,0BACtC,6BAA6B,UAAU;AAAA,0BACvC,8BACE,UAAU;AAAA,wBACd;AAAA,sBACF,CAAC;AAAA,oBACH;AAAA,kBACF,SAAS,OAAO;AAAA,kBAEhB,UAAE;AAEA,iCAAa,IAAI;AAAA,kBACnB;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,OAAO;AAAA,oBACP,+BAA+B;AAAA,oBAC/B,UAAU;AAAA,oBACV,SAAS;AAAA,oBACT,UAAU;AAAA,sBACR,GAAG;AAAA,sBACH,SAAS,2CAAa;AAAA,oBACxB;AAAA,oBACA;AAAA,oBACA,aAAa,iBAAiB;AAAA,kBAChC,CAAC;AAED,wBAAM,gBAAgB,sBAAsB,OAAO,SAAS;AAE5D,sBAAI,iBAAiB,QAAQ;AAC3B,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,cAAc;AAAA,sBACd,OAAO;AAAA,sBACP,+BAA+B;AAAA,sBAC/B,UAAU;AAAA,sBACV,UAAU;AAAA,wBACR,GAAG;AAAA,wBACH,SAAS,2CAAa;AAAA,sBACxB;AAAA,oBACF,CAAC;AAED,yBAAK,YAAY;AAAA,kBACnB,OAAO;AAEL,wBAAIA,cAAa,YAAY;AAI3B,4BAAM,cAAc,iBAClB,iBAAiB,SAAS,CAC5B;AAEA,0BAAI,OAAO,YAAY,YAAY,UAAU;AAC3C,oCAAY,WAAW;AAAA,sBACzB,OAAO;AACL,oCAAY,QAAQ,KAAK;AAAA,0BACvB,MAAM;AAAA,0BACN,MAAM;AAAA,wBACR,CAAC;AAAA,sBACH;AAAA,oBACF,OAAO;AACL,uCAAiB;AAAA,wBACf,GAAG,mBAAmB;AAAA,0BACpB,MAAM;AAAA,0BACN,OAAO,wBAAU,CAAC;AAAA,0BAClB,WAAW;AAAA,0BACX,aAAa;AAAA,wBACf,CAAC;AAAA,sBACH;AAAA,oBACF;AAEA,0BAAM,WAAW;AAAA,sBACf,aAAa,cAAc;AAAA,sBAC3B;AAAA,sBACA,OAAO;AAAA,sBACP,UAAU;AAAA,sBACV,kBAAkB;AAAA,sBAClB,sBAAsB;AAAA,oBACxB,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAGA,cAAM,WAAW;AAAA,UACf,aAAa;AAAA,UACb,kBAAkB,CAAC;AAAA,UACnB,OAAO;AAAA,YACL,cAAc;AAAA,YACd,kBAAkB;AAAA,YAClB,aAAa;AAAA,UACf;AAAA,UACA,kBAAkB;AAAA,UAClB,UAAU;AAAA,UACV,sBAAsB;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF,CAAC,EAAE,MAAM,WAAS;AAEhB,WAAK;AAAA,QACH,IAAI,eAAe;AAAA,UACjB,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,SAAS,MAAM,CAAC;AAC3C,uBAAW,MAAM;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AACA,WAAK,YAAY;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,W
AAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA,EAEA,IAAI,eAAe;AACjB,WAAO,KAAK,oBAAoB;AAAA,EAClC;AAAA,EAEA,IAAI,gCAAgC;AAClC,WAAO,KAAK,wBAAwB;AAAA,EACtC;AAAA,EAEA,IAAI,OAAO;AACT,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,IAAI,YAAY;AACd,WAAO,KAAK,iBAAiB;AAAA,EAC/B;AAAA,EAEA,IAAI,cAAc;AAChB,WAAO,KAAK,mBAAmB;AAAA,EACjC;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,YAAY;AAClB,UAAM,CAAC,SAAS,OAAO,IAAI,KAAK,WAAW,IAAI;AAC/C,SAAK,aAAa;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,aAA0C;AAC5C,WAAO;AAAA,MACL,KAAK,UAAU,EAAE;AAAA,QACf,IAAI,gBAA+C;AAAA,UACjD,UAAU,OAAO,YAAY;AAC3B,gBAAI,MAAM,SAAS,cAAc;AAC/B,yBAAW,QAAQ,MAAM,SAAS;AAAA,YACpC,WAAW,MAAM,SAAS,SAAS;AACjC,yBAAW,MAAM,MAAM,KAAK;AAAA,YAC9B;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,aAAyD;AAC3D,WAAO,0BAA0B,KAAK,UAAU,CAAC;AAAA,EACnD;AAAA,EAEQ,qBAAqB;AAAA,IAC3B,iBAAAG,mBAAkB,MAAM;AAAA;AAAA,IACxB,YAAY;AAAA,EACd,IAGI,CAAC,GAAqC;AACxC,QAAI,qBAAqB;AAEzB,UAAM,sBAAsB,IAAI,gBAG9B;AAAA,MACA,MAAM,UAAU,OAAO,YAA2B;AAChD,mBAAW,QAAQ,KAAK;AAExB,YAAI,MAAM,SAAS,cAAc;AAC/B,gCAAsB,MAAM;AAAA,QAC9B;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,yBAAyB,IAAI,gBAGjC;AAAA,MACA,WAAW,OAAO,OAAO,eAAe;AACtC,cAAM,YAAY,MAAM;AACxB,gBAAQ,WAAW;AAAA,UACjB,KAAK,cAAc;AACjB,uBAAW,YAAQ,uCAAqB,QAAQ,MAAM,SAAS,CAAC;AAChE;AAAA,UACF;AAAA,UAEA,KAAK,6BAA6B;AAChC,uBAAW;AAAA,kBACT,uCAAqB,6BAA6B;AAAA,gBAChD,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM;AAAA,cAClB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,mBAAmB;AACtB,uBAAW;AAAA,kBACT,uCAAqB,mBAAmB;AAAA,gBACtC,YAAY,MAAM;AAAA,gBAClB,eAAe,MAAM;AAAA,cACvB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,aAAa;AAChB,uBAAW;AAAA,kBACT,uCAAqB,aAAa;AAAA,gBAChC,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM;AAAA,gBAChB,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,eAAe;AAClB,uBAAW;AAAA,kBACT,uCAAqB,eAAe;AAAA,gBAClC,YAAY,MAAM;AAAA,gBAClB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,SAAS;AACZ,uBAAW;AAAA,kBACT,uCAAqB,SAASA,iBAAgB,MAAM,KAAK,CAAC;AAAA,YAC5D;AACA;AAAA,UACF;AAAA,UAEA,KAAK,eAAe;AAClB,uBAAW;AAAA,kBACT,uCAAqB,eAAe;AAAA,gBAClC,cAAc,MAAM;AAAA,gBACpB,OAAO,YACH;AAAA,kBACE,cAAc,MAAM,MAAM;AAAA,kBAC1B,kBAAkB,MAAM,MAAM;AAAA,gBAChC,IACA;AAAA,gBACJ,aAAa,MAAM;AAAA,cACrB,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,KAAK,UAAU;AACb,uBAAW;AAAA,kBACT,uCAAqB,kBAAkB;AAAA,gBACrC,cAAc,MAAM;AAAA,gBACpB,OAAO,YACH;AAAA,kBACE,cAAc,MAAM,MAAM;AAAA,kBAC1B,kBAAkB,MAAM,MAAM;AAAA,gBAChC,IACA;AAAA,cACN,CAAC;AAAA,YACH;AACA;AAAA,UACF;AAAA,UAEA,SAAS;AACP,kBAAM,kBAAyB;AAC/B,kBAAM,IAAI,MAAM,uBAAuB,eAAe,EAAE;AAAA,UAC1D;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK,WACT,YAAY,mBAAmB,EAC/B,YAAY,sBAAsB;AAAA,EACvC;AAAA,EAEA,yBACE,UACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBAAAA;AAAA,IACA;AAAA,EACF,IAII,CAAC,GACL;AACA,0BAAsB;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS,2BAA2B,SAAS;AAAA,QAC3C,aAAa;AAAA,QACb,mBAAmB;AAAA,MACrB,CAAC;AAAA,MACD,QAAQ,KAAK,aAAa,EAAE,MAAM,iBAAAA,kBAAiB,UAAU,CAAC;AAAA,IAChE,CAAC;AAAA,EACH;AAAA,EAEA,yBAAyB,UAA0B,MAAqB;AACtE,0BAAsB;AAAA,MACpB;AAAA,MACA,QAAQ,6BAAM;AAAA,MACd,YAAY,6BAAM;AAAA,MAClB,SAAS,2BAA2B,6BAAM,SAAS;AAAA,QACjD,aAAa;AAAA,MACf,CAAC;AAAA,MACD,QAAQ,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC;AAAA,IAC7D,CAAC;AAAA,EACH;AAAA;AAAA,EAGA,aAAa,SAIV;AACD,UAAM,SAAS,KAAK,qBAAqB;AAAA,MACvC,iBAAiB,mCAAS;AAAA,MAC1B,WAAW,mCAAS;AAAA,IACtB,CAAC,EAAE,YAAY,IAAI,kBAAkB,CAAC;AAEtC,YAAO,mCAAS,QAAO,aAAa,mCAAS,KAAK,QAAQ,MAAM,IAAI;AAAA,EACtE;AAAA,EAEA,oBAAoB,QAA0B;AAC5C,WAAO;
AAAA,MACL,KAAK,qBAAqB;AAAA,QACxB,iBAAiB,OAAO;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,qBAAqB;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBAAAA;AAAA,IACA;AAAA,EACF,IAII,CAAC,GAAa;AAChB,WAAO,IAAI;AAAA,MACT,KAAK,aAAa,EAAE,MAAM,iBAAAA,kBAAiB,UAAU,CAAC;AAAA,MACtD;AAAA,QACE;AAAA,QACA;AAAA,QACA,SAAS,uBAAuB,SAAS;AAAA,UACvC,aAAa;AAAA,UACb,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,qBAAqB,MAA+B;AAr4CtD,QAAAJ;AAs4CI,WAAO,IAAI,SAAS,KAAK,WAAW,YAAY,IAAI,kBAAkB,CAAC,GAAG;AAAA,MACxE,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,MACxB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,QAC7C,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;;;AGn4CO,SAAS,aAAqD;AAAA,EACnE,YAAY;AAAA,EACZ,WAAW,EAAE,OAAAK,SAAQ,MAAc,IAAI,CAAC;AAC1C,IASI,CAAC,GAE+D;AAClE,MAAI,SAAS;AAEb,SAAO,MACL,IAAI,gBAA8D;AAAA,IAChE,MAAM,UAAU,OAAO,YAAY;AACjC,UAAI,MAAM,SAAS,eAAe;AAChC,YAAI,OAAO,SAAS,GAAG;AACrB,qBAAW,QAAQ,EAAE,MAAM,cAAc,WAAW,OAAO,CAAC;AAC5D,mBAAS;AAAA,QACX;AAEA,mBAAW,QAAQ,KAAK;AACxB;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,cAAc;AAC/B,mBAAW,QAAQ,KAAK;AACxB;AAAA,MACF;AAEA,gBAAU,MAAM;AAIhB,YAAM,SAAS;AACf,aAAO,OAAO,KAAK,MAAM,GAAG;AAC1B,cAAMC,SAAQ,OAAO,MAAM,MAAM,EAAG,CAAC;AACrC,mBAAW,QAAQ,EAAE,MAAM,cAAc,WAAWA,OAAM,CAAC;AAC3D,iBAAS,OAAO,MAAMA,OAAM,MAAM;AAElC,YAAI,YAAY,GAAG;AACjB,gBAAMD,OAAM,SAAS;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACL;;;AC9CO,IAAM,iCAAiC,CAAC;AAAA,EAC7C;AAAA,EACA,YAAY,EAAE,iBAAiB,cAAc,WAAW;AAAA,EACxD;AAAA,EACA;AACF,MAKuB;AACrB,iBAAe,YAAY;AAAA,IACzB;AAAA,IACA;AAAA,EACF,GAGG;AACD,WAAO,kBAAkB,MAAM,gBAAgB,EAAE,QAAQ,KAAK,CAAC,IAAI;AAAA,EACrE;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IAEtB,UAAU,kCAAc,MAAM;AAAA,IAC9B,SAAS,4BAAW,MAAM;AAAA,IAE1B,6BAA6B,MAAM;AAAA,IACnC,mBAAmB,MAAM;AAAA,IACzB,aAAa,MAAM;AAAA,IACnB,2BAA2B,MAAM;AAAA,IAEjC,MAAM,WACJ,QAC6D;AAC7D,YAAM,oBAAoB,MAAM,YAAY,EAAE,QAAQ,MAAM,WAAW,CAAC;AACxE,YAAM,aAAa,YAAY,MAAM,WAAW,iBAAiB;AACjE,aAAO,eACH,aAAa,EAAE,YAAY,QAAQ,mBAAmB,MAAM,CAAC,IAC7D,WAAW;AAAA,IACjB;AAAA,IAEA,MAAM,SACJ,QAC2D;AAC3D,YAAM,oBAAoB,MAAM,YAAY,EAAE,QAAQ,MAAM,SAAS,CAAC;AACtE,YAAM,WAAW,YAAY,MAAM,SAAS,iBAAiB;AAC7D,aAAO,aACH,WAAW,EAAE,UAAU,QAAQ,mBAAmB,MAAM,CAAC,IACzD,SAAS;AAAA,IACf;AAAA,EACF;AACF;;;ACjEA,IAAAE,oBAAiC;AAa1B,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF,GAIa;AACX,SAAO;AAAA,IACL,cAAc,SAAkC;AAC9C,UAAI,kBAAkB,QAAQ,WAAW,gBAAgB;AACvD,eAAO,eAAe,OAAO;AAAA,MAC/B;AAEA,UAAI,kBAAkB;AACpB,eAAO,iBAAiB,cAAc,OAAO;AAAA,MAC/C;AAEA,YAAM,IAAI,mCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,IACpE;AAAA,IAEA,mBAAmB,SAA2C;AAC5D,UAAI,uBAAuB,QAAQ,WAAW,qBAAqB;AACjE,eAAO,oBAAoB,OAAO;AAAA,MACpC;AAEA,UAAI,kBAAkB;AACpB,eAAO,iBAAiB,mBAAmB,OAAO;AAAA,MACpD;AAEA,YAAM,IAAI,mCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,IACzE;AAAA,EACF;AACF;;;ACjDA,IAAAC,oBAA6C;AAE7C,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AAJhC,IAAAE;AAMO,IAAM,sBAAN,cAAkC,mCAAiB;AAAA,EAMxD,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,qBAAqB,UAAU,0BAA0B,mBAAmB,KAAK,CAAC;AAAA,EAC9F,GAMG;AACD,UAAM,EAAE,WAAWH,QAAM,SAAS,WAAW,QAAQ,CAAC;AAlBxD,SAAkBG,QAAU;AAoB1B,SAAK,aAAa;AAClB,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEA,OAAO,WAAW,OAA8C;AAC9D,WAAO,6BAAW,UAAU,OAAOF,QAAM;AAAA,EAC3C;AACF;AA3BoBE,OAAAD;;;ACPpB,IAAAE,oBAAiC;AAO1B,SAAS,oCACd,WACU;AACV,QAAM,WAAW,IAAI,wBAAwB;AAE7C,aAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,SAAS,GAAG;AACtD,aAAS,iBAAiB,EAAE,IAAI,SAAS,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;AAEA,IAAM,0BAAN,MAAkD;AAAA,EAAlD;AACE,SAAQ,YAAsC,CAAC;AAAA;AAAA,EAE/C,iBAAiB,EAAE,IAAI,SAAS,GAA6C;AAC3E,SAAK,UAAU,EAAE,IAAI;AAAA,EACvB;AAAA,EAEQ,YAAY,IAAsB;AACxC,UAAM,WAAW,KAAK,UAAU,EAAE;AAElC,QAAI,YAAY,MAAM;AACpB,YAAM,IAAI,oBAAoB;AAAA,QAC5B,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA
,QACZ,oBAAoB,OAAO,KAAK,KAAK,SAAS;AAAA,MAChD,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,QACN,IACA,WACkB;AAClB,UAAM,QAAQ,GAAG,QAAQ,GAAG;AAE5B,QAAI,UAAU,IAAI;AAChB,YAAM,IAAI,mCAAiB;AAAA,QACzB,SAAS;AAAA,QACT;AAAA,QACA,SACE,WAAW,SAAS,qBAAqB,EAAE;AAAA,MAE/C,CAAC;AAAA,IACH;AAEA,WAAO,CAAC,GAAG,MAAM,GAAG,KAAK,GAAG,GAAG,MAAM,QAAQ,CAAC,CAAC;AAAA,EACjD;AAAA,EAEA,cAAc,IAA2B;AA5D3C,QAAAC,MAAA;AA6DI,UAAM,CAAC,YAAY,OAAO,IAAI,KAAK,QAAQ,IAAI,eAAe;AAC9D,UAAM,SAAQ,MAAAA,OAAA,KAAK,YAAY,UAAU,GAAE,kBAA7B,wBAAAA,MAA6C;AAE3D,QAAI,SAAS,MAAM;AACjB,YAAM,IAAI,mCAAiB,EAAE,SAAS,IAAI,WAAW,gBAAgB,CAAC;AAAA,IACxE;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,mBAAmB,IAAoC;AAvEzD,QAAAA;AAwEI,UAAM,CAAC,YAAY,OAAO,IAAI,KAAK,QAAQ,IAAI,oBAAoB;AACnE,UAAM,WAAW,KAAK,YAAY,UAAU;AAE5C,UAAM,SAAQA,OAAA,SAAS,uBAAT,gBAAAA,KAAA,eAA8B;AAE5C,QAAI,SAAS,MAAM;AACjB,YAAM,IAAI,mCAAiB;AAAA,QACzB,SAAS;AAAA,QACT,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,IAAoC;AAChD,WAAO,KAAK,mBAAmB,EAAE;AAAA,EACnC;AACF;;;ACuBO,SAAS,KACdC,OAC8B;AAC9B,SAAOA;AACT;;;AC9GO,SAAS,iBAAiB,SAAmB,SAAmB;AACrE,MAAI,QAAQ,WAAW,QAAQ,QAAQ;AACrC,UAAM,IAAI;AAAA,MACR,+CAA+C,QAAQ,MAAM,uBAAuB,QAAQ,MAAM;AAAA,IACpG;AAAA,EACF;AAEA,SACE,WAAW,SAAS,OAAO,KAAK,UAAU,OAAO,IAAI,UAAU,OAAO;AAE1E;AAQA,SAAS,WAAW,SAAmB,SAAmB;AACxD,SAAO,QAAQ;AAAA,IACb,CAAC,aAAqB,OAAe,UACnC,cAAc,QAAQ,QAAQ,KAAK;AAAA,IACrC;AAAA,EACF;AACF;AAOA,SAAS,UAAU,QAAkB;AACnC,SAAO,KAAK,KAAK,WAAW,QAAQ,MAAM,CAAC;AAC7C;;;AC3CA,IAAAC,oBAIO;AA0CA,SAAS,kBACd,EAAE,UAAU,UAAU,GACtBC,UACU;AACV,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,MAAM,YAAY;AAnD5B,UAAAC;AAoDM,YAAM,cAAc,IAAI,YAAY;AAEpC,YAAM,cAAc,CAAC,YAA8B;AACjD,mBAAW;AAAA,UACT,YAAY;AAAA,gBACV,6CAA0B,qBAAqB,OAAO;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,kBAAkB,CAAC,YAAyB;AAChD,mBAAW;AAAA,UACT,YAAY;AAAA,gBACV,6CAA0B,gBAAgB,OAAO;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,YAAY,CAAC,iBAAyB;AAC1C,mBAAW;AAAA,UACT,YAAY,WAAO,6CAA0B,SAAS,YAAY,CAAC;AAAA,QACrE;AAAA,MACF;AAEA,YAAM,gBAAgB,OAAOC,YAAgB;AA5EnD,YAAAD,MAAA;AA6EQ,YAAI,SAA0B;AAE9B,yBAAiB,SAASC,SAAQ;AAChC,kBAAQ,MAAM,OAAO;AAAA,YACnB,KAAK,0BAA0B;AAC7B,yBAAW;AAAA,gBACT,YAAY;AAAA,sBACV,6CAA0B,qBAAqB;AAAA,oBAC7C,IAAI,MAAM,KAAK;AAAA,oBACf,MAAM;AAAA,oBACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,EAAE,OAAO,GAAG,EAAE,CAAC;AAAA,kBACjD,CAAC;AAAA,gBACH;AAAA,cACF;AACA;AAAA,YACF;AAAA,YAEA,KAAK,wBAAwB;AAC3B,oBAAM,WAAUD,OAAA,MAAM,KAAK,MAAM,YAAjB,gBAAAA,KAA2B;AAE3C,mBAAI,mCAAS,UAAS,YAAU,aAAQ,SAAR,mBAAc,UAAS,MAAM;AAC3D,2BAAW;AAAA,kBACT,YAAY;AAAA,wBACV,6CAA0B,QAAQ,QAAQ,KAAK,KAAK;AAAA,kBACtD;AAAA,gBACF;AAAA,cACF;AAEA;AAAA,YACF;AAAA,YAEA,KAAK;AAAA,YACL,KAAK,8BAA8B;AACjC,uBAAS,MAAM;AACf;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAGA,iBAAW;AAAA,QACT,YAAY;AAAA,cACV,6CAA0B,0BAA0B;AAAA,YAClD;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI;AACF,cAAMD,SAAQ;AAAA,UACZ;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAO;AACd,mBAAWC,OAAA,MAAc,YAAd,OAAAA,OAAyB,GAAG,KAAK,EAAE;AAAA,MAChD,UAAE;AACA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,IACA,KAAK,YAAY;AAAA,IAAC;AAAA,IAClB,SAAS;AAAA,IAAC;AAAA,EACZ,CAAC;AAED,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,IAClB;AAAA,EACF,CAAC;AACH;;;ACvJA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAE,oBAAqC;;;AC8C9B,SAAS,2BACd,YAAyC,CAAC,GACL;AACrC,QAAM,cAAc,IAAI,YAAY;AACpC,MAAI,qBAAqB;AAEzB,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,QAAuB;AAC3B,UAAI,UAAU;AAAS,cAAM,UAAU,QAAQ;AAAA,IACjD;AAAA,IAEA,MAAM,UAAU,SAAS,YAA2B;AAClD,iBAAW,QAAQ,YAAY,OAAO,OAAO,CAAC;AAE9C,4BAAsB;AAEtB,UAAI,UAAU;AAAS,cAAM,UAAU,QAAQ,OAAO;AACtD,UAAI,UAAU,UAAU,OAAO,YAAY,UAAU;AACnD,cAAM,UAAU,OAAO,OAAO;AAAA,MAChC;AAAA,IACF;AAAA,IAEA,MAAM,QAAuB;AAC3B,UAAI,UAAU,cAAc
;AAC1B,cAAM,UAAU,aAAa,kBAAkB;AAAA,MACjD;AACA,UAAI,UAAU,SAAS;AACrB,cAAM,UAAU,QAAQ,kBAAkB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AD5BA,SAAS,qBACP,QAIA,WACA;AACA,SAAO,OACJ;AAAA,IACC,IAAI,gBAEF;AAAA,MACA,WAAW,OAAO,OAAO,eAAe;AA7DhD,YAAAC;AA+DU,YAAI,OAAO,UAAU,UAAU;AAC7B,qBAAW,QAAQ,KAAK;AACxB;AAAA,QACF;AAGA,YAAI,WAAW,OAAO;AAEpB,cAAI,MAAM,UAAU,wBAAwB;AAC1C;AAAA,eACEA,OAAA,MAAM,SAAN,gBAAAA,KAAY;AAAA,cACZ;AAAA,YACF;AAAA,UACF;AACA;AAAA,QACF;AAGA,8BAAsB,OAAO,UAAU;AAAA,MACzC;AAAA,IACF,CAAC;AAAA,EACH,EACC,YAAY,2BAA2B,SAAS,CAAC,EACjD,YAAY,IAAI,kBAAkB,CAAC,EACnC;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,YAAQ,wCAAqB,QAAQ,KAAK,CAAC;AAAA,MACxD;AAAA,IACF,CAAC;AAAA,EACH;AACJ;AASO,SAAS,aACd,QAIA,WACA;AACA,SAAO,qBAAqB,QAAQ,SAAS,EAAE;AAAA,IAC7C,IAAI,kBAAkB;AAAA,EACxB;AACF;AAEO,SAAS,qBACd,QAIA,SAKA;AA7HF,MAAAA;AA8HE,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,mCAAS;AAAA,EACX,EAAE,YAAY,IAAI,kBAAkB,CAAC;AACrC,QAAM,OAAO,mCAAS;AACtB,QAAM,OAAO,mCAAS;AAEtB,QAAM,iBAAiB,OACnB,aAAa,KAAK,QAAQ,UAAU,IACpC;AAEJ,SAAO,IAAI,SAAS,gBAAgB;AAAA,IAClC,SAAQA,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,IACxB,YAAY,6BAAM;AAAA,IAClB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,MAC7C,aAAa;AAAA,MACb,mBAAmB;AAAA,IACrB,CAAC;AAAA,EACH,CAAC;AACH;AAEO,SAAS,oBACd,QAIA,SACA;AACA,UAAQ,WAAW,MAAM,qBAAqB,QAAQ,QAAQ,SAAS,CAAC;AAC1E;AAEA,SAAS,sBACP,OACA,YACA;AACA,MAAI,OAAO,MAAM,YAAY,UAAU;AACrC,eAAW,QAAQ,MAAM,OAAO;AAAA,EAClC,OAAO;AACL,UAAM,UAA4C,MAAM;AACxD,eAAW,QAAQ,SAAS;AAC1B,UAAI,KAAK,SAAS,QAAQ;AACxB,mBAAW,QAAQ,KAAK,IAAI;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AACF;;;AE3KA;AAAA;AAAA,6BAAAC;AAAA,EAAA,oBAAAC;AAAA,EAAA,4BAAAC;AAAA;AAAA,IAAAC,0BAAqD;AACrD,IAAAC,oBAAqC;AAcrC,SAASC,sBACP,QACA,WACA;AACA,QAAM,YAAY,kBAAkB;AAEpC,aAAO,8DAAqC,OAAO,OAAO,aAAa,EAAE,CAAC,EACvE;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,MAAM,UAAU,SAAS,YAA2B;AAClD,mBAAW,QAAQ,UAAU,QAAQ,KAAK,CAAC;AAAA,MAC7C;AAAA,IACF,CAAC;AAAA,EACH,EACC,YAAY,2BAA2B,SAAS,CAAC,EACjD,YAAY,IAAI,kBAAkB,CAAC,EACnC;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,YAAQ,wCAAqB,QAAQ,KAAK,CAAC;AAAA,MACxD;AAAA,IACF,CAAC;AAAA,EACH;AACJ;AAEO,SAASC,cACd,QACA,WACA;AACA,SAAOD,sBAAqB,QAAQ,SAAS,EAAE;AAAA,IAC7C,IAAI,kBAAkB;AAAA,EACxB;AACF;AAEO,SAASE,sBACd,QACA,UAII,CAAC,GACL;AAxDF,MAAAC;AAyDE,QAAM,EAAE,MAAM,MAAM,UAAU,IAAI;AAClC,QAAM,aAAaH,sBAAqB,QAAQ,SAAS,EAAE;AAAA,IACzD,IAAI,kBAAkB;AAAA,EACxB;AACA,QAAM,iBAAiB,OACnB,aAAa,KAAK,QAAQ,UAAU,IACpC;AAEJ,SAAO,IAAI,SAAS,gBAAgB;AAAA,IAClC,SAAQG,OAAA,6BAAM,WAAN,OAAAA,OAAgB;AAAA,IACxB,YAAY,6BAAM;AAAA,IAClB,SAAS,uBAAuB,6BAAM,SAAS;AAAA,MAC7C,aAAa;AAAA,MACb,mBAAmB;AAAA,IACrB,CAAC;AAAA,EACH,CAAC;AACH;AAEO,SAASC,qBACd,QACA,SAIA;AACA,UAAQ,WAAW,MAAMJ,sBAAqB,QAAQ,QAAQ,SAAS,CAAC;AAC1E;AAEA,SAAS,oBAA8C;AACrD,MAAI,gBAAgB;AAEpB,SAAO,CAACK,UAAyB;AAC/B,QAAI,eAAe;AACjB,MAAAA,QAAOA,MAAK,UAAU;AACtB,UAAIA;AAAM,wBAAgB;AAAA,IAC5B;AACA,WAAOA;AAAA,EACT;AACF;;;AC/FA,IAAAC,oBAAgD;;;ACIzC,IAAM,iCAAiC,KAAK;;;ADI5C,IAAM,aAAN,MAAiB;AAAA,EAStB,cAAc;AARd,SAAQ,UAAU,IAAI,YAAY;AAElC,SAAQ,aAA0D;AAGlE,SAAQ,WAAoB;AAC5B,SAAQ,iBAAwC;AAG9C,UAAM,OAAO;AAEb,SAAK,SAAS,IAAI,eAAe;AAAA,MAC/B,OAAO,OAAM,eAAc;AACzB,aAAK,aAAa;AAGlB,YAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,eAAK,iBAAiB,WAAW,MAAM;AACrC,oBAAQ;AAAA,cACN;AAAA,YACF;AAAA,UACF,GAAG,8BAA8B;AAAA,QACnC;AAAA,MACF;AAAA,MACA,MAAM,gBAAc;AAAA,MAEpB;AAAA,MACA,QAAQ,YAAU;AAChB,aAAK,WAAW;AAAA,MAClB;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,SAAK,WAAW,MAAM;AACtB,SAAK,WAAW;AAGhB,QAAI,KAAK,gBAAgB;AACvB,mBAAa,KAAK,cAAc;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,OAAO,OAAwB;AAC7B,QAAI,KAAK,UAAU;AACjB,YAAM,I
AAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,SAAK,WAAW;AAAA,MACd,KAAK,QAAQ,WAAO,wCAAqB,QAAQ,CAAC,KAAK,CAAC,CAAC;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,wBAAwB,OAAwB;AAC9C,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,SAAK,WAAW;AAAA,MACd,KAAK,QAAQ,WAAO,wCAAqB,uBAAuB,CAAC,KAAK,CAAC,CAAC;AAAA,IAC1E;AAAA,EACF;AACF;","names":["import_ui_utils","import_provider_utils","import_ui_utils","import_provider","import_provider","name","marker","symbol","_a","_a","name","import_api","name","attributes","_a","embedding","usage","embeddings","_a","usage","import_provider_utils","import_provider_utils","import_provider","name","marker","symbol","_a","text","import_provider","name","marker","symbol","_a","_a","import_provider_utils","import_provider","name","marker","symbol","_a","_a","import_provider","name","marker","symbol","_a","_a","import_provider","import_provider_utils","import_zod","import_zod","import_zod","import_zod","import_zod","import_zod","_a","import_provider","name","marker","symbol","_a","_a","tool","import_provider","import_provider_utils","import_ui_utils","_a","generateId","_a","span","_b","result","import_provider_utils","import_ui_utils","_a","_a","originalGenerateId","generateId","now","doStreamSpan","object","_a","error","import_provider_utils","import_provider","import_provider","name","marker","symbol","_a","import_provider","name","marker","symbol","_a","import_provider","name","marker","symbol","_a","import_provider","name","marker","symbol","_a","import_ui_utils","object","name","tool","text","text","import_provider_utils","import_ui_utils","tool","text","tool","originalGenerateId","generateId","_a","text","tool","span","_b","_c","_d","_e","_f","result","import_provider_utils","import_ui_utils","text","import_provider_utils","import_ui_utils","result","import_ui_utils","tool","originalGenerateId","now","generateId","_a","stepType","tool","doStreamSpan","getErrorMessage","delay","chunk","import_provider","import_provider","name","marker","symbol","_a","import_provider","_a","tool","import_ui_utils","process","_a","stream","import_ui_utils","_a","mergeIntoDataStream","toDataStream","toDataStreamResponse","import_provider_utils","import_ui_utils","toDataStreamInternal","toDataStream","toDataStreamResponse","_a","mergeIntoDataStream","text","import_ui_utils"]}