@ai-sdk/openai 2.0.0-canary.4 → 2.0.0-canary.5
This diff reflects the published contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
- package/CHANGELOG.md +8 -0
- package/dist/index.js +5 -22
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +5 -22
- package/dist/index.mjs.map +1 -1
- package/{internal/dist → dist/internal}/index.js +5 -22
- package/dist/internal/index.js.map +1 -0
- package/{internal/dist → dist/internal}/index.mjs +5 -22
- package/dist/internal/index.mjs.map +1 -0
- package/package.json +11 -12
- package/internal/dist/index.js.map +0 -1
- package/internal/dist/index.mjs.map +0 -1
- /package/{internal/dist → dist/internal}/index.d.mts +0 -0
- /package/{internal/dist → dist/internal}/index.d.ts +0 -0
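
The notable layout change is that the internal build output moves from package/internal/dist to package/dist/internal; the package.json change (+11 -12) presumably repoints the "./internal" subpath export at the new location. If so, consumers of the internal entry point are unaffected, since they import through the subpath rather than the file path. A minimal sketch, assuming the "./internal" export exists and re-exports OpenAIChatLanguageModel as the bundled source map further down shows; the concrete model id and config values are illustrative:

// Sketch only: assumes package.json maps "./internal" to ./dist/internal/index.*,
// so downstream code never references the moved file paths directly.
import { OpenAIChatLanguageModel } from '@ai-sdk/openai/internal';

// Constructor shape taken from the bundled sources below (modelId, settings, config).
const model = new OpenAIChatLanguageModel(
  'gpt-4o',
  {},
  {
    provider: 'openai.chat',
    compatibility: 'strict',
    url: ({ path }) => `https://api.openai.com/v1${path}`,
    headers: () => ({ Authorization: `Bearer ${process.env.OPENAI_API_KEY}` }),
  },
);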
@@ -632,8 +632,7 @@ var OpenAIChatLanguageModel = class {
         promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : NaN,
         completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
       },
-
-      request: { body: JSON.stringify(body) },
+      request: { body },
       response: {
         ...getResponseMetadata(response),
         headers: responseHeaders,
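
The substantive change in this hunk, repeated for the other models below, is that doGenerate now returns the request body as the raw argument object instead of a pre-serialized JSON string. A minimal sketch of how a caller that logged or stored request.body as text might adapt, assuming only the LanguageModelV2 type exported by '@ai-sdk/provider' as used in the bundled sources further down:

import type { LanguageModelV2 } from '@ai-sdk/provider';

// Sketch: recover a string body regardless of version.
// canary.4: result.request.body was already JSON.stringify(body).
// canary.5: result.request.body is the request object itself.
async function getRequestBodyText(
  model: LanguageModelV2,
  options: Parameters<LanguageModelV2['doGenerate']>[0],
): Promise<string | undefined> {
  const result = await model.doGenerate(options);
  const body = result.request?.body;
  return typeof body === 'string' ? body : body == null ? undefined : JSON.stringify(body);
}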
@@ -683,7 +682,6 @@ var OpenAIChatLanguageModel = class {
       });
       return {
         stream: simulatedStream,
-        rawCall: result.rawCall,
         response: result.response,
         warnings: result.warnings
       };
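
This hunk, together with the matching removals in the completion and responses models below, drops rawCall (rawPrompt / rawSettings) from the returned results. A minimal sketch of the caller-side adjustment, assuming result is the value returned by doGenerate or doStream on these classes and that the raw request information now lives only in request.body:

// Sketch: inspecting what was sent to the provider, before and after this change.
function logRawRequest(result: {
  rawCall?: { rawPrompt?: unknown; rawSettings?: unknown };
  request?: { body?: unknown };
}) {
  // canary.4: rawCall carried the raw prompt and settings separately.
  // canary.5: rawCall is gone; the full argument object is on request.body instead.
  console.log(result.rawCall ?? result.request?.body);
}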
@@ -893,9 +891,8 @@ var OpenAIChatLanguageModel = class {
           }
         })
       ),
-
+      request: { body },
       response: { headers: responseHeaders },
-      request: { body: JSON.stringify(body) },
       warnings
     };
   }
@@ -1226,7 +1223,6 @@ var OpenAICompletionLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { prompt: rawPrompt, ...rawSettings } = args;
     const choice = response.choices[0];
     return {
       text: choice.text,
@@ -1236,8 +1232,7 @@ var OpenAICompletionLanguageModel = class {
       },
       finishReason: mapOpenAIFinishReason(choice.finish_reason),
       logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
-
-      request: { body: JSON.stringify(args) },
+      request: { body: args },
       response: {
         ...getResponseMetadata(response),
         headers: responseHeaders,
@@ -1268,7 +1263,6 @@ var OpenAICompletionLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { prompt: rawPrompt, ...rawSettings } = args;
     let finishReason = "unknown";
     let usage = {
       promptTokens: Number.NaN,
@@ -1332,7 +1326,6 @@ var OpenAICompletionLanguageModel = class {
           }
         })
       ),
-      rawCall: { rawPrompt, rawSettings },
       response: { headers: responseHeaders },
       warnings,
       request: { body: JSON.stringify(body) }
@@ -2139,13 +2132,7 @@ var OpenAIResponsesLanguageModel = class {
         promptTokens: response.usage.input_tokens,
         completionTokens: response.usage.output_tokens
       },
-      rawCall: {
-        rawPrompt: void 0,
-        rawSettings: {}
-      },
-      request: {
-        body: JSON.stringify(body)
-      },
+      request: { body },
       response: {
         id: response.id,
         timestamp: new Date(response.created_at * 1e3),
@@ -2289,11 +2276,7 @@ var OpenAIResponsesLanguageModel = class {
           }
         })
       ),
-      rawCall: {
-        rawPrompt: void 0,
-        rawSettings: {}
-      },
-      request: { body: JSON.stringify(body) },
+      request: { body },
       response: { headers: responseHeaders },
       warnings
     };
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../src/internal/index.ts","../../src/openai-chat-language-model.ts","../../src/convert-to-openai-chat-messages.ts","../../src/map-openai-chat-logprobs.ts","../../src/map-openai-finish-reason.ts","../../src/openai-error.ts","../../src/get-response-metadata.ts","../../src/openai-prepare-tools.ts","../../src/openai-completion-language-model.ts","../../src/convert-to-openai-completion-prompt.ts","../../src/map-openai-completion-logprobs.ts","../../src/openai-embedding-model.ts","../../src/openai-image-model.ts","../../src/openai-image-settings.ts","../../src/openai-transcription-model.ts","../../src/responses/openai-responses-language-model.ts","../../src/responses/convert-to-openai-responses-messages.ts","../../src/responses/map-openai-responses-finish-reason.ts","../../src/responses/openai-responses-prepare-tools.ts"],"sourcesContent":["export * from '../openai-chat-language-model';\nexport * from '../openai-chat-settings';\nexport * from '../openai-completion-language-model';\nexport * from '../openai-completion-settings';\nexport * from '../openai-embedding-model';\nexport * from '../openai-embedding-settings';\nexport * from '../openai-image-model';\nexport * from '../openai-image-settings';\nexport * from '../openai-transcription-model';\nexport * from '../openai-transcription-settings';\nexport * from '../responses/openai-responses-language-model';\n","import {\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2LogProbs,\n LanguageModelV2ProviderMetadata,\n LanguageModelV2StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAIChatMessages } from './convert-to-openai-chat-messages';\nimport { mapOpenAIChatLogProbsOutput } from './map-openai-chat-logprobs';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport { OpenAIChatModelId, OpenAIChatSettings } from './openai-chat-settings';\nimport {\n openaiErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { prepareTools } from './openai-prepare-tools';\n\ntype OpenAIChatConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAIChatModelId;\n readonly settings: OpenAIChatSettings;\n\n private readonly config: OpenAIChatConfig;\n\n constructor(\n modelId: OpenAIChatModelId,\n settings: OpenAIChatSettings,\n config: OpenAIChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get supportsStructuredOutputs(): boolean {\n // enable structured outputs for reasoning models by default:\n // TODO in the next major version, remove this and always use json mode for models\n // that support structured outputs (blacklist other models)\n return this.settings.structuredOutputs ?? 
isReasoningModel(this.modelId);\n }\n\n get defaultObjectGenerationMode() {\n // audio models don't support structured outputs:\n if (isAudioModel(this.modelId)) {\n return 'tool';\n }\n\n return this.supportsStructuredOutputs ? 'json' : 'tool';\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportsImageUrls(): boolean {\n // image urls can be sent if downloadImages is disabled (default):\n return !this.settings.downloadImages;\n }\n\n private getArgs({\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;\n\n if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {\n throw new UnsupportedFunctionalityError({\n functionality: 'useLegacyFunctionCalling with parallelToolCalls',\n });\n }\n\n if (useLegacyFunctionCalling && this.supportsStructuredOutputs) {\n throw new UnsupportedFunctionalityError({\n functionality: 'structuredOutputs with useLegacyFunctionCalling',\n });\n }\n\n const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(\n {\n prompt,\n useLegacyFunctionCalling,\n systemMessageMode: getSystemMessageMode(this.modelId),\n },\n );\n\n warnings.push(...messageWarnings);\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n logit_bias: this.settings.logitBias,\n logprobs:\n this.settings.logprobs === true ||\n typeof this.settings.logprobs === 'number'\n ? true\n : undefined,\n top_logprobs:\n typeof this.settings.logprobs === 'number'\n ? this.settings.logprobs\n : typeof this.settings.logprobs === 'boolean'\n ? this.settings.logprobs\n ? 0\n : undefined\n : undefined,\n user: this.settings.user,\n parallel_tool_calls: this.settings.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n // TODO improve below:\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs && responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: true,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n stop: stopSequences,\n seed,\n\n // openai specific settings:\n // TODO remove in next major version; we auto-map maxTokens now\n max_completion_tokens: providerOptions?.openai?.maxCompletionTokens,\n store: providerOptions?.openai?.store,\n metadata: providerOptions?.openai?.metadata,\n prediction: providerOptions?.openai?.prediction,\n reasoning_effort:\n providerOptions?.openai?.reasoningEffort ??\n this.settings.reasoningEffort,\n\n // messages:\n messages,\n };\n\n if (isReasoningModel(this.modelId)) {\n // remove unsupported settings for reasoning models\n // see https://platform.openai.com/docs/guides/reasoning#limitations\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details: 'temperature is not supported for reasoning models',\n });\n }\n if (baseArgs.top_p != null) {\n baseArgs.top_p = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topP',\n details: 'topP is not supported for reasoning models',\n });\n }\n if (baseArgs.frequency_penalty != null) {\n baseArgs.frequency_penalty = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n details: 'frequencyPenalty is not supported for reasoning models',\n });\n }\n if (baseArgs.presence_penalty != null) {\n baseArgs.presence_penalty = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n details: 'presencePenalty is not supported for reasoning models',\n });\n }\n if (baseArgs.logit_bias != null) {\n baseArgs.logit_bias = undefined;\n warnings.push({\n type: 'other',\n message: 'logitBias is not supported for reasoning models',\n });\n }\n if (baseArgs.logprobs != null) {\n baseArgs.logprobs = undefined;\n warnings.push({\n type: 'other',\n message: 'logprobs is not supported for reasoning models',\n });\n }\n if (baseArgs.top_logprobs != null) {\n baseArgs.top_logprobs = undefined;\n warnings.push({\n type: 'other',\n message: 'topLogprobs is not supported for reasoning models',\n });\n }\n\n // reasoning models use max_completion_tokens instead of max_tokens:\n if (baseArgs.max_tokens != null) {\n if (baseArgs.max_completion_tokens == null) {\n baseArgs.max_completion_tokens = baseArgs.max_tokens;\n }\n baseArgs.max_tokens = undefined;\n }\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n functions,\n function_call,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useLegacyFunctionCalling,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n functions,\n function_call,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n 
});\n\n const { messages: rawPrompt, ...rawSettings } = body;\n const choice = response.choices[0];\n\n // provider metadata:\n const completionTokenDetails = response.usage?.completion_tokens_details;\n const promptTokenDetails = response.usage?.prompt_tokens_details;\n const providerMetadata: LanguageModelV2ProviderMetadata = { openai: {} };\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata.openai.acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata.openai.rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls:\n this.settings.useLegacyFunctionCalling && choice.message.function_call\n ? [\n {\n toolCallType: 'function',\n toolCallId: generateId(),\n toolName: choice.message.function_call.name,\n args: choice.message.function_call.arguments,\n },\n ]\n : choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? NaN,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),\n providerMetadata,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n\n const simulatedStream = new ReadableStream<LanguageModelV2StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n\n return {\n stream: simulatedStream,\n response: result.response,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: {\n promptTokens: number | undefined;\n completionTokens: number | undefined;\n } = {\n promptTokens: undefined,\n completionTokens: undefined,\n };\n let logprobs: LanguageModelV2LogProbs;\n let isFirstChunk = true;\n\n const { useLegacyFunctionCalling } = this.settings;\n\n const providerMetadata: LanguageModelV2ProviderMetadata = { openai: {} };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage = {\n promptTokens: prompt_tokens ?? undefined,\n completionTokens: completion_tokens ?? undefined,\n };\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n providerMetadata.openai.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n providerMetadata.openai.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n const mappedLogprobs = mapOpenAIChatLogProbsOutput(\n choice?.logprobs,\n );\n if (mappedLogprobs?.length) {\n if (logprobs === undefined) logprobs = [];\n logprobs.push(...mappedLogprobs);\n }\n\n const mappedToolCalls: typeof delta.tool_calls =\n useLegacyFunctionCalling && delta.function_call != null\n ? 
[\n {\n type: 'function',\n id: generateId(),\n function: delta.function_call,\n index: 0,\n },\n ]\n : delta.tool_calls;\n\n if (mappedToolCalls != null) {\n for (const toolCallDelta of mappedToolCalls) {\n const index = toolCallDelta.index;\n\n // Tool call start. OpenAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n logprobs,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nconst openaiTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n function_call: z\n .object({\n arguments: z.string(),\n name: z.string(),\n })\n .nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n logprobs: z\n .object({\n content: z\n .array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n top_logprobs: z.array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n }),\n ),\n }),\n )\n .nullable(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n function_call: z\n .object({\n name: z.string().optional(),\n arguments: z.string().optional(),\n })\n .nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n logprobs: z\n .object({\n content: z\n .array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n top_logprobs: z.array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n }),\n ),\n }),\n )\n .nullable(),\n })\n .nullish(),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: openaiTokenUsageSchema,\n }),\n openaiErrorDataSchema,\n]);\n\nfunction isReasoningModel(modelId: string) {\n return (\n modelId === 'o1' ||\n modelId.startsWith('o1-') ||\n modelId === 'o3' ||\n modelId.startsWith('o3-')\n );\n}\n\nfunction isAudioModel(modelId: string) {\n return modelId.startsWith('gpt-4o-audio-preview');\n}\n\nfunction getSystemMessageMode(modelId: string) {\n if (!isReasoningModel(modelId)) {\n return 'system';\n }\n\n return (\n reasoningModels[modelId as keyof typeof reasoningModels]\n ?.systemMessageMode ?? 
'developer'\n );\n}\n\nconst reasoningModels = {\n 'o1-mini': {\n systemMessageMode: 'remove',\n },\n 'o1-mini-2024-09-12': {\n systemMessageMode: 'remove',\n },\n 'o1-preview': {\n systemMessageMode: 'remove',\n },\n 'o1-preview-2024-09-12': {\n systemMessageMode: 'remove',\n },\n 'o3-mini': {\n systemMessageMode: 'developer',\n },\n 'o3-mini-2025-01-31': {\n systemMessageMode: 'developer',\n },\n} as const;\n","import {\n LanguageModelV2CallWarning,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIChatPrompt } from './openai-chat-prompt';\n\nexport function convertToOpenAIChatMessages({\n prompt,\n useLegacyFunctionCalling = false,\n systemMessageMode = 'system',\n}: {\n prompt: LanguageModelV2Prompt;\n useLegacyFunctionCalling?: boolean;\n systemMessageMode?: 'system' | 'developer' | 'remove';\n}): {\n messages: OpenAIChatPrompt;\n warnings: Array<LanguageModelV2CallWarning>;\n} {\n const messages: OpenAIChatPrompt = [];\n const warnings: Array<LanguageModelV2CallWarning> = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n switch (systemMessageMode) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n case 'developer': {\n messages.push({ role: 'developer', content });\n break;\n }\n case 'remove': {\n warnings.push({\n type: 'other',\n message: 'system messages are removed for this model',\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = systemMessageMode;\n throw new Error(\n `Unsupported system message mode: ${_exhaustiveCheck}`,\n );\n }\n }\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({ role: 'user', content: content[0].text });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map((part, index) => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerOptions?.openai?.imageDetail,\n },\n };\n } else if (part.mediaType.startsWith('audio/')) {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'audio file parts with URLs',\n });\n }\n\n switch (part.mediaType) {\n case 'audio/wav': {\n return {\n type: 'input_audio',\n input_audio: { data: part.data, format: 'wav' },\n };\n }\n case 'audio/mp3':\n case 'audio/mpeg': {\n return {\n type: 'input_audio',\n input_audio: { data: part.data, format: 'mp3' },\n };\n }\n\n default: {\n throw new UnsupportedFunctionalityError({\n functionality: `audio content parts with media type ${part.mediaType}`,\n });\n }\n }\n } else if (part.mediaType === 'application/pdf') {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'PDF file parts with URLs',\n });\n }\n\n return {\n type: 'file',\n file: {\n filename: part.filename ?? 
`part-${index}.pdf`,\n file_data: `data:application/pdf;base64,${part.data}`,\n },\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n }\n }\n\n if (useLegacyFunctionCalling) {\n if (toolCalls.length > 1) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'useLegacyFunctionCalling with multiple tool calls in one message',\n });\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n function_call:\n toolCalls.length > 0 ? toolCalls[0].function : undefined,\n });\n } else {\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n }\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n if (useLegacyFunctionCalling) {\n messages.push({\n role: 'function',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n } else {\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n });\n }\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return { messages, warnings };\n}\n","import { LanguageModelV2LogProbs } from '@ai-sdk/provider';\n\ntype OpenAIChatLogProbs = {\n content:\n | {\n token: string;\n logprob: number;\n top_logprobs:\n | {\n token: string;\n logprob: number;\n }[]\n | null;\n }[]\n | null;\n};\n\nexport function mapOpenAIChatLogProbsOutput(\n logprobs: OpenAIChatLogProbs | null | undefined,\n): LanguageModelV2LogProbs | undefined {\n return (\n logprobs?.content?.map(({ token, logprob, top_logprobs }) => ({\n token,\n logprob,\n topLogprobs: top_logprobs\n ? top_logprobs.map(({ token, logprob }) => ({\n token,\n logprob,\n }))\n : [],\n })) ?? 
undefined\n );\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const openaiErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAIErrorData = z.infer<typeof openaiErrorDataSchema>;\n\nexport const openaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: openaiErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import {\n JSONSchema7,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useLegacyFunctionCalling = false,\n structuredOutputs,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useLegacyFunctionCalling: boolean | undefined;\n structuredOutputs: boolean;\n}): {\n tools?: {\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict?: boolean;\n };\n }[];\n toolChoice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'function'; function: { name: string } };\n\n // legacy support\n functions?: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n }[];\n function_call?: { name: string };\n toolWarnings: Array<LanguageModelV2CallWarning>;\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n if (useLegacyFunctionCalling) {\n const openaiFunctions: Array<{\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiFunctions.push({\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n functions: openaiFunctions,\n function_call: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case undefined:\n return {\n functions: openaiFunctions,\n function_call: undefined,\n toolWarnings,\n };\n case 'required':\n throw new UnsupportedFunctionalityError({\n functionality: 'useLegacyFunctionCalling and toolChoice: required',\n });\n default:\n return {\n functions: openaiFunctions,\n function_call: { name: toolChoice.toolName },\n toolWarnings,\n };\n }\n }\n\n const openaiTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict: boolean | undefined;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: structuredOutputs ? true : undefined,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n toolChoice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2LogProbs,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompletionPrompt } from './convert-to-openai-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompletionLogProbs } from './map-openai-completion-logprobs';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAICompletionModelId,\n OpenAICompletionSettings,\n} from './openai-completion-settings';\nimport {\n openaiErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\n\ntype OpenAICompletionConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAICompletionLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 
'v2';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompletionModelId;\n readonly settings: OpenAICompletionSettings;\n\n private readonly config: OpenAICompletionConfig;\n\n constructor(\n modelId: OpenAICompletionModelId,\n settings: OpenAICompletionSettings,\n config: OpenAICompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n tools,\n toolChoice,\n seed,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n logprobs:\n typeof this.settings.logprobs === 'number'\n ? this.settings.logprobs\n : typeof this.settings.logprobs === 'boolean'\n ? this.settings.logprobs\n ? 0\n : undefined\n : undefined,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? 
stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n logprobs: mapOpenAICompletionLogProbs(choice.logprobs),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiCompletionChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let logprobs: LanguageModelV2LogProbs;\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiCompletionChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n\n const mappedLogprobs = mapOpenAICompletionLogProbs(\n choice?.logprobs,\n );\n if (mappedLogprobs?.length) {\n if (logprobs === 
undefined) logprobs = [];\n logprobs.push(...mappedLogprobs);\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n logprobs,\n usage,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n logprobs: z\n .object({\n tokens: z.array(z.string()),\n token_logprobs: z.array(z.number()),\n top_logprobs: z.array(z.record(z.string(), z.number())).nullable(),\n })\n .nullish(),\n }),\n ),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n logprobs: z\n .object({\n tokens: z.array(z.string()),\n token_logprobs: z.array(z.number()),\n top_logprobs: z.array(z.record(z.string(), z.number())).nullable(),\n })\n .nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n openaiErrorDataSchema,\n]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n 
functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { LanguageModelV2LogProbs } from '@ai-sdk/provider';\n\ntype OpenAICompletionLogProps = {\n tokens: string[];\n token_logprobs: number[];\n top_logprobs: Record<string, number>[] | null;\n};\n\nexport function mapOpenAICompletionLogProbs(\n logprobs: OpenAICompletionLogProps | null | undefined,\n): LanguageModelV2LogProbs | undefined {\n return logprobs?.tokens.map((token, index) => ({\n token,\n logprob: logprobs.token_logprobs[index],\n topLogprobs: logprobs.top_logprobs\n ? Object.entries(logprobs.top_logprobs[index]).map(\n ([token, logprob]) => ({\n token,\n logprob,\n }),\n )\n : [],\n }));\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport {\n OpenAIEmbeddingModelId,\n OpenAIEmbeddingSettings,\n} from './openai-embedding-settings';\nimport { openaiFailedResponseHandler } from './openai-error';\n\nexport class OpenAIEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAIEmbeddingModelId;\n\n private readonly config: OpenAIConfig;\n private readonly settings: OpenAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.settings.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAIEmbeddingModelId,\n settings: OpenAIEmbeddingSettings,\n config: OpenAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport {\n OpenAIImageModelId,\n OpenAIImageSettings,\n modelMaxImagesPerCall,\n} from './openai-image-settings';\n\ninterface OpenAIImageModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class OpenAIImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return (\n this.settings.maxImagesPerCall ?? modelMaxImagesPerCall[this.modelId] ?? 1\n );\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAIImageModelId,\n private readonly settings: OpenAIImageSettings,\n private readonly config: OpenAIImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? 
{}),\n response_format: 'b64_json',\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","export type OpenAIImageModelId = 'dall-e-3' | 'dall-e-2' | (string & {});\n\n// https://platform.openai.com/docs/guides/images\nexport const modelMaxImagesPerCall: Record<OpenAIImageModelId, number> = {\n 'dall-e-3': 1,\n 'dall-e-2': 10,\n};\n\nexport interface OpenAIImageSettings {\n /**\nOverride the maximum number of images per call (default is dependent on the\nmodel, or 1 for an unknown model).\n */\n maxImagesPerCall?: number;\n}\n","import {\n TranscriptionModelV1,\n TranscriptionModelV1CallOptions,\n TranscriptionModelV1CallWarning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n convertBase64ToUint8Array,\n createJsonResponseHandler,\n parseProviderOptions,\n postFormDataToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport {\n OpenAITranscriptionModelId,\n OpenAITranscriptionModelOptions,\n} from './openai-transcription-settings';\n\n// https://platform.openai.com/docs/api-reference/audio/createTranscription\nconst OpenAIProviderOptionsSchema = z.object({\n include: z\n .array(z.string())\n .optional()\n .describe(\n 'Additional information to include in the transcription response.',\n ),\n language: z\n .string()\n .optional()\n .describe('The language of the input audio in ISO-639-1 format.'),\n prompt: z\n .string()\n .optional()\n .describe(\n \"An optional text to guide the model's style or continue a previous audio segment.\",\n ),\n temperature: z\n .number()\n .min(0)\n .max(1)\n .optional()\n .default(0)\n .describe('The sampling temperature, between 0 and 1.'),\n timestampGranularities: z\n .array(z.enum(['word', 'segment']))\n .optional()\n .default(['segment'])\n .describe(\n 'The timestamp granularities to populate for this transcription.',\n ),\n});\n\nexport type OpenAITranscriptionCallOptions = Omit<\n TranscriptionModelV1CallOptions,\n 'providerOptions'\n> & {\n providerOptions?: {\n openai?: z.infer<typeof OpenAIProviderOptionsSchema>;\n };\n};\n\ninterface OpenAITranscriptionModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\n// https://platform.openai.com/docs/guides/speech-to-text#supported-languages\nconst languageMap = {\n afrikaans: 'af',\n arabic: 'ar',\n armenian: 'hy',\n azerbaijani: 'az',\n belarusian: 'be',\n bosnian: 'bs',\n bulgarian: 'bg',\n catalan: 'ca',\n chinese: 'zh',\n croatian: 'hr',\n czech: 'cs',\n danish: 'da',\n dutch: 'nl',\n english: 'en',\n estonian: 'et',\n finnish: 'fi',\n french: 'fr',\n galician: 'gl',\n german: 'de',\n greek: 'el',\n hebrew: 'he',\n hindi: 'hi',\n hungarian: 'hu',\n icelandic: 'is',\n indonesian: 'id',\n italian: 'it',\n japanese: 'ja',\n kannada: 'kn',\n kazakh: 'kk',\n korean: 'ko',\n latvian: 'lv',\n lithuanian: 'lt',\n macedonian: 'mk',\n malay: 
'ms',\n marathi: 'mr',\n maori: 'mi',\n nepali: 'ne',\n norwegian: 'no',\n persian: 'fa',\n polish: 'pl',\n portuguese: 'pt',\n romanian: 'ro',\n russian: 'ru',\n serbian: 'sr',\n slovak: 'sk',\n slovenian: 'sl',\n spanish: 'es',\n swahili: 'sw',\n swedish: 'sv',\n tagalog: 'tl',\n tamil: 'ta',\n thai: 'th',\n turkish: 'tr',\n ukrainian: 'uk',\n urdu: 'ur',\n vietnamese: 'vi',\n welsh: 'cy',\n};\n\nexport class OpenAITranscriptionModel implements TranscriptionModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAITranscriptionModelId,\n private readonly config: OpenAITranscriptionModelConfig,\n ) {}\n\n private getArgs({\n audio,\n mediaType,\n providerOptions,\n }: OpenAITranscriptionCallOptions) {\n const warnings: TranscriptionModelV1CallWarning[] = [];\n\n // Parse provider options\n const openAIOptions = parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: OpenAIProviderOptionsSchema,\n });\n\n // Create form data with base fields\n const formData = new FormData();\n const blob =\n audio instanceof Uint8Array\n ? new Blob([audio])\n : new Blob([convertBase64ToUint8Array(audio)]);\n\n formData.append('model', this.modelId);\n formData.append('file', new File([blob], 'audio', { type: mediaType }));\n\n // Add provider-specific options\n if (openAIOptions) {\n const transcriptionModelOptions: OpenAITranscriptionModelOptions = {\n include: openAIOptions.include,\n language: openAIOptions.language,\n prompt: openAIOptions.prompt,\n temperature: openAIOptions.temperature,\n timestamp_granularities: openAIOptions.timestampGranularities,\n };\n\n for (const key in transcriptionModelOptions) {\n const value =\n transcriptionModelOptions[\n key as keyof OpenAITranscriptionModelOptions\n ];\n if (value !== undefined) {\n formData.append(key, value as string);\n }\n }\n }\n\n return {\n formData,\n warnings,\n };\n }\n\n async doGenerate(\n options: OpenAITranscriptionCallOptions,\n ): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { formData, warnings } = this.getArgs(options);\n\n const {\n value: response,\n responseHeaders,\n rawValue: rawResponse,\n } = await postFormDataToApi({\n url: this.config.url({\n path: '/audio/transcriptions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n formData,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTranscriptionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const language =\n response.language != null && response.language in languageMap\n ? languageMap[response.language as keyof typeof languageMap]\n : undefined;\n\n return {\n text: response.text,\n segments:\n response.words?.map(word => ({\n text: word.word,\n startSecond: word.start,\n endSecond: word.end,\n })) ?? [],\n language,\n durationInSeconds: response.duration ?? 
undefined,\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n\nconst openaiTranscriptionResponseSchema = z.object({\n text: z.string(),\n language: z.string().nullish(),\n duration: z.number().nullish(),\n words: z\n .array(\n z.object({\n word: z.string(),\n start: z.number(),\n end: z.number(),\n }),\n )\n .nullish(),\n});\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from '../openai-config';\nimport { openaiFailedResponseHandler } from '../openai-error';\nimport { convertToOpenAIResponsesMessages } from './convert-to-openai-responses-messages';\nimport { mapOpenAIResponseFinishReason } from './map-openai-responses-finish-reason';\nimport { prepareResponsesTools } from './openai-responses-prepare-tools';\nimport { OpenAIResponsesModelId } from './openai-responses-settings';\n\nexport class OpenAIResponsesLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = 'json';\n\n readonly modelId: OpenAIResponsesModelId;\n\n private readonly config: OpenAIConfig;\n\n constructor(modelId: OpenAIResponsesModelId, config: OpenAIConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n maxTokens,\n temperature,\n stopSequences,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n seed,\n prompt,\n providerOptions,\n tools,\n toolChoice,\n responseFormat,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const modelConfig = getResponsesModelConfig(this.modelId);\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'stopSequences' });\n }\n\n const { messages, warnings: messageWarnings } =\n convertToOpenAIResponsesMessages({\n prompt,\n systemMessageMode: modelConfig.systemMessageMode,\n });\n\n warnings.push(...messageWarnings);\n\n const openaiOptions = parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiResponsesProviderOptionsSchema,\n });\n\n const isStrict = openaiOptions?.strictSchemas ?? true;\n\n const baseArgs = {\n model: this.modelId,\n input: messages,\n temperature,\n top_p: topP,\n max_output_tokens: maxTokens,\n\n ...(responseFormat?.type === 'json' && {\n text: {\n format:\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n strict: isStrict,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n schema: responseFormat.schema,\n }\n : { type: 'json_object' },\n },\n }),\n\n // provider options:\n metadata: openaiOptions?.metadata,\n parallel_tool_calls: openaiOptions?.parallelToolCalls,\n previous_response_id: openaiOptions?.previousResponseId,\n store: openaiOptions?.store,\n user: openaiOptions?.user,\n instructions: openaiOptions?.instructions,\n\n // model-specific settings:\n ...(modelConfig.isReasoningModel &&\n openaiOptions?.reasoningEffort != null && {\n reasoning: { effort: openaiOptions?.reasoningEffort },\n }),\n ...(modelConfig.requiredAutoTruncation && {\n truncation: 'auto',\n }),\n };\n\n if (modelConfig.isReasoningModel) {\n // remove unsupported settings for reasoning models\n // see https://platform.openai.com/docs/guides/reasoning#limitations\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details: 'temperature is not supported for reasoning models',\n });\n }\n\n if (baseArgs.top_p != null) {\n baseArgs.top_p = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topP',\n details: 'topP is not supported for reasoning models',\n });\n }\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareResponsesTools({\n tools,\n toolChoice,\n strict: isStrict,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/responses',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n z.object({\n id: z.string(),\n created_at: z.number(),\n model: z.string(),\n output: z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n role: z.literal('assistant'),\n content: z.array(\n z.object({\n type: z.literal('output_text'),\n text: z.string(),\n annotations: z.array(\n z.object({\n type: z.literal('url_citation'),\n start_index: z.number(),\n end_index: z.number(),\n url: z.string(),\n title: z.string(),\n }),\n ),\n }),\n ),\n }),\n z.object({\n type: z.literal('function_call'),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n }),\n z.object({\n type: z.literal('web_search_call'),\n }),\n z.object({\n type: z.literal('computer_call'),\n }),\n z.object({\n type: z.literal('reasoning'),\n }),\n ]),\n ),\n incomplete_details: z.object({ reason: z.string() }).nullable(),\n usage: usageSchema,\n }),\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const outputTextElements = response.output\n .filter(output => output.type === 'message')\n .flatMap(output => output.content)\n .filter(content => content.type === 'output_text');\n\n const toolCalls = response.output\n .filter(output => output.type === 'function_call')\n .map(output => ({\n toolCallType: 'function' as const,\n toolCallId: output.call_id,\n toolName: output.name,\n args: output.arguments,\n }));\n\n return {\n text: 
outputTextElements.map(content => content.text).join('\\n'),\n sources: outputTextElements.flatMap(content =>\n content.annotations.map(annotation => ({\n sourceType: 'url',\n id: this.config.generateId?.() ?? generateId(),\n url: annotation.url,\n title: annotation.title,\n })),\n ),\n finishReason: mapOpenAIResponseFinishReason({\n finishReason: response.incomplete_details?.reason,\n hasToolCalls: toolCalls.length > 0,\n }),\n toolCalls: toolCalls.length > 0 ? toolCalls : undefined,\n usage: {\n promptTokens: response.usage.input_tokens,\n completionTokens: response.usage.output_tokens,\n },\n request: { body },\n response: {\n id: response.id,\n timestamp: new Date(response.created_at * 1000),\n modelId: response.model,\n headers: responseHeaders,\n body: rawResponse,\n },\n providerMetadata: {\n openai: {\n responseId: response.id,\n cachedPromptTokens:\n response.usage.input_tokens_details?.cached_tokens ?? null,\n reasoningTokens:\n response.usage.output_tokens_details?.reasoning_tokens ?? null,\n },\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args: body, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/responses',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...body,\n stream: true,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiResponsesChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const self = this;\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let promptTokens = NaN;\n let completionTokens = NaN;\n let cachedPromptTokens: number | null = null;\n let reasoningTokens: number | null = null;\n let responseId: string | null = null;\n const ongoingToolCalls: Record<\n number,\n { toolName: string; toolCallId: string } | undefined\n > = {};\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiResponsesChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isResponseOutputItemAddedChunk(value)) {\n if (value.item.type === 'function_call') {\n ongoingToolCalls[value.output_index] = {\n toolName: value.item.name,\n toolCallId: value.item.call_id,\n };\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: value.item.call_id,\n toolName: value.item.name,\n argsTextDelta: value.item.arguments,\n });\n }\n } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {\n const toolCall = ongoingToolCalls[value.output_index];\n\n if (toolCall != null) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: value.delta,\n });\n }\n } else if (isResponseCreatedChunk(value)) {\n responseId = value.response.id;\n controller.enqueue({\n type: 'response-metadata',\n id: value.response.id,\n timestamp: new Date(value.response.created_at * 1000),\n modelId: value.response.model,\n });\n } else if 
(isTextDeltaChunk(value)) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: value.delta,\n });\n } else if (\n isResponseOutputItemDoneChunk(value) &&\n value.item.type === 'function_call'\n ) {\n ongoingToolCalls[value.output_index] = undefined;\n hasToolCalls = true;\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: value.item.call_id,\n toolName: value.item.name,\n args: value.item.arguments,\n });\n } else if (isResponseFinishedChunk(value)) {\n finishReason = mapOpenAIResponseFinishReason({\n finishReason: value.response.incomplete_details?.reason,\n hasToolCalls,\n });\n promptTokens = value.response.usage.input_tokens;\n completionTokens = value.response.usage.output_tokens;\n cachedPromptTokens =\n value.response.usage.input_tokens_details?.cached_tokens ??\n cachedPromptTokens;\n reasoningTokens =\n value.response.usage.output_tokens_details?.reasoning_tokens ??\n reasoningTokens;\n } else if (isResponseAnnotationAddedChunk(value)) {\n controller.enqueue({\n type: 'source',\n source: {\n sourceType: 'url',\n id: self.config.generateId?.() ?? generateId(),\n url: value.annotation.url,\n title: value.annotation.title,\n },\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: { promptTokens, completionTokens },\n ...((cachedPromptTokens != null || reasoningTokens != null) && {\n providerMetadata: {\n openai: {\n responseId,\n cachedPromptTokens,\n reasoningTokens,\n },\n },\n }),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nconst usageSchema = z.object({\n input_tokens: z.number(),\n input_tokens_details: z\n .object({ cached_tokens: z.number().nullish() })\n .nullish(),\n output_tokens: z.number(),\n output_tokens_details: z\n .object({ reasoning_tokens: z.number().nullish() })\n .nullish(),\n});\n\nconst textDeltaChunkSchema = z.object({\n type: z.literal('response.output_text.delta'),\n delta: z.string(),\n});\n\nconst responseFinishedChunkSchema = z.object({\n type: z.enum(['response.completed', 'response.incomplete']),\n response: z.object({\n incomplete_details: z.object({ reason: z.string() }).nullish(),\n usage: usageSchema,\n }),\n});\n\nconst responseCreatedChunkSchema = z.object({\n type: z.literal('response.created'),\n response: z.object({\n id: z.string(),\n created_at: z.number(),\n model: z.string(),\n }),\n});\n\nconst responseOutputItemDoneSchema = z.object({\n type: z.literal('response.output_item.done'),\n output_index: z.number(),\n item: z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n }),\n z.object({\n type: z.literal('function_call'),\n id: z.string(),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n status: z.literal('completed'),\n }),\n ]),\n});\n\nconst responseFunctionCallArgumentsDeltaSchema = z.object({\n type: z.literal('response.function_call_arguments.delta'),\n item_id: z.string(),\n output_index: z.number(),\n delta: z.string(),\n});\n\nconst responseOutputItemAddedSchema = z.object({\n type: z.literal('response.output_item.added'),\n output_index: z.number(),\n item: z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n }),\n z.object({\n type: z.literal('function_call'),\n id: z.string(),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n }),\n ]),\n});\n\nconst responseAnnotationAddedSchema = z.object({\n type: z.literal('response.output_text.annotation.added'),\n annotation: z.object({\n type: 
z.literal('url_citation'),\n url: z.string(),\n title: z.string(),\n }),\n});\n\nconst openaiResponsesChunkSchema = z.union([\n textDeltaChunkSchema,\n responseFinishedChunkSchema,\n responseCreatedChunkSchema,\n responseOutputItemDoneSchema,\n responseFunctionCallArgumentsDeltaSchema,\n responseOutputItemAddedSchema,\n responseAnnotationAddedSchema,\n z.object({ type: z.string() }).passthrough(), // fallback for unknown chunks\n]);\n\nfunction isTextDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof textDeltaChunkSchema> {\n return chunk.type === 'response.output_text.delta';\n}\n\nfunction isResponseOutputItemDoneChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseOutputItemDoneSchema> {\n return chunk.type === 'response.output_item.done';\n}\n\nfunction isResponseFinishedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseFinishedChunkSchema> {\n return (\n chunk.type === 'response.completed' || chunk.type === 'response.incomplete'\n );\n}\n\nfunction isResponseCreatedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseCreatedChunkSchema> {\n return chunk.type === 'response.created';\n}\n\nfunction isResponseFunctionCallArgumentsDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseFunctionCallArgumentsDeltaSchema> {\n return chunk.type === 'response.function_call_arguments.delta';\n}\n\nfunction isResponseOutputItemAddedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseOutputItemAddedSchema> {\n return chunk.type === 'response.output_item.added';\n}\n\nfunction isResponseAnnotationAddedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseAnnotationAddedSchema> {\n return chunk.type === 'response.output_text.annotation.added';\n}\n\ntype ResponsesModelConfig = {\n isReasoningModel: boolean;\n systemMessageMode: 'remove' | 'system' | 'developer';\n requiredAutoTruncation: boolean;\n};\n\nfunction getResponsesModelConfig(modelId: string): ResponsesModelConfig {\n // o series reasoning models:\n if (modelId.startsWith('o')) {\n if (modelId.startsWith('o1-mini') || modelId.startsWith('o1-preview')) {\n return {\n isReasoningModel: true,\n systemMessageMode: 'remove',\n requiredAutoTruncation: false,\n };\n }\n\n return {\n isReasoningModel: true,\n systemMessageMode: 'developer',\n requiredAutoTruncation: false,\n };\n }\n\n // gpt models:\n return {\n isReasoningModel: false,\n systemMessageMode: 'system',\n requiredAutoTruncation: false,\n };\n}\n\nconst openaiResponsesProviderOptionsSchema = z.object({\n metadata: z.any().nullish(),\n parallelToolCalls: z.boolean().nullish(),\n previousResponseId: z.string().nullish(),\n store: z.boolean().nullish(),\n user: z.string().nullish(),\n reasoningEffort: z.string().nullish(),\n strictSchemas: z.boolean().nullish(),\n instructions: z.string().nullish(),\n});\n\nexport type OpenAIResponsesProviderOptions = z.infer<\n typeof openaiResponsesProviderOptionsSchema\n>;\n","import {\n LanguageModelV2CallWarning,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIResponsesPrompt } from './openai-responses-api-types';\n\nexport function convertToOpenAIResponsesMessages({\n prompt,\n systemMessageMode,\n}: {\n prompt: LanguageModelV2Prompt;\n systemMessageMode: 'system' | 'developer' | 'remove';\n}): 
{\n messages: OpenAIResponsesPrompt;\n warnings: Array<LanguageModelV2CallWarning>;\n} {\n const messages: OpenAIResponsesPrompt = [];\n const warnings: Array<LanguageModelV2CallWarning> = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n switch (systemMessageMode) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n case 'developer': {\n messages.push({ role: 'developer', content });\n break;\n }\n case 'remove': {\n warnings.push({\n type: 'other',\n message: 'system messages are removed for this model',\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = systemMessageMode;\n throw new Error(\n `Unsupported system message mode: ${_exhaustiveCheck}`,\n );\n }\n }\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map((part, index) => {\n switch (part.type) {\n case 'text': {\n return { type: 'input_text', text: part.text };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'input_image',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerOptions?.openai?.imageDetail,\n };\n } else if (part.mediaType === 'application/pdf') {\n if (part.data instanceof URL) {\n // The AI SDK automatically downloads files for user file parts with URLs\n throw new UnsupportedFunctionalityError({\n functionality: 'PDF file parts with URLs',\n });\n }\n\n return {\n type: 'input_file',\n filename: part.filename ?? `part-${index}.pdf`,\n file_data: `data:application/pdf;base64,${part.data}`,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n messages.push({\n role: 'assistant',\n content: [{ type: 'output_text', text: part.text }],\n });\n break;\n }\n case 'tool-call': {\n messages.push({\n type: 'function_call',\n call_id: part.toolCallId,\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n });\n break;\n }\n }\n }\n\n break;\n }\n\n case 'tool': {\n for (const part of content) {\n messages.push({\n type: 'function_call_output',\n call_id: part.toolCallId,\n output: JSON.stringify(part.result),\n });\n }\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return { messages, warnings };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIResponseFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case undefined:\n case null:\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'max_output_tokens':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n default:\n return hasToolCalls ? 
'tool-calls' : 'unknown';\n }\n}\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIResponsesTool } from './openai-responses-api-types';\n\nexport function prepareResponsesTools({\n tools,\n toolChoice,\n strict,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n strict: boolean;\n}): {\n tools?: Array<OpenAIResponsesTool>;\n toolChoice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'web_search_preview' }\n | { type: 'function'; name: string };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiTools: Array<OpenAIResponsesTool> = [];\n\n for (const tool of tools) {\n switch (tool.type) {\n case 'function':\n openaiTools.push({\n type: 'function',\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: strict ? true : undefined,\n });\n break;\n case 'provider-defined':\n switch (tool.id) {\n case 'openai.web_search_preview':\n openaiTools.push({\n type: 'web_search_preview',\n search_context_size: tool.args.searchContextSize as\n | 'low'\n | 'medium'\n | 'high',\n user_location: tool.args.userLocation as {\n type: 'approximate';\n city: string;\n region: string;\n },\n });\n break;\n default:\n toolWarnings.push({ type: 'unsupported-tool', tool });\n break;\n }\n break;\n default:\n toolWarnings.push({ type: 'unsupported-tool', tool });\n break;\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n toolChoice:\n toolChoice.toolName === 'web_search_preview'\n ? 
{ type: 'web_search_preview' }\n : { type: 'function', name: toolChoice.toolName },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;ACpBlB,sBAIO;AAGA,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA,2BAA2B;AAAA,EAC3B,oBAAoB;AACtB,GAOE;AACA,QAAM,WAA6B,CAAC;AACpC,QAAM,WAA8C,CAAC;AAErD,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,gBAAQ,mBAAmB;AAAA,UACzB,KAAK,UAAU;AACb,qBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAC5C;AAAA,UACF;AAAA,UACA,KAAK,UAAU;AACb,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AACD;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI;AAAA,cACR,oCAAoC,gBAAgB;AAAA,YACtD;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,QAAQ,CAAC,EAAE,KAAK,CAAC;AACxD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,CAAC,MAAM,UAAU;AA3DhD;AA4DY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA;AAAA,sBAG3C,SAAQ,gBAAK,oBAAL,mBAAsB,WAAtB,mBAA8B;AAAA,oBACxC;AAAA,kBACF;AAAA,gBACF,WAAW,KAAK,UAAU,WAAW,QAAQ,GAAG;AAC9C,sBAAI,KAAK,gBAAgB,KAAK;AAC5B,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,0BAAQ,KAAK,WAAW;AAAA,oBACtB,KAAK,aAAa;AAChB,6BAAO;AAAA,wBACL,MAAM;AAAA,wBACN,aAAa,EAAE,MAAM,KAAK,MAAM,QAAQ,MAAM;AAAA,sBAChD;AAAA,oBACF;AAAA,oBACA,KAAK;AAAA,oBACL,KAAK,cAAc;AACjB,6BAAO;AAAA,wBACL,MAAM;AAAA,wBACN,aAAa,EAAE,MAAM,KAAK,MAAM,QAAQ,MAAM;AAAA,sBAChD;AAAA,oBACF;AAAA,oBAEA,SAAS;AACP,4BAAM,IAAI,8CAA8B;AAAA,wBACtC,eAAe,uCAAuC,KAAK,SAAS;AAAA,sBACtE,CAAC;AAAA,oBACH;AAAA,kBACF;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,sBAAI,KAAK,gBAAgB,KAAK;AAC5B,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,MAAM;AAAA,sBACJ,WAAU,UAAK,aAAL,YAAiB,QAAQ,KAAK;AAAA,sBACxC,WAAW,+BAA+B,KAAK,IAAI;AAAA,oBACrD;AAAA,kBACF;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,0BAA0B;AAC5B,cAAI,UAAU,SAAS,GAAG;AACxB,kBAAM,IAAI,8CAA8B;AAAA,cACtC,eACE;AAAA,YACJ,CAAC;AAAA,UACH;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS;AAAA,YACT,eACE,UAAU,SAAS,IAAI,UAAU,CAAC,EAAE,WAAW;AAAA,UACnD,CAAC;AAAA,QACH,OAAO;AACL,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS;AAAA,YACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UACjD,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,cAAI,0BAA0B;AAC5B,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,aAAa;AAAA,cACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC7C,CAAC;AAAA,UACH,OAAO;AACL,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,cAAc,aAAa;AAAA,cAC3
B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC7C,CAAC;AAAA,UACH;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS;AAC9B;;;ACzMO,SAAS,4BACd,UACqC;AAnBvC;AAoBE,UACE,gDAAU,YAAV,mBAAmB,IAAI,CAAC,EAAE,OAAO,SAAS,aAAa,OAAO;AAAA,IAC5D;AAAA,IACA;AAAA,IACA,aAAa,eACT,aAAa,IAAI,CAAC,EAAE,OAAAC,QAAO,SAAAC,SAAQ,OAAO;AAAA,MACxC,OAAAD;AAAA,MACA,SAAAC;AAAA,IACF,EAAE,IACF,CAAC;AAAA,EACP,QATA,YASO;AAEX;;;AC9BO,SAAS,sBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAAkB;AAClB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,sDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ACrBM,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACdA,IAAAC,mBAKO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA,2BAA2B;AAAA,EAC3B;AACF,GA6BE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,MAAI,0BAA0B;AAC5B,UAAM,kBAID,CAAC;AAEN,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,oBAAoB;AACpC,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,MACtD,OAAO;AACL,wBAAgB,KAAK;AAAA,UACnB,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,QAAI,cAAc,MAAM;AACtB,aAAO;AAAA,QACL,WAAW;AAAA,QACX,eAAe;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAEA,UAAMC,QAAO,WAAW;AAExB,YAAQA,OAAM;AAAA,MACZ,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,UACL,WAAW;AAAA,UACX,eAAe;AAAA,UACf;AAAA,QACF;AAAA,MACF,KAAK;AACH,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AACE,eAAO;AAAA,UACL,WAAW;AAAA,UACX,eAAe,EAAE,MAAM,WAAW,SAAS;AAAA,UAC3C;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,cAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,kBAAY,KAAK;AAAA,QACf,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,oBAAoB,OAAO;AAAA,QACrC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,YAAY,QAAW,aAAa;AAAA,EACnE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,YAAY,MAAM,aAAa;AAAA,IAC9D,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANrHO,IAAM,0BAAN,MAAyD;AAAA,EAQ9D,YACE,SACA,UACA,QACA;AAXF,SAAS,uBAAuB;AAY9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,4BAAqC;AA1D3C;AA8DI,YAAO,UAAK,SAAS,sBAAd,YAAmC,iBAAiB,KAAK,OAAO;AAAA,EACzE;AAAA,EAEA,IAAI,8BAA8B;AAEhC,QAAI,aAAa,KAAK,OAAO,GAAG;AAC9B,aAAO;AAAA,IACT;AAEA,WAAO,KAAK,4BAA4B,SAAS;AAAA,EACnD;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,oBAA6B;AAE/B,WAAO,CAAC,KAAK,SAAS;AAAA,EACxB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAjGnD;AAkGI,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAC
hB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,2BAA2B,KAAK,SAAS;AAE/C,QAAI,4BAA4B,KAAK,SAAS,sBAAsB,MAAM;AACxE,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,QAAI,4BAA4B,KAAK,2BAA2B;AAC9D,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,UAAU,UAAU,gBAAgB,IAAI;AAAA,MAC9C;AAAA,QACE;AAAA,QACA;AAAA,QACA,mBAAmB,qBAAqB,KAAK,OAAO;AAAA,MACtD;AAAA,IACF;AAEA,aAAS,KAAK,GAAG,eAAe;AAEhC,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,YAAY,KAAK,SAAS;AAAA,MAC1B,UACE,KAAK,SAAS,aAAa,QAC3B,OAAO,KAAK,SAAS,aAAa,WAC9B,OACA;AAAA,MACN,cACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAChC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,MACR,MAAM,KAAK,SAAS;AAAA,MACpB,qBAAqB,KAAK,SAAS;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA;AAAA,MAElB,kBACE,iDAAgB,UAAS,SACrB,KAAK,6BAA6B,eAAe,UAAU,OACzD;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,QAAQ;AAAA,UACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MACN,MAAM;AAAA,MACN;AAAA;AAAA;AAAA,MAIA,wBAAuB,wDAAiB,WAAjB,mBAAyB;AAAA,MAChD,QAAO,wDAAiB,WAAjB,mBAAyB;AAAA,MAChC,WAAU,wDAAiB,WAAjB,mBAAyB;AAAA,MACnC,aAAY,wDAAiB,WAAjB,mBAAyB;AAAA,MACrC,mBACE,8DAAiB,WAAjB,mBAAyB,oBAAzB,YACA,KAAK,SAAS;AAAA;AAAA,MAGhB;AAAA,IACF;AAEA,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAGlC,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,SAAS,MAAM;AAC1B,iBAAS,QAAQ;AACjB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,qBAAqB,MAAM;AACtC,iBAAS,oBAAoB;AAC7B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,oBAAoB,MAAM;AACrC,iBAAS,mBAAmB;AAC5B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,cAAc,MAAM;AAC/B,iBAAS,aAAa;AACtB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,YAAY,MAAM;AAC7B,iBAAS,WAAW;AACpB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,gBAAgB,MAAM;AACjC,iBAAS,eAAe;AACxB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAGA,UAAI,SAAS,cAAc,MAAM;AAC/B,YAAI,SAAS,yBAAyB,MAAM;AAC1C,mBAAS,wBAAwB,SAAS;AAAA,QAC5C;AACA,iBAAS,aAAa;AAAA,MACxB;AAAA,IACF;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB,KAAK;AAAA,IAC1B,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,QACb;AAAA,QACA;AAAA,MACF;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAzSjE;AA0SI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAGjC,UAAM,0BAAyB,cAAS,UAAT,mBAAgB;AAC/C,UAAM,sBAAqB,cAAS,UAAT,mBAAgB;AAC3C,UAAM,mBAAoD,EAAE,QAAQ,CAAC,EAAE;AACvE,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,OAAO,kBACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,OAAO,2BACtB,iEAAwB;AAAA,IA
C5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,OAAO,2BACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,OAAO,qBACtB,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,WACE,KAAK,SAAS,4BAA4B,OAAO,QAAQ,gBACrD;AAAA,QACE;AAAA,UACE,cAAc;AAAA,UACd,gBAAY,mCAAW;AAAA,UACvB,UAAU,OAAO,QAAQ,cAAc;AAAA,UACvC,MAAM,OAAO,QAAQ,cAAc;AAAA,QACrC;AAAA,MACF,KACA,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AAnWpD,YAAAC;AAmWwD;AAAA,UAC1C,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACN,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,MACA,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAE5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,cAAc;AAAA,gBACd,YAAY,SAAS;AAAA,gBACrB,UAAU,SAAS;AAAA,gBACnB,eAAe,SAAS;AAAA,cAC1B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI;AACJ,QAAI,eAAe;AAEnB,UAAM,EAAE,yBAAyB,IAAI,KAAK;AAE1C,UAAM,mBAAoD,EAAE,QAAQ,CAAC,EAAE;AAEvE,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAxevC;AA0eY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,sBAAQ;AAAA,gBACN,cAAc,wCAAiB;AAAA,gBAC/B,kBAAkB,gDAAqB;AAAA,cACzC;AAEA,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,iCAAiB,OAAO,kBACtB,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,iCAAiB,OAAO,2BACtB,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,iCAAiB,OAAO,2BACtB,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,iCAAiB,OAAO,qBACtB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAA
A,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa,OAAW,YAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAEA,kBAAM,kBACJ,4BAA4B,MAAM,iBAAiB,OAC/C;AAAA,cACE;AAAA,gBACE,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,OAAO;AAAA,cACT;AAAA,YACF,IACA,MAAM;AAEZ,gBAAI,mBAAmB,MAAM;AAC3B,yBAAW,iBAAiB,iBAAiB;AAC3C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AA3rB5B;AA4rBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,yBAAyB,cAC5B,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,eAAe,cACZ,OAAO;AAAA,UACN,WAAW,cAAE,OAAO;AAAA,UACpB,MAAM,cAAE,OAAO;AAAA,QACjB,CAAC,EACA,QAAQ;AAAA,QACX,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MAC
D,OAAO,cAAE,OAAO;AAAA,MAChB,UAAU,cACP,OAAO;AAAA,QACN,SAAS,cACN;AAAA,UACC,cAAE,OAAO;AAAA,YACP,OAAO,cAAE,OAAO;AAAA,YAChB,SAAS,cAAE,OAAO;AAAA,YAClB,cAAc,cAAE;AAAA,cACd,cAAE,OAAO;AAAA,gBACP,OAAO,cAAE,OAAO;AAAA,gBAChB,SAAS,cAAE,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC,EACA,QAAQ;AAAA,MACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wBAAwB,cAAE,MAAM;AAAA,EACpC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,eAAe,cACZ,OAAO;AAAA,YACN,MAAM,cAAE,OAAO,EAAE,SAAS;AAAA,YAC1B,WAAW,cAAE,OAAO,EAAE,SAAS;AAAA,UACjC,CAAC,EACA,QAAQ;AAAA,UACX,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,UAAU,cACP,OAAO;AAAA,UACN,SAAS,cACN;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,SAAS,cAAE,OAAO;AAAA,cAClB,cAAc,cAAE;AAAA,gBACd,cAAE,OAAO;AAAA,kBACP,OAAO,cAAE,OAAO;AAAA,kBAChB,SAAS,cAAE,OAAO;AAAA,gBACpB,CAAC;AAAA,cACH;AAAA,YACF,CAAC;AAAA,UACH,EACC,SAAS;AAAA,QACd,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QAC9C,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAED,SAAS,iBAAiB,SAAiB;AACzC,SACE,YAAY,QACZ,QAAQ,WAAW,KAAK,KACxB,YAAY,QACZ,QAAQ,WAAW,KAAK;AAE5B;AAEA,SAAS,aAAa,SAAiB;AACrC,SAAO,QAAQ,WAAW,sBAAsB;AAClD;AAEA,SAAS,qBAAqB,SAAiB;AAp2B/C;AAq2BE,MAAI,CAAC,iBAAiB,OAAO,GAAG;AAC9B,WAAO;AAAA,EACT;AAEA,UACE,2BAAgB,OAAuC,MAAvD,mBACI,sBADJ,YACyB;AAE7B;AAEA,IAAM,kBAAkB;AAAA,EACtB,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AAAA,EACA,cAAc;AAAA,IACZ,mBAAmB;AAAA,EACrB;AAAA,EACA,yBAAyB;AAAA,IACvB,mBAAmB;AAAA,EACrB;AAAA,EACA,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AACF;;;AO33BA,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACflB,IAAAC,mBAIO;AAEA,SAAS,gCAAgC;AAAA,EAC9C;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB
,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ACjGO,SAAS,4BACd,UACqC;AACrC,SAAO,qCAAU,OAAO,IAAI,CAAC,OAAO,WAAW;AAAA,IAC7C;AAAA,IACA,SAAS,SAAS,eAAe,KAAK;AAAA,IACtC,aAAa,SAAS,eAClB,OAAO,QAAQ,SAAS,aAAa,KAAK,CAAC,EAAE;AAAA,MAC3C,CAAC,CAACC,QAAO,OAAO,OAAO;AAAA,QACrB,OAAAA;AAAA,QACA;AAAA,MACF;AAAA,IACF,IACA,CAAC;AAAA,EACP;AACF;;;AFcO,IAAM,gCAAN,MAA+D;AAAA,EASpE,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,gCAAgC,EAAE,QAAQ,YAAY,CAAC;AAEzD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA,QACpB,YAAY,KAAK,SAAS;AAAA,QAC1B,UACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAChC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,QACR,QAAQ,KAAK,SAAS;AAAA,QACtB,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA;AAAA,QAGA,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI;AACJ,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,M
AAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa,OAAW,YAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,MACxB,UAAU,cACP,OAAO;AAAA,QACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,MACnE,CAAC,EACA,QAAQ;AAAA,IACb,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,8BAA8B,cAAE,MAAM;AAAA,EAC1C,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,QAChB,UAAU,cACP,OAAO;AAAA,UACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,QACnE,CAAC,EACA,QAAQ;AAAA,MACb,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AGlWD,IAAAC,mBAGO;AACP,IAAAC,yBAIO;AACP,IAAAC,cAAkB;AAQX,IAAM,uBAAN,MAA+D;AAAA,EAmBpE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AA5BrC;AA6BI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAhCvC;AAiCI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ACjGD,IAAAC,yBAIO;AACP,IAAAC,cAAkB;;;ACHX,IAAM,wBAA4D;AAAA,EACvE,YAAY;AAAA,EACZ,YAAY;AACd;;;ADeO,IAAM,mBAAN,MAA+C;AAAA,EAapD,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAfnB,SAAS,uBAAuB;AAAA,EAgB7B;AAAA,EAdH,IAAI,mBAA2B;AAxBjC;AAyBI,YACE,gBAAK,SAAS,qBAAd,YAAkC,sBAAsB,KAAK,OAAO,MAApE,Y
AAyE;AAAA,EAE7E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAnDJ;AAoDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AErGD,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AASlB,IAAM,8BAA8B,cAAE,OAAO;AAAA,EAC3C,SAAS,cACN,MAAM,cAAE,OAAO,CAAC,EAChB,SAAS,EACT;AAAA,IACC;AAAA,EACF;AAAA,EACF,UAAU,cACP,OAAO,EACP,SAAS,EACT,SAAS,sDAAsD;AAAA,EAClE,QAAQ,cACL,OAAO,EACP,SAAS,EACT;AAAA,IACC;AAAA,EACF;AAAA,EACF,aAAa,cACV,OAAO,EACP,IAAI,CAAC,EACL,IAAI,CAAC,EACL,SAAS,EACT,QAAQ,CAAC,EACT,SAAS,4CAA4C;AAAA,EACxD,wBAAwB,cACrB,MAAM,cAAE,KAAK,CAAC,QAAQ,SAAS,CAAC,CAAC,EACjC,SAAS,EACT,QAAQ,CAAC,SAAS,CAAC,EACnB;AAAA,IACC;AAAA,EACF;AACJ,CAAC;AAkBD,IAAM,cAAc;AAAA,EAClB,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,MAAM;AAAA,EACN,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,YAAY;AAAA,EACZ,OAAO;AACT;AAEO,IAAM,2BAAN,MAA+D;AAAA,EAOpE,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAmC;AACjC,UAAM,WAA8C,CAAC;AAGrD,UAAM,oBAAgB,6CAAqB;AAAA,MACzC,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,WAAW,IAAI,SAAS;AAC9B,UAAM,OACJ,iBAAiB,aACb,IAAI,KAAK,CAAC,KAAK,CAAC,IAChB,IAAI,KAAK,KAAC,kDAA0B,KAAK,CAAC,CAAC;AAEjD,aAAS,OAAO,SAAS,KAAK,OAAO;AACrC,aAAS,OAAO,QAAQ,IAAI,KAAK,CAAC,IAAI,GAAG,SAAS,EAAE,MAAM,UAAU,CAAC,CAAC;AAGtE,QAAI,eAAe;AACjB,YAAM,4BAA6D;AAAA,QACjE,SAAS,cAAc;AAAA,QACvB,UAAU,cAAc;AAAA,QACxB,QAAQ,cAAc;AAAA,QACtB,aAAa,cAAc;AAAA,QAC3B,yBAAyB,cAAc;AAAA,MACzC;AAEA,iBAAW,OAAO,2BAA2B;AAC3C,cAAM,QACJ,0BACE,GACF;AACF,YAAI,UAAU,QAAW;AACvB,mBAAS,OAAO,KAAK,KAAe;AAAA,QACtC;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SACkE;AAnMtE;AAoMI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,UAAU,SAAS,IAAI,KAAK,QAAQ,OAAO;AAEnD,UAAM;AAAA,MACJ,OAAO;AAA
A,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,UAAM,0CAAkB;AAAA,MAC1B,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,WACJ,SAAS,YAAY,QAAQ,SAAS,YAAY,cAC9C,YAAY,SAAS,QAAoC,IACzD;AAEN,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,WACE,oBAAS,UAAT,mBAAgB,IAAI,WAAS;AAAA,QAC3B,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,WAAW,KAAK;AAAA,MAClB,QAJA,YAIO,CAAC;AAAA,MACV;AAAA,MACA,oBAAmB,cAAS,aAAT,YAAqB;AAAA,MACxC;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,OAAO;AAAA,EACf,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,OAAO,cACJ;AAAA,IACC,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,OAAO,cAAE,OAAO;AAAA,MAChB,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;;;AC3PD,IAAAC,yBAQO;AACP,IAAAC,cAAkB;;;ACflB,IAAAC,mBAIO;AAGA,SAAS,iCAAiC;AAAA,EAC/C;AAAA,EACA;AACF,GAME;AACA,QAAM,WAAkC,CAAC;AACzC,QAAM,WAA8C,CAAC;AAErD,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,gBAAQ,mBAAmB;AAAA,UACzB,KAAK,UAAU;AACb,qBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAC5C;AAAA,UACF;AAAA,UACA,KAAK,UAAU;AACb,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AACD;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI;AAAA,cACR,oCAAoC,gBAAgB;AAAA,YACtD;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,CAAC,MAAM,UAAU;AApDhD;AAqDY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,cAAc,MAAM,KAAK,KAAK;AAAA,cAC/C;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA;AAAA,oBAG3C,SAAQ,gBAAK,oBAAL,mBAAsB,WAAtB,mBAA8B;AAAA,kBACxC;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,sBAAI,KAAK,gBAAgB,KAAK;AAE5B,0BAAM,IAAI,+CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAU,UAAK,aAAL,YAAiB,QAAQ,KAAK;AAAA,oBACxC,WAAW,+BAA+B,KAAK,IAAI;AAAA,kBACrD;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,CAAC,EAAE,MAAM,eAAe,MAAM,KAAK,KAAK,CAAC;AAAA,cACpD,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,KAAK;AAAA,gBACd,MAAM,KAAK;AAAA,gBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,cACrC,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,QAAQ,SAAS;AAC1B,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,KAAK;AAAA,YACd,QAAQ,KAAK,UAAU,KAAK,MAAM;AAAA,UACpC,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS;AAC9B;;;AC/IO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO,eAAe,eAAe;AAAA,EACzC;AACF;;;ACpBA,IAAAC,mBAIO;AAGA,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AACF,GAaE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB
,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,cAA0C,CAAC;AAEjD,aAAW,QAAQ,OAAO;AACxB,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK;AACH,oBAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,SAAS,OAAO;AAAA,QAC1B,CAAC;AACD;AAAA,MACF,KAAK;AACH,gBAAQ,KAAK,IAAI;AAAA,UACf,KAAK;AACH,wBAAY,KAAK;AAAA,cACf,MAAM;AAAA,cACN,qBAAqB,KAAK,KAAK;AAAA,cAI/B,eAAe,KAAK,KAAK;AAAA,YAK3B,CAAC;AACD;AAAA,UACF;AACE,yBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AACpD;AAAA,QACJ;AACA;AAAA,MACF;AACE,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AACpD;AAAA,IACJ;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,YAAY,QAAW,aAAa;AAAA,EACnE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,YAAY,MAAM,aAAa;AAAA,IAC9D,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YACE,WAAW,aAAa,uBACpB,EAAE,MAAM,qBAAqB,IAC7B,EAAE,MAAM,YAAY,MAAM,WAAW,SAAS;AAAA,QACpD;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AH9EO,IAAM,+BAAN,MAA8D;AAAA,EAQnE,YAAY,SAAiC,QAAsB;AAPnE,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAOrC,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAtDnD;AAuDI,UAAM,WAAyC,CAAC;AAChD,UAAM,cAAc,wBAAwB,KAAK,OAAO;AAExD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,gBAAgB,CAAC;AAAA,IACzE;AAEA,UAAM,EAAE,UAAU,UAAU,gBAAgB,IAC1C,iCAAiC;AAAA,MAC/B;AAAA,MACA,mBAAmB,YAAY;AAAA,IACjC,CAAC;AAEH,aAAS,KAAK,GAAG,eAAe;AAEhC,UAAM,oBAAgB,6CAAqB;AAAA,MACzC,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,YAAW,oDAAe,kBAAf,YAAgC;AAEjD,UAAM,WAAW;AAAA,MACf,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MAEnB,IAAI,iDAAgB,UAAS,UAAU;AAAA,QACrC,MAAM;AAAA,UACJ,QACE,eAAe,UAAU,OACrB;AAAA,YACE,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,YAC5B,QAAQ,eAAe;AAAA,UACzB,IACA,EAAE,MAAM,cAAc;AAAA,QAC9B;AAAA,MACF;AAAA;AAAA,MAGA,UAAU,+CAAe;AAAA,MACzB,qBAAqB,+CAAe;AAAA,MACpC,sBAAsB,+CAAe;AAAA,MACrC,OAAO,+CAAe;AAAA,MACtB,MAAM,+CAAe;AAAA,MACrB,cAAc,+CAAe;AAAA;AAAA,MAG7B,GAAI,YAAY,qBACd,+CAAe,oBAAmB,QAAQ;AAAA,QACxC,WAAW,EAAE,QAAQ,+CAAe,gBAAgB;AAAA,MACtD;AAAA,MACF,GAAI,YAAY,0BAA0B;AAAA,QACxC,YAAY;AAAA,MACd;AAAA,IACF;AAEA,QAAI,YAAY,kBAAkB;AAGhC,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAEA,UAAI,SAAS,SAAS,MAAM;AAC1B,iBAAS,QAAQ;AACjB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,sBAAsB;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxLjE;AAyLI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,u
CAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB,cAAE,OAAO;AAAA,UACP,IAAI,cAAE,OAAO;AAAA,UACb,YAAY,cAAE,OAAO;AAAA,UACrB,OAAO,cAAE,OAAO;AAAA,UAChB,QAAQ,cAAE;AAAA,YACR,cAAE,mBAAmB,QAAQ;AAAA,cAC3B,cAAE,OAAO;AAAA,gBACP,MAAM,cAAE,QAAQ,SAAS;AAAA,gBACzB,MAAM,cAAE,QAAQ,WAAW;AAAA,gBAC3B,SAAS,cAAE;AAAA,kBACT,cAAE,OAAO;AAAA,oBACP,MAAM,cAAE,QAAQ,aAAa;AAAA,oBAC7B,MAAM,cAAE,OAAO;AAAA,oBACf,aAAa,cAAE;AAAA,sBACb,cAAE,OAAO;AAAA,wBACP,MAAM,cAAE,QAAQ,cAAc;AAAA,wBAC9B,aAAa,cAAE,OAAO;AAAA,wBACtB,WAAW,cAAE,OAAO;AAAA,wBACpB,KAAK,cAAE,OAAO;AAAA,wBACd,OAAO,cAAE,OAAO;AAAA,sBAClB,CAAC;AAAA,oBACH;AAAA,kBACF,CAAC;AAAA,gBACH;AAAA,cACF,CAAC;AAAA,cACD,cAAE,OAAO;AAAA,gBACP,MAAM,cAAE,QAAQ,eAAe;AAAA,gBAC/B,SAAS,cAAE,OAAO;AAAA,gBAClB,MAAM,cAAE,OAAO;AAAA,gBACf,WAAW,cAAE,OAAO;AAAA,cACtB,CAAC;AAAA,cACD,cAAE,OAAO;AAAA,gBACP,MAAM,cAAE,QAAQ,iBAAiB;AAAA,cACnC,CAAC;AAAA,cACD,cAAE,OAAO;AAAA,gBACP,MAAM,cAAE,QAAQ,eAAe;AAAA,cACjC,CAAC;AAAA,cACD,cAAE,OAAO;AAAA,gBACP,MAAM,cAAE,QAAQ,WAAW;AAAA,cAC7B,CAAC;AAAA,YACH,CAAC;AAAA,UACH;AAAA,UACA,oBAAoB,cAAE,OAAO,EAAE,QAAQ,cAAE,OAAO,EAAE,CAAC,EAAE,SAAS;AAAA,UAC9D,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,qBAAqB,SAAS,OACjC,OAAO,YAAU,OAAO,SAAS,SAAS,EAC1C,QAAQ,YAAU,OAAO,OAAO,EAChC,OAAO,aAAW,QAAQ,SAAS,aAAa;AAEnD,UAAM,YAAY,SAAS,OACxB,OAAO,YAAU,OAAO,SAAS,eAAe,EAChD,IAAI,aAAW;AAAA,MACd,cAAc;AAAA,MACd,YAAY,OAAO;AAAA,MACnB,UAAU,OAAO;AAAA,MACjB,MAAM,OAAO;AAAA,IACf,EAAE;AAEJ,WAAO;AAAA,MACL,MAAM,mBAAmB,IAAI,aAAW,QAAQ,IAAI,EAAE,KAAK,IAAI;AAAA,MAC/D,SAAS,mBAAmB;AAAA,QAAQ,aAClC,QAAQ,YAAY,IAAI,gBAAW;AA3Q3C,cAAAC,KAAAC,KAAAC;AA2Q+C;AAAA,YACrC,YAAY;AAAA,YACZ,KAAIA,OAAAD,OAAAD,MAAA,KAAK,QAAO,eAAZ,gBAAAC,IAAA,KAAAD,SAAA,OAAAE,UAA8B,mCAAW;AAAA,YAC7C,KAAK,WAAW;AAAA,YAChB,OAAO,WAAW;AAAA,UACpB;AAAA,SAAE;AAAA,MACJ;AAAA,MACA,cAAc,8BAA8B;AAAA,QAC1C,eAAc,cAAS,uBAAT,mBAA6B;AAAA,QAC3C,cAAc,UAAU,SAAS;AAAA,MACnC,CAAC;AAAA,MACD,WAAW,UAAU,SAAS,IAAI,YAAY;AAAA,MAC9C,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,IAAI,SAAS;AAAA,QACb,WAAW,IAAI,KAAK,SAAS,aAAa,GAAI;AAAA,QAC9C,SAAS,SAAS;AAAA,QAClB,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,qBACE,oBAAS,MAAM,yBAAf,mBAAqC,kBAArC,YAAsD;AAAA,UACxD,kBACE,oBAAS,MAAM,0BAAf,mBAAsC,qBAAtC,YAA0D;AAAA,QAC9D;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAErD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,OAAO;AAEb,QAAI,eAA4C;AAChD,QAAI,eAAe;AACnB,QAAI,mBAAmB;AACvB,QAAI,qBAAoC;AACxC,QAAI,kBAAiC;AACrC,QAAI,aAA4B;AAChC,UAAM,mBAGF,CAAC;AACL,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA3VvC;AA6VY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,+BAA+B,KAAK,GAAG;AACzC,kBAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,iCAAiB,MAAM,YAAY,IAAI;AAAA,kBACrC,UAAU,MAAM,KAAK;AAAA,kBACrB,YAAY,MAAM,KAAK;AAAA,gBACzB;AAEA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,MAAM,KAAK;AAAA,kBACvB,UAAU,MAAM,KAAK;AAAA,kBACrB,eAAe,MAAM,KAAK;AAAA,gBAC5B,CAAC;AAAA,cACH;AAAA,YACF,WAAW,0CAA0C,KAAK,GAAG;AAC3D,oBA
AM,WAAW,iBAAiB,MAAM,YAAY;AAEpD,kBAAI,YAAY,MAAM;AACpB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,MAAM;AAAA,gBACvB,CAAC;AAAA,cACH;AAAA,YACF,WAAW,uBAAuB,KAAK,GAAG;AACxC,2BAAa,MAAM,SAAS;AAC5B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,MAAM,SAAS;AAAA,gBACnB,WAAW,IAAI,KAAK,MAAM,SAAS,aAAa,GAAI;AAAA,gBACpD,SAAS,MAAM,SAAS;AAAA,cAC1B,CAAC;AAAA,YACH,WAAW,iBAAiB,KAAK,GAAG;AAClC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH,WACE,8BAA8B,KAAK,KACnC,MAAM,KAAK,SAAS,iBACpB;AACA,+BAAiB,MAAM,YAAY,IAAI;AACvC,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,cAAc;AAAA,gBACd,YAAY,MAAM,KAAK;AAAA,gBACvB,UAAU,MAAM,KAAK;AAAA,gBACrB,MAAM,MAAM,KAAK;AAAA,cACnB,CAAC;AAAA,YACH,WAAW,wBAAwB,KAAK,GAAG;AACzC,6BAAe,8BAA8B;AAAA,gBAC3C,eAAc,WAAM,SAAS,uBAAf,mBAAmC;AAAA,gBACjD;AAAA,cACF,CAAC;AACD,6BAAe,MAAM,SAAS,MAAM;AACpC,iCAAmB,MAAM,SAAS,MAAM;AACxC,oCACE,iBAAM,SAAS,MAAM,yBAArB,mBAA2C,kBAA3C,YACA;AACF,iCACE,iBAAM,SAAS,MAAM,0BAArB,mBAA4C,qBAA5C,YACA;AAAA,YACJ,WAAW,+BAA+B,KAAK,GAAG;AAChD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAQ;AAAA,kBACN,YAAY;AAAA,kBACZ,KAAI,sBAAK,QAAO,eAAZ,gDAA8B,mCAAW;AAAA,kBAC7C,KAAK,MAAM,WAAW;AAAA,kBACtB,OAAO,MAAM,WAAW;AAAA,gBAC1B;AAAA,cACF,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,EAAE,cAAc,iBAAiB;AAAA,cACxC,IAAK,sBAAsB,QAAQ,mBAAmB,SAAS;AAAA,gBAC7D,kBAAkB;AAAA,kBAChB,QAAQ;AAAA,oBACN;AAAA,oBACA;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,cAAc,cAAE,OAAO;AAAA,EACvB,sBAAsB,cACnB,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,EAC9C,QAAQ;AAAA,EACX,eAAe,cAAE,OAAO;AAAA,EACxB,uBAAuB,cACpB,OAAO,EAAE,kBAAkB,cAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,EACjD,QAAQ;AACb,CAAC;AAED,IAAM,uBAAuB,cAAE,OAAO;AAAA,EACpC,MAAM,cAAE,QAAQ,4BAA4B;AAAA,EAC5C,OAAO,cAAE,OAAO;AAClB,CAAC;AAED,IAAM,8BAA8B,cAAE,OAAO;AAAA,EAC3C,MAAM,cAAE,KAAK,CAAC,sBAAsB,qBAAqB,CAAC;AAAA,EAC1D,UAAU,cAAE,OAAO;AAAA,IACjB,oBAAoB,cAAE,OAAO,EAAE,QAAQ,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,IAC7D,OAAO;AAAA,EACT,CAAC;AACH,CAAC;AAED,IAAM,6BAA6B,cAAE,OAAO;AAAA,EAC1C,MAAM,cAAE,QAAQ,kBAAkB;AAAA,EAClC,UAAU,cAAE,OAAO;AAAA,IACjB,IAAI,cAAE,OAAO;AAAA,IACb,YAAY,cAAE,OAAO;AAAA,IACrB,OAAO,cAAE,OAAO;AAAA,EAClB,CAAC;AACH,CAAC;AAED,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,MAAM,cAAE,QAAQ,2BAA2B;AAAA,EAC3C,cAAc,cAAE,OAAO;AAAA,EACvB,MAAM,cAAE,mBAAmB,QAAQ;AAAA,IACjC,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,QAAQ,SAAS;AAAA,IAC3B,CAAC;AAAA,IACD,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,QAAQ,eAAe;AAAA,MAC/B,IAAI,cAAE,OAAO;AAAA,MACb,SAAS,cAAE,OAAO;AAAA,MAClB,MAAM,cAAE,OAAO;AAAA,MACf,WAAW,cAAE,OAAO;AAAA,MACpB,QAAQ,cAAE,QAAQ,WAAW;AAAA,IAC/B,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAED,IAAM,2CAA2C,cAAE,OAAO;AAAA,EACxD,MAAM,cAAE,QAAQ,wCAAwC;AAAA,EACxD,SAAS,cAAE,OAAO;AAAA,EAClB,cAAc,cAAE,OAAO;AAAA,EACvB,OAAO,cAAE,OAAO;AAClB,CAAC;AAED,IAAM,gCAAgC,cAAE,OAAO;AAAA,EAC7C,MAAM,cAAE,QAAQ,4BAA4B;AAAA,EAC5C,cAAc,cAAE,OAAO;AAAA,EACvB,MAAM,cAAE,mBAAmB,QAAQ;AAAA,IACjC,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,QAAQ,SAAS;AAAA,IAC3B,CAAC;AAAA,IACD,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,QAAQ,eAAe;AAAA,MAC/B,IAAI,cAAE,OAAO;AAAA,MACb,SAAS,cAAE,OAAO;AAAA,MAClB,MAAM,cAAE,OAAO;AAAA,MACf,WAAW,cAAE,OAAO;AAAA,IACtB,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAED,IAAM,gCAAgC,cAAE,OAAO;AAAA,EAC7C,MAAM,cAAE,QAAQ,uCAAuC;AAAA,EACvD,YAAY,cAAE,OAAO;AAAA,IACnB,MAAM,cAAE,QAAQ,cAAc;AAAA,IAC9B,KAAK,cAAE,OAAO;AAAA,IACd,OAAO,cAAE,OAAO;AAAA,EAClB,CAAC;AACH,CAAC;AAED,IAAM,6BAA6B,cAAE,MAAM;AAAA,EACzC;A
AAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,EAAE,CAAC,EAAE,YAAY;AAAA;AAC7C,CAAC;AAED,SAAS,iBACP,OAC+C;AAC/C,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,8BACP,OACuD;AACvD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,wBACP,OACsD;AACtD,SACE,MAAM,SAAS,wBAAwB,MAAM,SAAS;AAE1D;AAEA,SAAS,uBACP,OACqD;AACrD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,0CACP,OACmE;AACnE,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,+BACP,OACwD;AACxD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,+BACP,OACwD;AACxD,SAAO,MAAM,SAAS;AACxB;AAQA,SAAS,wBAAwB,SAAuC;AAEtE,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,YAAY,GAAG;AACrE,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,WAAO;AAAA,MACL,kBAAkB;AAAA,MAClB,mBAAmB;AAAA,MACnB,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAGA,SAAO;AAAA,IACL,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,IACnB,wBAAwB;AAAA,EAC1B;AACF;AAEA,IAAM,uCAAuC,cAAE,OAAO;AAAA,EACpD,UAAU,cAAE,IAAI,EAAE,QAAQ;AAAA,EAC1B,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA,EACvC,oBAAoB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,OAAO,cAAE,QAAQ,EAAE,QAAQ;AAAA,EAC3B,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,EACzB,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,eAAe,cAAE,QAAQ,EAAE,QAAQ;AAAA,EACnC,cAAc,cAAE,OAAO,EAAE,QAAQ;AACnC,CAAC;","names":["import_provider","import_provider_utils","import_zod","token","logprob","import_provider","type","_a","toolCall","import_provider_utils","import_zod","import_provider","token","import_provider","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider","import_provider","_a","_b","_c"]}

@@ -614,8 +614,7 @@ var OpenAIChatLanguageModel = class {
 614   614      promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : NaN,
 615   615      completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
 616   616      },
 617        -
 618        -   request: { body: JSON.stringify(body) },
       617  +   request: { body },
 619   618      response: {
 620   619      ...getResponseMetadata(response),
 621   620      headers: responseHeaders,
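
In this hunk the non-streaming chat result stops pre-serializing the request payload: `request.body` is now the object that was sent rather than a JSON string. A minimal consumer-side sketch follows; the helper name is illustrative and not part of the package, and the only thing taken from the hunk is the string-to-object change.

  // Hypothetical persistence helper: under canary.4 the body arrived pre-stringified,
  // under canary.5 the caller must serialize it if a string is required.
  function persistRequestBody(body: unknown): string {
    return typeof body === "string" ? body : JSON.stringify(body);
  }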

@@ -665,7 +664,6 @@ var OpenAIChatLanguageModel = class {
 665   664      });
 666   665      return {
 667   666      stream: simulatedStream,
 668        -   rawCall: result.rawCall,
 669   667      response: result.response,
 670   668      warnings: result.warnings
 671   669      };
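
The simulated-stream branch now forwards only `stream`, `response`, and `warnings`; `rawCall` is no longer passed through. A shape sketch with a made-up type name and deliberately loose field types, since the hunk does not show them:

  // Illustrative only -- this type is not exported by @ai-sdk/openai.
  type SimulatedStreamReturn = {
    stream: ReadableStream<unknown>;
    response: unknown;
    warnings: unknown[] | undefined;
    // rawCall was dropped in canary.5; callers reading result.rawCall must migrate.
  };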

@@ -875,9 +873,8 @@ var OpenAIChatLanguageModel = class {
 875   873      }
 876   874      })
 877   875      ),
 878        -
       876  +   request: { body },
 879   877      response: { headers: responseHeaders },
 880        -   request: { body: JSON.stringify(body) },
 881   878      warnings
 882   879      };
 883   880      }
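
The streaming chat result gains the same un-stringified `request.body` (and loses the old stringified variant a few lines further down). A sketch of how a test that compared the serialized body might be adjusted; the helper name and the use of `node:assert` are my own choices, not something the diff prescribes.

  import { deepStrictEqual } from "node:assert";

  // Hypothetical test helper: compare the captured request body structurally
  // instead of against a JSON string.
  function assertRequestBody(
    result: { request?: { body?: unknown } },
    expected: unknown,
  ): void {
    deepStrictEqual(result.request?.body, expected);
  }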

@@ -1216,7 +1213,6 @@ var OpenAICompletionLanguageModel = class {
 1216  1213     abortSignal: options.abortSignal,
 1217  1214     fetch: this.config.fetch
 1218  1215     });
 1219       -   const { prompt: rawPrompt, ...rawSettings } = args;
 1220  1216     const choice = response.choices[0];
 1221  1217     return {
 1222  1218     text: choice.text,
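
The removed destructuring only existed to feed the old `rawCall: { rawPrompt, rawSettings }` field. If downstream code still wants that split, it can recreate it from the returned body; a sketch with an illustrative function name, assuming nothing about the body beyond a `prompt` field:

  // Recreate the old rawPrompt/rawSettings split from request.body (illustrative).
  function splitCompletionBody(body: Record<string, unknown>) {
    const { prompt: rawPrompt, ...rawSettings } = body;
    return { rawPrompt, rawSettings };
  }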

@@ -1226,8 +1222,7 @@ var OpenAICompletionLanguageModel = class {
 1226  1222     },
 1227  1223     finishReason: mapOpenAIFinishReason(choice.finish_reason),
 1228  1224     logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
 1229       -
 1230       -   request: { body: JSON.stringify(args) },
       1225 +   request: { body: args },
 1231  1226     response: {
 1232  1227     ...getResponseMetadata(response),
 1233  1228     headers: responseHeaders,
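
Because the completion result now returns `args` itself, fields of the outgoing payload can be inspected without `JSON.parse`. A small sketch; the `model` and `prompt` field names reflect the usual OpenAI completion body and are an assumption here, not something this hunk shows.

  // Illustrative inspection of the un-stringified completion payload.
  function describeCompletionRequest(result: { request: { body: unknown } }): string {
    const body = result.request.body as { model?: string; prompt?: unknown };
    return `model=${body.model ?? "unknown"} promptType=${typeof body.prompt}`;
  }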

@@ -1258,7 +1253,6 @@ var OpenAICompletionLanguageModel = class {
 1258  1253     abortSignal: options.abortSignal,
 1259  1254     fetch: this.config.fetch
 1260  1255     });
 1261       -   const { prompt: rawPrompt, ...rawSettings } = args;
 1262  1256     let finishReason = "unknown";
 1263  1257     let usage = {
 1264  1258     promptTokens: Number.NaN,

@@ -1322,7 +1316,6 @@ var OpenAICompletionLanguageModel = class {
 1322  1316     }
 1323  1317     })
 1324  1318     ),
 1325       -   rawCall: { rawPrompt, rawSettings },
 1326  1319     response: { headers: responseHeaders },
 1327  1320     warnings,
 1328  1321     request: { body: JSON.stringify(body) }
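
Note the unchanged context line above: the completion streaming path still returns `request: { body: JSON.stringify(body) }`, so within this canary some results carry a string body and others an object. A defensive normalizer sketch (purely illustrative, not part of the package):

  // Accept either shape while the request.body type is in flux.
  function normalizeRequestBody(body: unknown): unknown {
    if (typeof body !== "string") return body;
    try {
      return JSON.parse(body);
    } catch {
      return body; // not JSON -- leave the string as-is
    }
  }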

@@ -2156,13 +2149,7 @@ var OpenAIResponsesLanguageModel = class {
 2156  2149     promptTokens: response.usage.input_tokens,
 2157  2150     completionTokens: response.usage.output_tokens
 2158  2151     },
 2159       -
 2160       -   rawPrompt: void 0,
 2161       -   rawSettings: {}
 2162       -   },
 2163       -   request: {
 2164       -   body: JSON.stringify(body)
 2165       -   },
       2152 +   request: { body },
 2166  2153     response: {
 2167  2154     id: response.id,
 2168  2155     timestamp: new Date(response.created_at * 1e3),
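
The removed block dropped the old `rawPrompt: void 0` / `rawSettings: {}` pair (the first removed line is blank in this rendering; by analogy with the `rawCall: { rawPrompt, rawSettings }` line removed in the completion hunk above, it presumably opened a `rawCall` object) plus a separately stringified body; the replacement is a single `request.body` object. A rough before/after sketch, type names mine, all other result fields omitted:

  // canary.4 (removed lines): a rawCall wrapper -- assumed, see note above -- and a
  // stringified request body.
  type ResponsesResultBefore = {
    rawCall: { rawPrompt: undefined; rawSettings: Record<string, never> };
    request: { body: string };
  };
  // canary.5 (added line): just the request object itself.
  type ResponsesResultAfter = {
    request: { body: unknown };
  };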

@@ -2306,11 +2293,7 @@ var OpenAIResponsesLanguageModel = class {
 2306  2293     }
 2307  2294     })
 2308  2295     ),
 2309       -
 2310       -   rawPrompt: void 0,
 2311       -   rawSettings: {}
 2312       -   },
 2313       -   request: { body: JSON.stringify(body) },
       2296 +   request: { body },
 2314  2297     response: { headers: responseHeaders },
 2315  2298     warnings
 2316  2299     };