workers-ai-provider 0.7.0 → 0.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +16 -1
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
- package/src/streaming.ts +5 -0
- package/src/utils.ts +16 -0
- package/src/workersai-chat-language-model.ts +7 -5
package/dist/index.js
CHANGED
@@ -367,6 +367,17 @@ function processPartialToolCalls(partialToolCalls) {
   const mergedToolCalls = mergePartialToolCalls(partialToolCalls);
   return processToolCalls({ tool_calls: mergedToolCalls });
 }
+function processText(output) {
+  if (output?.choices?.[0]?.message?.content.length) {
+    return output?.choices?.[0]?.message?.content;
+  }
+  if ("response" in output) {
+    if (typeof output.response === "object" && output.response !== null) {
+      return JSON.stringify(output.response);
+    }
+    return output.response;
+  }
+}
 
 // src/streaming.ts
 function getMappedStream(response) {
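This hunk adds a `processText` helper next to the existing tool-call utilities. It normalizes the two output shapes Workers AI models can return: OpenAI-compatible chat completions (`choices[0].message.content`) and the older `{ response }` field, stringifying object responses so callers always receive a string. A rough TypeScript sketch of that behaviour follows; the type and names below are illustrative, not the package's exports, since the real helper stays internal to `dist/index.js`:

```ts
// Sketch only: mirrors the bundled processText helper for illustration.
type WorkersAIOutput = {
  response?: unknown;
  choices?: { message?: { content?: string } }[];
};

function processText(output: WorkersAIOutput): string | undefined {
  // OpenAI-compatible shape: prefer choices[0].message.content when non-empty.
  const content = output.choices?.[0]?.message?.content;
  if (content?.length) {
    return content;
  }
  // Legacy Workers AI shape: `response` may be a string or an object (e.g. JSON mode);
  // objects are stringified because the AI SDK expects `text` to be a string.
  if ("response" in output) {
    return typeof output.response === "object" && output.response !== null
      ? JSON.stringify(output.response)
      : (output.response as string | undefined);
  }
  return undefined;
}

console.log(processText({ choices: [{ message: { content: "Hello!" } }] })); // Hello!
console.log(processText({ response: { answer: 42 } })); // {"answer":42}
```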
@@ -397,6 +408,10 @@ function getMappedStream(response) {
           type: "reasoning",
           textDelta: chunk.choices[0].delta.reasoning_content
         });
+        chunk?.choices?.[0]?.delta?.content?.length && controller.enqueue({
+          type: "text-delta",
+          textDelta: chunk.choices[0].delta.content
+        });
       }
       if (partialToolCalls.length > 0) {
         const toolCalls = processPartialToolCalls(partialToolCalls);
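This hunk extends `getMappedStream` so that OpenAI-compatible streaming chunks carrying `choices[0].delta.content` are forwarded as `text-delta` stream parts, alongside the existing handling of the top-level `response` text and `reasoning_content`. A self-contained sketch of the per-chunk mapping; the chunk and part shapes are simplified assumptions, not the package's internal types:

```ts
// Simplified view of what getMappedStream enqueues per parsed SSE chunk after this change.
type StreamPart =
  | { type: "text-delta"; textDelta: string }
  | { type: "reasoning"; textDelta: string };

type Chunk = {
  response?: string;
  choices?: { delta?: { content?: string; reasoning_content?: string } }[];
};

function mapChunk(chunk: Chunk): StreamPart[] {
  const parts: StreamPart[] = [];
  const delta = chunk.choices?.[0]?.delta;
  // Existing behaviour: top-level response text and reasoning deltas.
  if (chunk.response?.length) {
    parts.push({ type: "text-delta", textDelta: chunk.response });
  }
  if (delta?.reasoning_content?.length) {
    parts.push({ type: "reasoning", textDelta: delta.reasoning_content });
  }
  // Added in 0.7.0 -> 0.7.2: plain content deltas from OpenAI-style chunks.
  if (delta?.content?.length) {
    parts.push({ type: "text-delta", textDelta: delta.content });
  }
  return parts;
}

console.log(mapChunk({ choices: [{ delta: { content: "Hel" } }] }));
// [ { type: "text-delta", textDelta: "Hel" } ]
```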
@@ -759,7 +774,7 @@ var WorkersAIChatLanguageModel = class {
       finishReason: mapWorkersAIFinishReason(output),
       rawCall: { rawPrompt: messages, rawSettings: args },
       rawResponse: { body: output },
-      text: typeof output.response === "object" && output.response !== null ? JSON.stringify(output.response) : output.response,
+      text: processText(output),
       toolCalls: processToolCalls(output),
       // @ts-ignore: Missing types
       reasoning: output?.choices?.[0]?.message?.reasoning_content,
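The final hunk switches the `text` field returned by `doGenerate` from the inline `response` ternary to `processText(output)`, so non-streaming results are populated for both output shapes. A hedged consumer-side usage sketch; the binding name, model id, handler shape, and the AI SDK's `generateText` call are assumptions for illustration, not part of this diff:

```ts
// Worker-side usage sketch; env.AI and the model id are illustrative assumptions.
import { generateText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    const { text } = await generateText({
      model: workersai("@cf/meta/llama-3.1-8b-instruct"),
      prompt: "Write a one-line greeting.",
    });

    // With 0.7.2, `text` is derived via processText, so models that answer with
    // OpenAI-style choices[0].message.content populate it as well as `response`.
    return new Response(text);
  },
};
```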
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/autorag-chat-language-model.ts", …],"sourcesContent":[…],"mappings":"…"} [minified one-line source map; content truncated in this diff view]
AK,OAAO,QAAQ;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,QACC,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,kBAAkB,gBAAgB;AACrC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,cAAc,yBAAyB,MAAM;AAAA,MAC7C,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD,aAAa,EAAE,MAAM,OAAO;AAAA,MAC5B,MACC,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,OACxD,KAAK,UAAU,OAAO,QAAQ,IAC9B,OAAO;AAAA,MACX,WAAW,iBAAiB,MAAM;AAAA;AAAA,MAElC,WAAW,QAAQ,UAAU,CAAC,GAAG,SAAS;AAAA,MAC1C,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAG/C,UAAM,EAAE,UAAU,OAAO,IAAI,+BAA+B,QAAQ,MAAM;AAK1E,QAAI,KAAK,OAAO,UAAU,mBAAmB,QAAQ,GAAG;AACvD,YAAMC,YAAW,MAAM,KAAK,WAAW,OAAO;AAE9C,UAAIA,qBAAoB,gBAAgB;AACvC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACxC;AAEA,aAAO;AAAA,QACN,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,QAClD,QAAQ,IAAI,eAA0C;AAAA,UACrD,MAAM,MAAM,YAAY;AACvB,gBAAIA,UAAS,MAAM;AAClB,yBAAW,QAAQ;AAAA,gBAClB,WAAWA,UAAS;AAAA,gBACpB,MAAM;AAAA,cACP,CAAC;AAAA,YACF;AACA,gBAAIA,UAAS,WAAW;AACvB,yBAAW,YAAYA,UAAS,WAAW;AAC1C,2BAAW,QAAQ;AAAA,kBAClB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACJ,CAAC;AAAA,cACF;AAAA,YACD;AACA,gBAAIA,UAAS,aAAa,OAAOA,UAAS,cAAc,UAAU;AACjE,yBAAW,QAAQ;AAAA,gBAClB,MAAM;AAAA,gBACN,WAAWA,UAAS;AAAA,cACrB,CAAC;AAAA,YACF;AACA,uBAAW,QAAQ;AAAA,cAClB,cAAc,yBAAyBA,SAAQ;AAAA,cAC/C,MAAM;AAAA,cACN,OAAOA,UAAS;AAAA,YACjB,CAAC;AACD,uBAAW,MAAM;AAAA,UAClB;AAAA,QACD,CAAC;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAGA,UAAM,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAGhD,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,QAAQ;AAAA,QACR,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,EAAE,oBAAoB,iBAAiB;AAC1C,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD,QAAQ,gBAAgB,IAAI,SAAS,QAAQ,CAAC;AAAA,MAC9C;AAAA,IACD;AAAA,EACD;AACD;;;AEjRO,IAAM,sBAAN,MAAkD;AAAA,EAUxD,YACU,SACA,UACA,QACR;AAHQ;AACA;AACA;AAZV,wBAAS,wBAAuB;AAAA,EAa7B;AAAA,EAXH,IAAI,mBAA2B;AAC9B,WAAO,KAAK,SAAS,oBAAoB;AAAA,EAC1C;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAOA,MAAM,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;AAAA,EAGD,GAEE;AACD,UAAM,EAAE,OAAO,OAAO,IAAI,4BAA4B,IAAI;AAE1D,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACxB,eAAS,KAAK;AAAA,QACb,SAAS;AAAA,QACT,SAAS;AAAA,QACT,MAAM;AAAA,MACP,CAAC;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY;AACjC,YAAM,eAA2C,MAAM,KAAK,OAAO,QAAQ;AAAA,QAC1E,KAAK;AAAA,QACL;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAGA,aAAO,mBAAmB,YAAY;AAAA,IACvC;AAEA,UAAM,SAAuB,MAAM,QAAQ;AAAA,MAC1C,MAAM,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAAC;AAAA,IAChD;AAIA,WAAO;AAAA,MACN;AAAA,MACA,UAAU;AAAA,QACT,SAAS,CAAC;AAAA,QACV,SAAS,KAAK;AAAA,QACd,WAAW,oBAAI,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,4BAA4B,MAA0B;AAC9D,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,QAAW,MAAS;AAEjE,SAAO;AAAA,IACN,QAAQ,aAAa,MAAM;AAAA,IAC3B,OAAO,aAAa,KAAK;AAAA,EAC1B;AACD;AAEA,SAAS,aAAa,OAAgB;AACrC,MAAI,UAAU,MAAM,CAAC,MAAO,QAAO;AACnC,QAAM,SAAS,OAAO,KAAK;AAC3B,SAAO,OAAO,UAAU,MAAM,IAAI,SAAS;AAC5C;AAEA,eAAe,mBAAmBC,SAAyD;AAC1F,QAAM,SAASA,QAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AA
C9B,MAAI,cAAc;AAGlB,SAAO,MAAM;AACZ,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,WAAO,KAAK,KAAK;AACjB,mBAAe,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC3B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EACjB;AACA,SAAO;AACR;;;AC/BO,SAAS,gBAAgB,SAAuC;AAGtE,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACpB,cAAU,QAAQ;AAAA,EACnB,OAAO;AACN,UAAM,EAAE,WAAW,OAAO,IAAI;AAC9B,cAAU;AAAA,MACT,KAAK,UAAU,EAAE,WAAW,OAAO,CAAC;AAAA,IACrC;AAAA,EACD;AAEA,MAAI,CAAC,SAAS;AACb,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,kBAAkB,CAAC,SAA+B,WAAkC,CAAC,MAC1F,IAAI,2BAA2B,SAAS,UAAU;AAAA,IACjD;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,EACX,CAAC;AAEF,QAAM,mBAAmB,CACxB,SACA,WAAmC,CAAC,MAEpC,IAAI,oBAAoB,SAAS,UAAU;AAAA,IAC1C;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,EACX,CAAC;AACF,QAAM,uBAAuB,CAC5B,SACA,WAAuC,CAAC,MAExC,IAAI,wBAAwB,SAAS,UAAU;AAAA,IAC9C;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,EACX,CAAC;AAEF,QAAM,WAAW,CAAC,SAA+B,aAAqC;AACrF,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EACzC;AAEA,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,SAAO;AACR;AAiBO,SAAS,cAAc,SAA2C;AACxE,QAAM,UAAU,QAAQ;AAExB,QAAM,kBAAkB,CAAC,WAAgC,CAAC;AAAA;AAAA,IAEzD,IAAI,yBAAyB,4CAA4C,UAAU;AAAA,MAClF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA;AAEF,QAAM,WAAW,CAAC,aAAmC;AACpD,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,QAAQ;AAAA,EAChC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACR;","names":["split","UnsupportedFunctionalityError","UnsupportedFunctionalityError","response","stream"]}
| 1 | +
{"version":3,"sources":["../src/autorag-chat-language-model.ts","../src/convert-to-workersai-chat-messages.ts","../src/map-workersai-usage.ts","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/deps/jsr.io/@std/streams/0.221.0/text_line_stream.js","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/utils.js","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/mod.js","../src/utils.ts","../src/streaming.ts","../src/workers-ai-embedding-model.ts","../src/workersai-chat-language-model.ts","../src/map-workersai-finish-reason.ts","../src/workersai-image-model.ts","../src/index.ts"],"sourcesContent":["import {\n\ttype LanguageModelV1,\n\ttype LanguageModelV1CallWarning,\n\tUnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\n\nimport type { AutoRAGChatSettings } from \"./autorag-chat-settings\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport { getMappedStream } from \"./streaming\";\nimport { prepareToolsAndToolChoice, processToolCalls } from \"./utils\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\ntype AutoRAGChatConfig = {\n\tprovider: string;\n\tbinding: AutoRAG;\n\tgateway?: GatewayOptions;\n};\n\nexport class AutoRAGChatLanguageModel implements LanguageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\treadonly defaultObjectGenerationMode = \"json\";\n\n\treadonly modelId: TextGenerationModels;\n\treadonly settings: AutoRAGChatSettings;\n\n\tprivate readonly config: AutoRAGChatConfig;\n\n\tconstructor(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings: AutoRAGChatSettings,\n\t\tconfig: AutoRAGChatConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tprivate getArgs({\n\t\tmode,\n\t\tprompt,\n\t\tfrequencyPenalty,\n\t\tpresencePenalty,\n\t}: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n\t\tconst type = mode.type;\n\n\t\tconst warnings: LanguageModelV1CallWarning[] = [];\n\n\t\tif (frequencyPenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\tsetting: \"frequencyPenalty\",\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t});\n\t\t}\n\n\t\tif (presencePenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\tsetting: \"presencePenalty\",\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t});\n\t\t}\n\n\t\tconst baseArgs = {\n\t\t\t// messages:\n\t\t\tmessages: convertToWorkersAIChatMessages(prompt),\n\t\t\t// model id:\n\t\t\tmodel: this.modelId,\n\t\t};\n\n\t\tswitch (type) {\n\t\t\tcase \"regular\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-json\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\tresponse_format: {\n\t\t\t\t\t\t\tjson_schema: mode.schema,\n\t\t\t\t\t\t\ttype: \"json_schema\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\ttools: undefined,\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-tool\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\ttool_choice: \"any\",\n\t\t\t\t\t\ttools: [{ function: mode.tool, type: \"function\" }],\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\t// @ts-expect-error - this is unreachable code\n\t\t\t// TODO: fixme\n\t\t\tcase \"object-grammar\": {\n\t\t\t\tthrow new 
UnsupportedFunctionalityError({\n\t\t\t\t\tfunctionality: \"object-grammar mode\",\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\t\tthrow new Error(`Unsupported type: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync doGenerate(\n\t\toptions: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst { messages } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\tconst output = await this.config.binding.aiSearch({\n\t\t\tquery: messages.map(({ content, role }) => `${role}: ${content}`).join(\"\\n\\n\"),\n\t\t});\n\n\t\treturn {\n\t\t\tfinishReason: \"stop\",\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\tsources: output.data.map(({ file_id, filename, score }) => ({\n\t\t\t\tid: file_id,\n\t\t\t\tproviderMetadata: {\n\t\t\t\t\tattributes: { score },\n\t\t\t\t},\n\t\t\t\tsourceType: \"url\",\n\t\t\t\turl: filename,\n\t\t\t})), // TODO: mapWorkersAIFinishReason(response.finish_reason),\n\t\t\ttext: output.response,\n\t\t\ttoolCalls: processToolCalls(output),\n\t\t\tusage: mapWorkersAIUsage(output),\n\t\t\twarnings,\n\t\t};\n\t}\n\n\tasync doStream(\n\t\toptions: Parameters<LanguageModelV1[\"doStream\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst { messages } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\tconst query = messages.map(({ content, role }) => `${role}: ${content}`).join(\"\\n\\n\");\n\n\t\tconst response = await this.config.binding.aiSearch({\n\t\t\tquery,\n\t\t\tstream: true,\n\t\t});\n\n\t\treturn {\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\tstream: getMappedStream(response),\n\t\t\twarnings,\n\t\t};\n\t}\n}\n","import type { LanguageModelV1Prompt, LanguageModelV1ProviderMetadata } from \"@ai-sdk/provider\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\nexport function convertToWorkersAIChatMessages(prompt: LanguageModelV1Prompt): {\n\tmessages: WorkersAIChatPrompt;\n\timages: {\n\t\tmimeType: string | undefined;\n\t\timage: Uint8Array;\n\t\tproviderMetadata: LanguageModelV1ProviderMetadata | undefined;\n\t}[];\n} {\n\tconst messages: WorkersAIChatPrompt = [];\n\tconst images: {\n\t\tmimeType: string | undefined;\n\t\timage: Uint8Array;\n\t\tproviderMetadata: LanguageModelV1ProviderMetadata | undefined;\n\t}[] = [];\n\n\tfor (const { role, content } of prompt) {\n\t\tswitch (role) {\n\t\t\tcase \"system\": {\n\t\t\t\tmessages.push({ content, role: \"system\" });\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"user\": {\n\t\t\t\tmessages.push({\n\t\t\t\t\tcontent: content\n\t\t\t\t\t\t.map((part) => {\n\t\t\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\t\t\treturn part.text;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcase \"image\": {\n\t\t\t\t\t\t\t\t\t// Extract image from this part\n\t\t\t\t\t\t\t\t\tif (part.image instanceof Uint8Array) {\n\t\t\t\t\t\t\t\t\t\t// Store the image data directly as Uint8Array\n\t\t\t\t\t\t\t\t\t\t// For Llama 3.2 Vision model, which needs array of integers\n\t\t\t\t\t\t\t\t\t\timages.push({\n\t\t\t\t\t\t\t\t\t\t\timage: part.image,\n\t\t\t\t\t\t\t\t\t\t\tmimeType: part.mimeType,\n\t\t\t\t\t\t\t\t\t\t\tproviderMetadata: part.providerMetadata,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\treturn \"\"; // No text for the image 
part\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.join(\"\\n\"),\n\t\t\t\t\trole: \"user\",\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"assistant\": {\n\t\t\t\tlet text = \"\";\n\t\t\t\tconst toolCalls: Array<{\n\t\t\t\t\tid: string;\n\t\t\t\t\ttype: \"function\";\n\t\t\t\t\tfunction: { name: string; arguments: string };\n\t\t\t\t}> = [];\n\n\t\t\t\tfor (const part of content) {\n\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\ttext += part.text;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tcase \"reasoning\": {\n\t\t\t\t\t\t\ttext += part.text;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tcase \"tool-call\": {\n\t\t\t\t\t\t\ttext = JSON.stringify({\n\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\tparameters: part.args,\n\t\t\t\t\t\t\t});\n\n\t\t\t\t\t\t\ttoolCalls.push({\n\t\t\t\t\t\t\t\tfunction: {\n\t\t\t\t\t\t\t\t\targuments: JSON.stringify(part.args),\n\t\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\tid: part.toolCallId,\n\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdefault: {\n\t\t\t\t\t\t\tconst exhaustiveCheck = part;\n\t\t\t\t\t\t\tthrow new Error(`Unsupported part type: ${exhaustiveCheck.type}`);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\tcontent: text,\n\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\ttool_calls:\n\t\t\t\t\t\ttoolCalls.length > 0\n\t\t\t\t\t\t\t? toolCalls.map(({ function: { name, arguments: args } }) => ({\n\t\t\t\t\t\t\t\t\tfunction: { arguments: args, name },\n\t\t\t\t\t\t\t\t\tid: \"null\",\n\t\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\t}))\n\t\t\t\t\t\t\t: undefined,\n\t\t\t\t});\n\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"tool\": {\n\t\t\t\tfor (const toolResponse of content) {\n\t\t\t\t\tmessages.push({\n\t\t\t\t\t\tcontent: JSON.stringify(toolResponse.result),\n\t\t\t\t\t\tname: toolResponse.toolName,\n\t\t\t\t\t\trole: \"tool\",\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = role satisfies never;\n\t\t\t\tthrow new Error(`Unsupported role: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn { images, messages };\n}\n","export function mapWorkersAIUsage(output: AiTextGenerationOutput | AiTextToImageOutput) {\n\tconst usage = (\n\t\toutput as {\n\t\t\tusage: { prompt_tokens: number; completion_tokens: number };\n\t\t}\n\t).usage ?? {\n\t\tcompletion_tokens: 0,\n\t\tprompt_tokens: 0,\n\t};\n\n\treturn {\n\t\tcompletionTokens: usage.completion_tokens,\n\t\tpromptTokens: usage.prompt_tokens,\n\t};\n}\n","// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.\n// This module is browser compatible.\n/**\n * Transform a stream into a stream where each chunk is divided by a newline,\n * be it `\\n` or `\\r\\n`. `\\r` can be enabled via the `allowCR` option.\n *\n * @example\n * ```ts\n * import { TextLineStream } from \"@std/streams/text-line-stream\";\n *\n * const res = await fetch(\"https://example.com\");\n * const lines = res.body!\n * .pipeThrough(new TextDecoderStream())\n * .pipeThrough(new TextLineStream());\n * ```\n */\nexport class TextLineStream extends TransformStream {\n #currentLine = \"\";\n /** Constructs a new instance. */\n constructor(options = { allowCR: false }) {\n super({\n transform: (chars, controller) => {\n chars = this.#currentLine + chars;\n while (true) {\n const lfIndex = chars.indexOf(\"\\n\");\n const crIndex = options.allowCR ? 
chars.indexOf(\"\\r\") : -1;\n if (crIndex !== -1 && crIndex !== (chars.length - 1) &&\n (lfIndex === -1 || (lfIndex - 1) > crIndex)) {\n controller.enqueue(chars.slice(0, crIndex));\n chars = chars.slice(crIndex + 1);\n continue;\n }\n if (lfIndex === -1)\n break;\n const endIndex = chars[lfIndex - 1] === \"\\r\" ? lfIndex - 1 : lfIndex;\n controller.enqueue(chars.slice(0, endIndex));\n chars = chars.slice(lfIndex + 1);\n }\n this.#currentLine = chars;\n },\n flush: (controller) => {\n if (this.#currentLine === \"\")\n return;\n const currentLine = options.allowCR && this.#currentLine.endsWith(\"\\r\")\n ? this.#currentLine.slice(0, -1)\n : this.#currentLine;\n controller.enqueue(currentLine);\n },\n });\n }\n}\n","import { TextLineStream } from './deps/jsr.io/@std/streams/0.221.0/text_line_stream.js';\nexport function stream(input) {\n let decoder = new TextDecoderStream();\n let split = new TextLineStream({ allowCR: true });\n return input.pipeThrough(decoder).pipeThrough(split);\n}\nexport function split(input) {\n let rgx = /[:]\\s*/;\n let match = rgx.exec(input);\n // \": comment\" -> index=0 -> ignore\n let idx = match && match.index;\n if (idx) {\n return [\n input.substring(0, idx),\n input.substring(idx + match[0].length),\n ];\n }\n}\nexport function fallback(headers, key, value) {\n let tmp = headers.get(key);\n if (!tmp)\n headers.set(key, value);\n}\n","import * as utils from './utils.js';\n/**\n * Convert a `Response` body containing Server Sent Events (SSE) into an Async Iterator that yields {@linkcode ServerSentEventMessage} objects.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events}\n *\n * @example\n * ```js\n * // Optional\n * let abort = new AbortController;\n *\n * // Manually fetch a Response\n * let res = await fetch('https://...', {\n * method: 'POST',\n * signal: abort.signal,\n * headers: {\n * 'api-key': 'token <value>',\n * 'content-type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true, // <- hypothetical\n * // ...\n * })\n * });\n *\n * if (res.ok) {\n * let stream = events(res, abort.signal);\n * for await (let event of stream) {\n * console.log('<<', event.data);\n * }\n * }\n * ```\n */\nexport async function* events(res, signal) {\n // TODO: throw error?\n if (!res.body)\n return;\n let iter = utils.stream(res.body);\n let line, reader = iter.getReader();\n let event;\n for (;;) {\n if (signal && signal.aborted) {\n return reader.cancel();\n }\n line = await reader.read();\n if (line.done)\n return;\n if (!line.value) {\n if (event)\n yield event;\n event = undefined;\n continue;\n }\n let [field, value] = utils.split(line.value) || [];\n if (!field)\n continue; // comment or invalid\n if (field === 'data') {\n event ||= {};\n event[field] = event[field] ? 
(event[field] + '\\n' + value) : value;\n }\n else if (field === 'event') {\n event ||= {};\n event[field] = value;\n }\n else if (field === 'id') {\n event ||= {};\n event[field] = +value || value;\n }\n else if (field === 'retry') {\n event ||= {};\n event[field] = +value || undefined;\n }\n }\n}\n/**\n * Convenience function that will `fetch` with the given arguments and, if ok, will return the {@linkcode events} async iterator.\n *\n * If the response is not ok (status 200-299), the `Response` is thrown.\n *\n * @example\n * ```js\n * // NOTE: throws `Response` if not 2xx status\n * let events = await stream('https://api.openai.com/...', {\n * method: 'POST',\n * headers: {\n * 'Authorization': 'Bearer <token>',\n * 'Content-Type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true,\n * // ...\n * })\n * });\n *\n * for await (let event of events) {\n * console.log('<<', JSON.parse(event.data));\n * }\n * ```\n */\nexport async function stream(input, init) {\n let req = new Request(input, init);\n utils.fallback(req.headers, 'Accept', 'text/event-stream');\n utils.fallback(req.headers, 'Content-Type', 'application/json');\n let r = await fetch(req);\n if (!r.ok)\n throw r;\n return events(r, req.signal);\n}\n","import type { LanguageModelV1, LanguageModelV1FunctionToolCall } from \"@ai-sdk/provider\";\n\n/**\n * General AI run interface with overloads to handle distinct return types.\n *\n * The behaviour depends on the combination of parameters:\n * 1. `returnRawResponse: true` => returns the raw Response object.\n * 2. `stream: true` => returns a ReadableStream (if available).\n * 3. Otherwise => returns post-processed AI results.\n */\nexport interface AiRun {\n\t// (1) Return raw Response if `options.returnRawResponse` is `true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions: AiOptions & { returnRawResponse: true },\n\t): Promise<Response>;\n\n\t// (2) Return a stream if the input has `stream: true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"] & { stream: true },\n\t\toptions?: AiOptions,\n\t): Promise<ReadableStream<Uint8Array>>;\n\n\t// (3) Return post-processed outputs by default.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions,\n\t): Promise<AiModels[Name][\"postProcessedOutputs\"]>;\n}\n\nexport type StringLike = string | { toString(): string };\n\n/**\n * Parameters for configuring the Cloudflare-based AI runner.\n */\nexport interface CreateRunConfig {\n\t/** Your Cloudflare account identifier. */\n\taccountId: string;\n\n\t/** Cloudflare API token/key with appropriate permissions. */\n\tapiKey: string;\n}\n\n/**\n * Creates a run method that emulates the Cloudflare Workers AI binding,\n * but uses the Cloudflare REST API under the hood. 
Headers and abort\n * signals are configured at creation time, rather than per-request.\n *\n * @param config An object containing:\n * - `accountId`: Cloudflare account identifier.\n * - `apiKey`: Cloudflare API token/key with suitable permissions.\n * - `headers`: Optional custom headers to merge with defaults.\n * - `signal`: Optional AbortSignal for request cancellation.\n *\n * @returns A function matching the AiRun interface.\n */\nexport function createRun(config: CreateRunConfig): AiRun {\n\tconst { accountId, apiKey } = config;\n\n\t// Return the AiRun-compatible function.\n\treturn async function run<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions & Record<string, StringLike>,\n\t): Promise<Response | ReadableStream<Uint8Array> | AiModels[Name][\"postProcessedOutputs\"]> {\n\t\t// biome-ignore lint/correctness/noUnusedVariables: they need to be destructured\n\t\tconst { gateway, prefix, extraHeaders, returnRawResponse, ...passthroughOptions } =\n\t\t\toptions || {};\n\n\t\tconst urlParams = new URLSearchParams();\n\t\tfor (const [key, value] of Object.entries(passthroughOptions)) {\n\t\t\t// throw a useful error if the value is not to-stringable\n\t\t\ttry {\n\t\t\t\tconst valueStr = value.toString();\n\t\t\t\tif (!valueStr) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\turlParams.append(key, valueStr);\n\t\t\t} catch (_error) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Value for option '${key}' is not able to be coerced into a string.`,\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tconst url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${\n\t\t\turlParams ? `?${urlParams}` : \"\"\n\t\t}`;\n\n\t\t// Merge default and custom headers.\n\t\tconst headers = {\n\t\t\tAuthorization: `Bearer ${apiKey}`,\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t};\n\n\t\tconst body = JSON.stringify(inputs);\n\n\t\t// Execute the POST request. The optional AbortSignal is applied here.\n\t\tconst response = await fetch(url, {\n\t\t\tbody,\n\t\t\theaders,\n\t\t\tmethod: \"POST\",\n\t\t});\n\n\t\t// (1) If the user explicitly requests the raw Response, return it as-is.\n\t\tif (returnRawResponse) {\n\t\t\treturn response;\n\t\t}\n\n\t\t// (2) If the AI input requests streaming, return the ReadableStream if available.\n\t\tif ((inputs as AiTextGenerationInput).stream === true) {\n\t\t\tif (response.body) {\n\t\t\t\treturn response.body;\n\t\t\t}\n\t\t\tthrow new Error(\"No readable body available for streaming.\");\n\t\t}\n\n\t\t// (3) In all other cases, parse JSON and return the result field.\n\t\tconst data = await response.json<{\n\t\t\tresult: AiModels[Name][\"postProcessedOutputs\"];\n\t\t}>();\n\t\treturn data.result;\n\t};\n}\n\nexport function prepareToolsAndToolChoice(\n\tmode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n\t\ttype: \"regular\";\n\t},\n) {\n\t// when the tools array is empty, change it to undefined to prevent errors:\n\tconst tools = mode.tools?.length ? 
mode.tools : undefined;\n\n\tif (tools == null) {\n\t\treturn { tool_choice: undefined, tools: undefined };\n\t}\n\n\tconst mappedTools = tools.map((tool) => ({\n\t\tfunction: {\n\t\t\t// @ts-expect-error - description is not a property of tool\n\t\t\tdescription: tool.description,\n\t\t\tname: tool.name,\n\t\t\t// @ts-expect-error - parameters is not a property of tool\n\t\t\tparameters: tool.parameters,\n\t\t},\n\t\ttype: \"function\",\n\t}));\n\n\tconst toolChoice = mode.toolChoice;\n\n\tif (toolChoice == null) {\n\t\treturn { tool_choice: undefined, tools: mappedTools };\n\t}\n\n\tconst type = toolChoice.type;\n\n\tswitch (type) {\n\t\tcase \"auto\":\n\t\t\treturn { tool_choice: type, tools: mappedTools };\n\t\tcase \"none\":\n\t\t\treturn { tool_choice: type, tools: mappedTools };\n\t\tcase \"required\":\n\t\t\treturn { tool_choice: \"any\", tools: mappedTools };\n\n\t\t// workersAI does not support tool mode directly,\n\t\t// so we filter the tools and force the tool choice through 'any'\n\t\tcase \"tool\":\n\t\t\treturn {\n\t\t\t\ttool_choice: \"any\",\n\t\t\t\ttools: mappedTools.filter((tool) => tool.function.name === toolChoice.toolName),\n\t\t\t};\n\t\tdefault: {\n\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\tthrow new Error(`Unsupported tool choice type: ${exhaustiveCheck}`);\n\t\t}\n\t}\n}\n\nexport function lastMessageWasUser<T extends { role: string }>(messages: T[]) {\n\treturn messages.length > 0 && messages[messages.length - 1]!.role === \"user\";\n}\n\nfunction mergePartialToolCalls(partialCalls: any[]) {\n\tconst mergedCallsByIndex: any = {};\n\n\tfor (const partialCall of partialCalls) {\n\t\tconst index = partialCall.index;\n\n\t\tif (!mergedCallsByIndex[index]) {\n\t\t\tmergedCallsByIndex[index] = {\n\t\t\t\tfunction: {\n\t\t\t\t\targuments: \"\",\n\t\t\t\t\tname: partialCall.function?.name || \"\",\n\t\t\t\t},\n\t\t\t\tid: partialCall.id || \"\",\n\t\t\t\ttype: partialCall.type || \"\",\n\t\t\t};\n\t\t} else {\n\t\t\tif (partialCall.id) {\n\t\t\t\tmergedCallsByIndex[index].id = partialCall.id;\n\t\t\t}\n\t\t\tif (partialCall.type) {\n\t\t\t\tmergedCallsByIndex[index].type = partialCall.type;\n\t\t\t}\n\n\t\t\tif (partialCall.function?.name) {\n\t\t\t\tmergedCallsByIndex[index].function.name = partialCall.function.name;\n\t\t\t}\n\t\t}\n\n\t\t// Append arguments if available, this assumes arguments come in the right order\n\t\tif (partialCall.function?.arguments) {\n\t\t\tmergedCallsByIndex[index].function.arguments += partialCall.function.arguments;\n\t\t}\n\t}\n\n\treturn Object.values(mergedCallsByIndex);\n}\n\nfunction processToolCall(toolCall: any): LanguageModelV1FunctionToolCall {\n\t// Check for OpenAI format tool calls first\n\tif (toolCall.function && toolCall.id) {\n\t\treturn {\n\t\t\targs:\n\t\t\t\ttypeof toolCall.function.arguments === \"string\"\n\t\t\t\t\t? toolCall.function.arguments\n\t\t\t\t\t: JSON.stringify(toolCall.function.arguments || {}),\n\t\t\ttoolCallId: toolCall.id,\n\t\t\ttoolCallType: \"function\",\n\t\t\ttoolName: toolCall.function.name,\n\t\t};\n\t}\n\treturn {\n\t\targs:\n\t\t\ttypeof toolCall.arguments === \"string\"\n\t\t\t\t? 
toolCall.arguments\n\t\t\t\t: JSON.stringify(toolCall.arguments || {}),\n\t\ttoolCallId: toolCall.name,\n\t\ttoolCallType: \"function\",\n\t\ttoolName: toolCall.name,\n\t};\n}\n\nexport function processToolCalls(output: any): LanguageModelV1FunctionToolCall[] {\n\tif (output.tool_calls && Array.isArray(output.tool_calls)) {\n\t\treturn output.tool_calls.map((toolCall: any) => {\n\t\t\tconst processedToolCall = processToolCall(toolCall);\n\t\t\treturn processedToolCall;\n\t\t});\n\t}\n\n\tif (\n\t\toutput?.choices?.[0]?.message?.tool_calls &&\n\t\tArray.isArray(output.choices[0].message.tool_calls)\n\t) {\n\t\treturn output.choices[0].message.tool_calls.map((toolCall: any) => {\n\t\t\tconst processedToolCall = processToolCall(toolCall);\n\t\t\treturn processedToolCall;\n\t\t});\n\t}\n\n\treturn [];\n}\n\nexport function processPartialToolCalls(partialToolCalls: any[]) {\n\tconst mergedToolCalls = mergePartialToolCalls(partialToolCalls);\n\treturn processToolCalls({ tool_calls: mergedToolCalls });\n}\n\nexport function processText(output: AiTextGenerationOutput): string | undefined {\n\t// @ts-expect-error OpenAI format not typed yet\n\tif (output?.choices?.[0]?.message?.content.length) {\n\t\t// @ts-expect-error OpenAI format not typed yet\n\t\treturn output?.choices?.[0]?.message?.content;\n\t}\n\n\tif (\"response\" in output) {\n\t\tif (typeof output.response === \"object\" && output.response !== null) {\n\t\t\treturn JSON.stringify(output.response); // ai-sdk expects a string here\n\t\t}\n\n\t\treturn output.response;\n\t}\n}\n","import type { LanguageModelV1StreamPart } from \"@ai-sdk/provider\";\nimport { events } from \"fetch-event-stream\";\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport { processPartialToolCalls } from \"./utils\";\n\nexport function getMappedStream(response: Response) {\n\tconst chunkEvent = events(response);\n\tlet usage = { completionTokens: 0, promptTokens: 0 };\n\tconst partialToolCalls: any[] = [];\n\n\treturn new ReadableStream<LanguageModelV1StreamPart>({\n\t\tasync start(controller) {\n\t\t\tfor await (const event of chunkEvent) {\n\t\t\t\tif (!event.data) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (event.data === \"[DONE]\") {\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tconst chunk = JSON.parse(event.data);\n\t\t\t\tif (chunk.usage) {\n\t\t\t\t\tusage = mapWorkersAIUsage(chunk);\n\t\t\t\t}\n\t\t\t\tif (chunk.tool_calls) {\n\t\t\t\t\tpartialToolCalls.push(...chunk.tool_calls);\n\t\t\t\t}\n\t\t\t\tchunk.response?.length &&\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttextDelta: chunk.response,\n\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t});\n\t\t\t\tchunk?.choices?.[0]?.delta?.reasoning_content?.length &&\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"reasoning\",\n\t\t\t\t\t\ttextDelta: chunk.choices[0].delta.reasoning_content,\n\t\t\t\t\t});\n\t\t\t\tchunk?.choices?.[0]?.delta?.content?.length &&\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\ttextDelta: chunk.choices[0].delta.content,\n\t\t\t\t\t});\n\t\t\t}\n\n\t\t\tif (partialToolCalls.length > 0) {\n\t\t\t\tconst toolCalls = processPartialToolCalls(partialToolCalls);\n\t\t\t\ttoolCalls.map((toolCall) => {\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"tool-call\",\n\t\t\t\t\t\t...toolCall,\n\t\t\t\t\t});\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tcontroller.enqueue({\n\t\t\t\tfinishReason: \"stop\",\n\t\t\t\ttype: \"finish\",\n\t\t\t\tusage: usage,\n\t\t\t});\n\t\t\tcontroller.close();\n\t\t},\n\t});\n}\n","import { type EmbeddingModelV1, 
TooManyEmbeddingValuesForCallError } from \"@ai-sdk/provider\";\nimport type { StringLike } from \"./utils\";\nimport type { EmbeddingModels } from \"./workersai-models\";\n\nexport type WorkersAIEmbeddingConfig = {\n\tprovider: string;\n\tbinding: Ai;\n\tgateway?: GatewayOptions;\n};\n\nexport type WorkersAIEmbeddingSettings = {\n\tgateway?: GatewayOptions;\n\tmaxEmbeddingsPerCall?: number;\n\tsupportsParallelCalls?: boolean;\n} & {\n\t/**\n\t * Arbitrary provider-specific options forwarded unmodified.\n\t */\n\t[key: string]: StringLike;\n};\n\nexport class WorkersAIEmbeddingModel implements EmbeddingModelV1<string> {\n\t/**\n\t * Semantic version of the {@link EmbeddingModelV1} specification implemented\n\t * by this class. It never changes.\n\t */\n\treadonly specificationVersion = \"v1\";\n\treadonly modelId: EmbeddingModels;\n\tprivate readonly config: WorkersAIEmbeddingConfig;\n\tprivate readonly settings: WorkersAIEmbeddingSettings;\n\n\t/**\n\t * Provider name exposed for diagnostics and error reporting.\n\t */\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tget maxEmbeddingsPerCall(): number {\n\t\t// https://developers.cloudflare.com/workers-ai/platform/limits/#text-embeddings\n\t\tconst maxEmbeddingsPerCall = this.modelId === \"@cf/baai/bge-large-en-v1.5\" ? 1500 : 3000;\n\t\treturn this.settings.maxEmbeddingsPerCall ?? maxEmbeddingsPerCall;\n\t}\n\n\tget supportsParallelCalls(): boolean {\n\t\treturn this.settings.supportsParallelCalls ?? true;\n\t}\n\n\tconstructor(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings: WorkersAIEmbeddingSettings,\n\t\tconfig: WorkersAIEmbeddingConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tasync doEmbed({\n\t\tvalues,\n\t}: Parameters<EmbeddingModelV1<string>[\"doEmbed\"]>[0]): Promise<\n\t\tAwaited<ReturnType<EmbeddingModelV1<string>[\"doEmbed\"]>>\n\t> {\n\t\tif (values.length > this.maxEmbeddingsPerCall) {\n\t\t\tthrow new TooManyEmbeddingValuesForCallError({\n\t\t\t\tmaxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\tprovider: this.provider,\n\t\t\t\tvalues,\n\t\t\t});\n\t\t}\n\n\t\tconst { gateway, ...passthroughOptions } = this.settings;\n\n\t\tconst response = await this.config.binding.run(\n\t\t\tthis.modelId,\n\t\t\t// @ts-ignore: Error introduced with \"@cloudflare/workers-types\": \"^4.20250617.0\"\n\t\t\t{\n\t\t\t\ttext: values,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? 
gateway, ...passthroughOptions },\n\t\t);\n\n\t\treturn {\n\t\t\t// @ts-ignore: Error introduced with \"@cloudflare/workers-types\": \"^4.20250617.0\"\n\t\t\tembeddings: response.data,\n\t\t};\n\t}\n}\n","import {\n\ttype LanguageModelV1,\n\ttype LanguageModelV1CallWarning,\n\ttype LanguageModelV1StreamPart,\n\tUnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport { mapWorkersAIFinishReason } from \"./map-workersai-finish-reason\";\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport { getMappedStream } from \"./streaming\";\nimport {\n\tlastMessageWasUser,\n\tprepareToolsAndToolChoice,\n\tprocessText,\n\tprocessToolCalls,\n} from \"./utils\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\ntype WorkersAIChatConfig = {\n\tprovider: string;\n\tbinding: Ai;\n\tgateway?: GatewayOptions;\n};\n\nexport class WorkersAIChatLanguageModel implements LanguageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\treadonly defaultObjectGenerationMode = \"json\";\n\n\treadonly modelId: TextGenerationModels;\n\treadonly settings: WorkersAIChatSettings;\n\n\tprivate readonly config: WorkersAIChatConfig;\n\n\tconstructor(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings: WorkersAIChatSettings,\n\t\tconfig: WorkersAIChatConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tprivate getArgs({\n\t\tmode,\n\t\tmaxTokens,\n\t\ttemperature,\n\t\ttopP,\n\t\tfrequencyPenalty,\n\t\tpresencePenalty,\n\t\tseed,\n\t}: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n\t\tconst type = mode.type;\n\n\t\tconst warnings: LanguageModelV1CallWarning[] = [];\n\n\t\tif (frequencyPenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\tsetting: \"frequencyPenalty\",\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t});\n\t\t}\n\n\t\tif (presencePenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\tsetting: \"presencePenalty\",\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t});\n\t\t}\n\n\t\tconst baseArgs = {\n\t\t\t// standardized settings:\n\t\t\tmax_tokens: maxTokens,\n\t\t\t// model id:\n\t\t\tmodel: this.modelId,\n\t\t\trandom_seed: seed,\n\n\t\t\t// model specific settings:\n\t\t\tsafe_prompt: this.settings.safePrompt,\n\t\t\ttemperature,\n\t\t\ttop_p: topP,\n\t\t};\n\n\t\tswitch (type) {\n\t\t\tcase \"regular\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-json\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\tresponse_format: {\n\t\t\t\t\t\t\tjson_schema: mode.schema,\n\t\t\t\t\t\t\ttype: \"json_schema\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\ttools: undefined,\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-tool\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\ttool_choice: \"any\",\n\t\t\t\t\t\ttools: [{ function: mode.tool, type: \"function\" }],\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\t// @ts-expect-error - this is unreachable code\n\t\t\t// TODO: fixme\n\t\t\tcase \"object-grammar\": {\n\t\t\t\tthrow new UnsupportedFunctionalityError({\n\t\t\t\t\tfunctionality: \"object-grammar mode\",\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = type 
satisfies never;\n\t\t\t\tthrow new Error(`Unsupported type: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync doGenerate(\n\t\toptions: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\t// biome-ignore lint/correctness/noUnusedVariables: this needs to be destructured\n\t\tconst { gateway, safePrompt, ...passthroughOptions } = this.settings;\n\n\t\t// Extract image from messages if present\n\t\tconst { messages, images } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\t// TODO: support for multiple images\n\t\tif (images.length !== 0 && images.length !== 1) {\n\t\t\tthrow new Error(\"Multiple images are not yet supported as input\");\n\t\t}\n\n\t\tconst imagePart = images[0];\n\n\t\tconst output = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\tmessages: messages,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// Convert Uint8Array to Array of integers for Llama 3.2 Vision model\n\t\t\t\t// TODO: maybe use the base64 string version?\n\t\t\t\t...(imagePart ? { image: Array.from(imagePart.image) } : {}),\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? gateway, ...passthroughOptions },\n\t\t);\n\n\t\tif (output instanceof ReadableStream) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\treturn {\n\t\t\tfinishReason: mapWorkersAIFinishReason(output),\n\t\t\trawCall: { rawPrompt: messages, rawSettings: args },\n\t\t\trawResponse: { body: output },\n\t\t\ttext: processText(output),\n\t\t\ttoolCalls: processToolCalls(output),\n\t\t\t// @ts-ignore: Missing types\n\t\t\treasoning: output?.choices?.[0]?.message?.reasoning_content,\n\t\t\tusage: mapWorkersAIUsage(output),\n\t\t\twarnings,\n\t\t};\n\t}\n\n\tasync doStream(\n\t\toptions: Parameters<LanguageModelV1[\"doStream\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\t// Extract image from messages if present\n\t\tconst { messages, images } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\t// [1] When the latest message is not a tool response, we use the regular generate function\n\t\t// and simulate it as a streamed response in order to satisfy the AI SDK's interface for\n\t\t// doStream...\n\t\tif (args.tools?.length && lastMessageWasUser(messages)) {\n\t\t\tconst response = await this.doGenerate(options);\n\n\t\t\tif (response instanceof ReadableStream) {\n\t\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\trawCall: { rawPrompt: messages, rawSettings: args },\n\t\t\t\tstream: new ReadableStream<LanguageModelV1StreamPart>({\n\t\t\t\t\tasync start(controller) {\n\t\t\t\t\t\tif (response.text) {\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttextDelta: response.text,\n\t\t\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (response.toolCalls) {\n\t\t\t\t\t\t\tfor (const toolCall of response.toolCalls) {\n\t\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\t\ttype: \"tool-call\",\n\t\t\t\t\t\t\t\t\t...toolCall,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (response.reasoning && typeof response.reasoning === \"string\") 
{\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttype: \"reasoning\",\n\t\t\t\t\t\t\t\ttextDelta: response.reasoning,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\tfinishReason: mapWorkersAIFinishReason(response),\n\t\t\t\t\t\t\ttype: \"finish\",\n\t\t\t\t\t\t\tusage: response.usage,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcontroller.close();\n\t\t\t\t\t},\n\t\t\t\t}),\n\t\t\t\twarnings,\n\t\t\t};\n\t\t}\n\n\t\t// [2] ...otherwise, we just proceed as normal and stream the response directly from the remote model.\n\t\tconst { gateway, ...passthroughOptions } = this.settings;\n\n\t\t// TODO: support for multiple images\n\t\tif (images.length !== 0 && images.length !== 1) {\n\t\t\tthrow new Error(\"Multiple images are not yet supported as input\");\n\t\t}\n\n\t\tconst imagePart = images[0];\n\n\t\tconst response = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\tmessages: messages,\n\t\t\t\tstream: true,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// Convert Uint8Array to Array of integers for Llama 3.2 Vision model\n\t\t\t\t// TODO: maybe use the base64 string version?\n\t\t\t\t...(imagePart ? { image: Array.from(imagePart.image) } : {}),\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? gateway, ...passthroughOptions },\n\t\t);\n\n\t\tif (!(response instanceof ReadableStream)) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\treturn {\n\t\t\trawCall: { rawPrompt: messages, rawSettings: args },\n\t\t\tstream: getMappedStream(new Response(response)),\n\t\t\twarnings,\n\t\t};\n\t}\n}\n","import type { LanguageModelV1FinishReason } from \"@ai-sdk/provider\";\n\nexport function mapWorkersAIFinishReason(finishReasonOrResponse: any): LanguageModelV1FinishReason {\n\tlet finishReason: string | null | undefined;\n\n\t// If it's a string/null/undefined, use it directly (original behavior)\n\tif (\n\t\ttypeof finishReasonOrResponse === \"string\" ||\n\t\tfinishReasonOrResponse === null ||\n\t\tfinishReasonOrResponse === undefined\n\t) {\n\t\tfinishReason = finishReasonOrResponse;\n\t} else if (typeof finishReasonOrResponse === \"object\" && finishReasonOrResponse !== null) {\n\t\tconst response = finishReasonOrResponse;\n\n\t\tif (\n\t\t\t\"choices\" in response &&\n\t\t\tArray.isArray(response.choices) &&\n\t\t\tresponse.choices.length > 0\n\t\t) {\n\t\t\tfinishReason = response.choices[0].finish_reason;\n\t\t} else if (\"finish_reason\" in response) {\n\t\t\tfinishReason = response.finish_reason;\n\t\t} else {\n\t\t\tfinishReason = undefined;\n\t\t}\n\t}\n\n\tswitch (finishReason) {\n\t\tcase \"stop\":\n\t\t\treturn \"stop\";\n\t\tcase \"length\":\n\t\tcase \"model_length\":\n\t\t\treturn \"length\";\n\t\tcase \"tool_calls\":\n\t\t\treturn \"tool-calls\";\n\t\tcase \"error\":\n\t\t\treturn \"error\";\n\t\tcase \"other\":\n\t\t\treturn \"other\";\n\t\tcase \"unknown\":\n\t\t\treturn \"unknown\";\n\t\tdefault:\n\t\t\t// Default to `stop` for backwards compatibility\n\t\t\treturn \"stop\";\n\t}\n}\n","import type { ImageModelV1, ImageModelV1CallWarning } from \"@ai-sdk/provider\";\nimport type { WorkersAIImageConfig } from \"./workersai-image-config\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels } from \"./workersai-models\";\n\nexport class 
WorkersAIImageModel implements ImageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\n\tget maxImagesPerCall(): number {\n\t\treturn this.settings.maxImagesPerCall ?? 1;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\tconstructor(\n\t\treadonly modelId: ImageGenerationModels,\n\t\treadonly settings: WorkersAIImageSettings,\n\t\treadonly config: WorkersAIImageConfig,\n\t) {}\n\n\tasync doGenerate({\n\t\tprompt,\n\t\tn,\n\t\tsize,\n\t\taspectRatio,\n\t\tseed,\n\t\t// headers,\n\t\t// abortSignal,\n\t}: Parameters<ImageModelV1[\"doGenerate\"]>[0]): Promise<\n\t\tAwaited<ReturnType<ImageModelV1[\"doGenerate\"]>>\n\t> {\n\t\tconst { width, height } = getDimensionsFromSizeString(size);\n\n\t\tconst warnings: Array<ImageModelV1CallWarning> = [];\n\n\t\tif (aspectRatio != null) {\n\t\t\twarnings.push({\n\t\t\t\tdetails: \"This model does not support aspect ratio. Use `size` instead.\",\n\t\t\t\tsetting: \"aspectRatio\",\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t});\n\t\t}\n\n\t\tconst generateImage = async () => {\n\t\t\tconst outputStream: ReadableStream<Uint8Array> = await this.config.binding.run(\n\t\t\t\tthis.modelId,\n\t\t\t\t{\n\t\t\t\t\theight,\n\t\t\t\t\tprompt,\n\t\t\t\t\tseed,\n\t\t\t\t\twidth,\n\t\t\t\t},\n\t\t\t);\n\n\t\t\t// Convert the output stream to a Uint8Array.\n\t\t\treturn streamToUint8Array(outputStream);\n\t\t};\n\n\t\tconst images: Uint8Array[] = await Promise.all(\n\t\t\tArray.from({ length: n }, () => generateImage()),\n\t\t);\n\n\t\t// type AiTextToImageOutput = ReadableStream<Uint8Array>;\n\n\t\treturn {\n\t\t\timages,\n\t\t\tresponse: {\n\t\t\t\theaders: {},\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\ttimestamp: new Date(),\n\t\t\t},\n\t\t\twarnings,\n\t\t};\n\t}\n}\n\nfunction getDimensionsFromSizeString(size: string | undefined) {\n\tconst [width, height] = size?.split(\"x\") ?? [undefined, undefined];\n\n\treturn {\n\t\theight: parseInteger(height),\n\t\twidth: parseInteger(width),\n\t};\n}\n\nfunction parseInteger(value?: string) {\n\tif (value === \"\" || !value) return undefined;\n\tconst number = Number(value);\n\treturn Number.isInteger(number) ? 
number : undefined;\n}\n\nasync function streamToUint8Array(stream: ReadableStream<Uint8Array>): Promise<Uint8Array> {\n\tconst reader = stream.getReader();\n\tconst chunks: Uint8Array[] = [];\n\tlet totalLength = 0;\n\n\t// Read the stream until it is finished.\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tchunks.push(value);\n\t\ttotalLength += value.length;\n\t}\n\n\t// Allocate a new Uint8Array to hold all the data.\n\tconst result = new Uint8Array(totalLength);\n\tlet offset = 0;\n\tfor (const chunk of chunks) {\n\t\tresult.set(chunk, offset);\n\t\toffset += chunk.length;\n\t}\n\treturn result;\n}\n","import { AutoRAGChatLanguageModel } from \"./autorag-chat-language-model\";\nimport type { AutoRAGChatSettings } from \"./autorag-chat-settings\";\nimport { createRun } from \"./utils\";\nimport {\n\tWorkersAIEmbeddingModel,\n\ttype WorkersAIEmbeddingSettings,\n} from \"./workers-ai-embedding-model\";\nimport { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport { WorkersAIImageModel } from \"./workersai-image-model\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type {\n\tEmbeddingModels,\n\tImageGenerationModels,\n\tTextGenerationModels,\n} from \"./workersai-models\";\n\nexport type WorkersAISettings = (\n\t| {\n\t\t\t/**\n\t\t\t * Provide a Cloudflare AI binding.\n\t\t\t */\n\t\t\tbinding: Ai;\n\n\t\t\t/**\n\t\t\t * Credentials must be absent when a binding is given.\n\t\t\t */\n\t\t\taccountId?: never;\n\t\t\tapiKey?: never;\n\t }\n\t| {\n\t\t\t/**\n\t\t\t * Provide Cloudflare API credentials directly. Must be used if a binding is not specified.\n\t\t\t */\n\t\t\taccountId: string;\n\t\t\tapiKey: string;\n\t\t\t/**\n\t\t\t * Both binding must be absent if credentials are used directly.\n\t\t\t */\n\t\t\tbinding?: never;\n\t }\n) & {\n\t/**\n\t * Optionally specify a gateway.\n\t */\n\tgateway?: GatewayOptions;\n};\n\nexport interface WorkersAI {\n\t(modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings?: WorkersAIChatSettings,\n\t): WorkersAIChatLanguageModel;\n\n\tembedding(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings?: WorkersAIEmbeddingSettings,\n\t): WorkersAIEmbeddingModel;\n\n\ttextEmbedding(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings?: WorkersAIEmbeddingSettings,\n\t): WorkersAIEmbeddingModel;\n\n\ttextEmbeddingModel(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings?: WorkersAIEmbeddingSettings,\n\t): WorkersAIEmbeddingModel;\n\n\t/**\n\t * Creates a model for image generation.\n\t **/\n\timage(modelId: ImageGenerationModels, settings?: WorkersAIImageSettings): WorkersAIImageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createWorkersAI(options: WorkersAISettings): WorkersAI {\n\t// Use a binding if one is directly provided. 
Otherwise use credentials to create\n\t// a `run` method that calls the Cloudflare REST API.\n\tlet binding: Ai | undefined;\n\n\tif (options.binding) {\n\t\tbinding = options.binding;\n\t} else {\n\t\tconst { accountId, apiKey } = options;\n\t\tbinding = {\n\t\t\trun: createRun({ accountId, apiKey }),\n\t\t} as Ai;\n\t}\n\n\tif (!binding) {\n\t\tthrow new Error(\"Either a binding or credentials must be provided.\");\n\t}\n\n\tconst createChatModel = (modelId: TextGenerationModels, settings: WorkersAIChatSettings = {}) =>\n\t\tnew WorkersAIChatLanguageModel(modelId, settings, {\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t\tprovider: \"workersai.chat\",\n\t\t});\n\n\tconst createImageModel = (\n\t\tmodelId: ImageGenerationModels,\n\t\tsettings: WorkersAIImageSettings = {},\n\t) =>\n\t\tnew WorkersAIImageModel(modelId, settings, {\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t\tprovider: \"workersai.image\",\n\t\t});\n\tconst createEmbeddingModel = (\n\t\tmodelId: EmbeddingModels,\n\t\tsettings: WorkersAIEmbeddingSettings = {},\n\t) =>\n\t\tnew WorkersAIEmbeddingModel(modelId, settings, {\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t\tprovider: \"workersai.embedding\",\n\t\t});\n\n\tconst provider = (modelId: TextGenerationModels, settings?: WorkersAIChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(modelId, settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\tprovider.embedding = createEmbeddingModel;\n\tprovider.textEmbedding = createEmbeddingModel;\n\tprovider.textEmbeddingModel = createEmbeddingModel;\n\tprovider.image = createImageModel;\n\tprovider.imageModel = createImageModel;\n\n\treturn provider;\n}\n\nexport type AutoRAGSettings = {\n\tbinding: AutoRAG;\n};\n\nexport interface AutoRAGProvider {\n\t(options?: AutoRAGChatSettings): AutoRAGChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(settings?: AutoRAGChatSettings): AutoRAGChatLanguageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createAutoRAG(options: AutoRAGSettings): AutoRAGProvider {\n\tconst binding = options.binding;\n\n\tconst createChatModel = (settings: AutoRAGChatSettings = {}) =>\n\t\t// @ts-ignore Needs fix from @cloudflare/workers-types for custom types\n\t\tnew AutoRAGChatLanguageModel(\"@cf/meta/llama-3.3-70b-instruct-fp8-fast\", settings, {\n\t\t\tbinding,\n\t\t\tprovider: \"autorag.chat\",\n\t\t});\n\n\tconst provider = (settings?: AutoRAGChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\n\treturn 
provider;\n}\n"],"mappings":";;;;;;;;;;;;AAAA;AAAA,EAGC;AAAA,OACM;;;ACDA,SAAS,+BAA+B,QAO7C;AACD,QAAM,WAAgC,CAAC;AACvC,QAAM,SAIA,CAAC;AAEP,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACvC,YAAQ,MAAM;AAAA,MACb,KAAK,UAAU;AACd,iBAAS,KAAK,EAAE,SAAS,MAAM,SAAS,CAAC;AACzC;AAAA,MACD;AAAA,MAEA,KAAK,QAAQ;AACZ,iBAAS,KAAK;AAAA,UACb,SAAS,QACP,IAAI,CAAC,SAAS;AACd,oBAAQ,KAAK,MAAM;AAAA,cAClB,KAAK,QAAQ;AACZ,uBAAO,KAAK;AAAA,cACb;AAAA,cACA,KAAK,SAAS;AAEb,oBAAI,KAAK,iBAAiB,YAAY;AAGrC,yBAAO,KAAK;AAAA,oBACX,OAAO,KAAK;AAAA,oBACZ,UAAU,KAAK;AAAA,oBACf,kBAAkB,KAAK;AAAA,kBACxB,CAAC;AAAA,gBACF;AACA,uBAAO;AAAA,cACR;AAAA,YACD;AAAA,UACD,CAAC,EACA,KAAK,IAAI;AAAA,UACX,MAAM;AAAA,QACP,CAAC;AACD;AAAA,MACD;AAAA,MAEA,KAAK,aAAa;AACjB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC3B,kBAAQ,KAAK,MAAM;AAAA,YAClB,KAAK,QAAQ;AACZ,sBAAQ,KAAK;AACb;AAAA,YACD;AAAA,YAEA,KAAK,aAAa;AACjB,sBAAQ,KAAK;AACb;AAAA,YACD;AAAA,YAEA,KAAK,aAAa;AACjB,qBAAO,KAAK,UAAU;AAAA,gBACrB,MAAM,KAAK;AAAA,gBACX,YAAY,KAAK;AAAA,cAClB,CAAC;AAED,wBAAU,KAAK;AAAA,gBACd,UAAU;AAAA,kBACT,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,kBACnC,MAAM,KAAK;AAAA,gBACZ;AAAA,gBACA,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,cACP,CAAC;AACD;AAAA,YACD;AAAA,YACA,SAAS;AACR,oBAAM,kBAAkB;AACxB,oBAAM,IAAI,MAAM,0BAA0B,gBAAgB,IAAI,EAAE;AAAA,YACjE;AAAA,UACD;AAAA,QACD;AAEA,iBAAS,KAAK;AAAA,UACb,SAAS;AAAA,UACT,MAAM;AAAA,UACN,YACC,UAAU,SAAS,IAChB,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC5D,UAAU,EAAE,WAAW,MAAM,KAAK;AAAA,YAClC,IAAI;AAAA,YACJ,MAAM;AAAA,UACP,EAAE,IACD;AAAA,QACL,CAAC;AAED;AAAA,MACD;AAAA,MAEA,KAAK,QAAQ;AACZ,mBAAW,gBAAgB,SAAS;AACnC,mBAAS,KAAK;AAAA,YACb,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,MAAM,aAAa;AAAA,YACnB,MAAM;AAAA,UACP,CAAC;AAAA,QACF;AACA;AAAA,MACD;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAEA,SAAO,EAAE,QAAQ,SAAS;AAC3B;;;ACpIO,SAAS,kBAAkB,QAAsD;AACvF,QAAM,QACL,OAGC,SAAS;AAAA,IACV,mBAAmB;AAAA,IACnB,eAAe;AAAA,EAChB;AAEA,SAAO;AAAA,IACN,kBAAkB,MAAM;AAAA,IACxB,cAAc,MAAM;AAAA,EACrB;AACD;;;ACdA;AAgBO,IAAM,iBAAN,cAA6B,gBAAgB;AAAA;AAAA,EAGhD,YAAY,UAAU,EAAE,SAAS,MAAM,GAAG;AACtC,UAAM;AAAA,MACF,WAAW,CAAC,OAAO,eAAe;AAC9B,gBAAQ,mBAAK,gBAAe;AAC5B,eAAO,MAAM;AACT,gBAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,gBAAM,UAAU,QAAQ,UAAU,MAAM,QAAQ,IAAI,IAAI;AACxD,cAAI,YAAY,MAAM,YAAa,MAAM,SAAS,MAC7C,YAAY,MAAO,UAAU,IAAK,UAAU;AAC7C,uBAAW,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC;AAC1C,oBAAQ,MAAM,MAAM,UAAU,CAAC;AAC/B;AAAA,UACJ;AACA,cAAI,YAAY;AACZ;AACJ,gBAAM,WAAW,MAAM,UAAU,CAAC,MAAM,OAAO,UAAU,IAAI;AAC7D,qBAAW,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC;AAC3C,kBAAQ,MAAM,MAAM,UAAU,CAAC;AAAA,QACnC;AACA,2BAAK,cAAe;AAAA,MACxB;AAAA,MACA,OAAO,CAAC,eAAe;AACnB,YAAI,mBAAK,kBAAiB;AACtB;AACJ,cAAM,cAAc,QAAQ,WAAW,mBAAK,cAAa,SAAS,IAAI,IAChE,mBAAK,cAAa,MAAM,GAAG,EAAE,IAC7B,mBAAK;AACX,mBAAW,QAAQ,WAAW;AAAA,MAClC;AAAA,IACJ,CAAC;AA/BL,qCAAe;AAAA,EAgCf;AACJ;AAjCI;;;AChBG,SAAS,OAAO,OAAO;AAC1B,MAAI,UAAU,IAAI,kBAAkB;AACpC,MAAIA,SAAQ,IAAI,eAAe,EAAE,SAAS,KAAK,CAAC;AAChD,SAAO,MAAM,YAAY,OAAO,EAAE,YAAYA,MAAK;AACvD;AACO,SAAS,MAAM,OAAO;AACzB,MAAI,MAAM;AACV,MAAI,QAAQ,IAAI,KAAK,KAAK;AAE1B,MAAI,MAAM,SAAS,MAAM;AACzB,MAAI,KAAK;AACL,WAAO;AAAA,MACH,MAAM,UAAU,GAAG,GAAG;AAAA,MACtB,MAAM,UAAU,MAAM,MAAM,CAAC,EAAE,MAAM;AAAA,IACzC;AAAA,EACJ;AACJ;;;ACgBA,gBAAuB,OAAO,KAAK,QAAQ;AAEvC,MAAI,CAAC,IAAI;AACL;AACJ,MAAI,OAAa,OAAO,IAAI,IAAI;AAChC,MAAI,MAAM,SAAS,KAAK,UAAU;AAClC,MAAI;AACJ,aAAS;AACL,QAAI,UAAU,OAAO,SAAS;AAC1B,aAAO,OAAO,OAAO;AAAA,IACzB;AACA,WAAO,MAAM,OAAO,KAAK;AACzB,QAAI,KAAK;AACL;AACJ,QAAI,CAAC,KAAK,OAAO;AACb,UAAI;AACA,cAAM;AACV,cAAQ;AACR;AAAA,IACJ;AACA,QAAI,CAAC,OAAO,KAAK,IAAU,MAAM,KAAK,KAAK,KAAK,CAAC;AACjD,QAAI,CAAC;AACD;AACJ,QAAI,UAAU,QAAQ;AAClB,wBAA
U,CAAC;AACX,YAAM,KAAK,IAAI,MAAM,KAAK,IAAK,MAAM,KAAK,IAAI,OAAO,QAAS;AAAA,IAClE,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI;AAAA,IACnB,WACS,UAAU,MAAM;AACrB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B;AAAA,EACJ;AACJ;;;ACdO,SAAS,UAAU,QAAgC;AACzD,QAAM,EAAE,WAAW,OAAO,IAAI;AAG9B,SAAO,eAAe,IACrB,OACA,QACA,SAC0F;AAE1F,UAAM,EAAE,SAAS,QAAQ,cAAc,mBAAmB,GAAG,mBAAmB,IAC/E,WAAW,CAAC;AAEb,UAAM,YAAY,IAAI,gBAAgB;AACtC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,kBAAkB,GAAG;AAE9D,UAAI;AACH,cAAM,WAAW,MAAM,SAAS;AAChC,YAAI,CAAC,UAAU;AACd;AAAA,QACD;AACA,kBAAU,OAAO,KAAK,QAAQ;AAAA,MAC/B,SAAS,QAAQ;AAChB,cAAM,IAAI;AAAA,UACT,qBAAqB,GAAG;AAAA,QACzB;AAAA,MACD;AAAA,IACD;AAEA,UAAM,MAAM,iDAAiD,SAAS,WAAW,KAAK,GACrF,YAAY,IAAI,SAAS,KAAK,EAC/B;AAGA,UAAM,UAAU;AAAA,MACf,eAAe,UAAU,MAAM;AAAA,MAC/B,gBAAgB;AAAA,IACjB;AAEA,UAAM,OAAO,KAAK,UAAU,MAAM;AAGlC,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MACjC;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACT,CAAC;AAGD,QAAI,mBAAmB;AACtB,aAAO;AAAA,IACR;AAGA,QAAK,OAAiC,WAAW,MAAM;AACtD,UAAI,SAAS,MAAM;AAClB,eAAO,SAAS;AAAA,MACjB;AACA,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC5D;AAGA,UAAM,OAAO,MAAM,SAAS,KAEzB;AACH,WAAO,KAAK;AAAA,EACb;AACD;AAEO,SAAS,0BACf,MAGC;AAED,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AAClB,WAAO,EAAE,aAAa,QAAW,OAAO,OAAU;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACxC,UAAU;AAAA;AAAA,MAET,aAAa,KAAK;AAAA,MAClB,MAAM,KAAK;AAAA;AAAA,MAEX,YAAY,KAAK;AAAA,IAClB;AAAA,IACA,MAAM;AAAA,EACP,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACvB,WAAO,EAAE,aAAa,QAAW,OAAO,YAAY;AAAA,EACrD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,EAAE,aAAa,MAAM,OAAO,YAAY;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,aAAa,MAAM,OAAO,YAAY;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,aAAa,OAAO,OAAO,YAAY;AAAA;AAAA;AAAA,IAIjD,KAAK;AACJ,aAAO;AAAA,QACN,aAAa;AAAA,QACb,OAAO,YAAY,OAAO,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW,QAAQ;AAAA,MAC/E;AAAA,IACD,SAAS;AACR,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACnE;AAAA,EACD;AACD;AAEO,SAAS,mBAA+C,UAAe;AAC7E,SAAO,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,CAAC,EAAG,SAAS;AACvE;AAEA,SAAS,sBAAsB,cAAqB;AACnD,QAAM,qBAA0B,CAAC;AAEjC,aAAW,eAAe,cAAc;AACvC,UAAM,QAAQ,YAAY;AAE1B,QAAI,CAAC,mBAAmB,KAAK,GAAG;AAC/B,yBAAmB,KAAK,IAAI;AAAA,QAC3B,UAAU;AAAA,UACT,WAAW;AAAA,UACX,MAAM,YAAY,UAAU,QAAQ;AAAA,QACrC;AAAA,QACA,IAAI,YAAY,MAAM;AAAA,QACtB,MAAM,YAAY,QAAQ;AAAA,MAC3B;AAAA,IACD,OAAO;AACN,UAAI,YAAY,IAAI;AACnB,2BAAmB,KAAK,EAAE,KAAK,YAAY;AAAA,MAC5C;AACA,UAAI,YAAY,MAAM;AACrB,2BAAmB,KAAK,EAAE,OAAO,YAAY;AAAA,MAC9C;AAEA,UAAI,YAAY,UAAU,MAAM;AAC/B,2BAAmB,KAAK,EAAE,SAAS,OAAO,YAAY,SAAS;AAAA,MAChE;AAAA,IACD;AAGA,QAAI,YAAY,UAAU,WAAW;AACpC,yBAAmB,KAAK,EAAE,SAAS,aAAa,YAAY,SAAS;AAAA,IACtE;AAAA,EACD;AAEA,SAAO,OAAO,OAAO,kBAAkB;AACxC;AAEA,SAAS,gBAAgB,UAAgD;AAExE,MAAI,SAAS,YAAY,SAAS,IAAI;AACrC,WAAO;AAAA,MACN,MACC,OAAO,SAAS,SAAS,cAAc,WACpC,SAAS,SAAS,YAClB,KAAK,UAAU,SAAS,SAAS,aAAa,CAAC,CAAC;AAAA,MACpD,YAAY,SAAS;AAAA,MACrB,cAAc;AAAA,MACd,UAAU,SAAS,SAAS;AAAA,IAC7B;AAAA,EACD;AACA,SAAO;AAAA,IACN,MACC,OAAO,SAAS,cAAc,WAC3B,SAAS,YACT,KAAK,UAAU,SAAS,aAAa,CAAC,CAAC;AAAA,IAC3C,YAAY,SAAS;AAAA,IACrB,cAAc;AAAA,IACd,UAAU,SAAS;AAAA,EACpB;AACD;AAEO,SAAS,iBAAiB,QAAgD;AAChF,MAAI,OAAO,cAAc,MAAM,QAAQ,OAAO,UAAU,GAAG;AAC1D,WAAO,OAAO,WAAW,IAAI,CAAC,aAAkB;AAC/C,YAAM,oBAAoB,gBAAgB,QAAQ;AAClD,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAEA,MACC,QAAQ,UAAU,CAAC,GAAG,SAAS,cAC/B,MAAM,QAAQ,OAAO,QAAQ,CAAC,EAAE,QAAQ,UAAU,GACjD;AACD,WAAO,OAAO,QAAQ,CAAC,EAAE,QAAQ,WAAW,IAAI,CAAC,aAAkB;AAClE,YAAM,oBAAoB,gBAAgB,QAAQ;AAClD,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAEA,SAAO,CAAC;AACT;AAEO,SAAS,wBAAwB,kBAAyB;AAChE,QAAM,kBAAkB,sBAAsB,gBAAgB;AAC9D,SA
AO,iBAAiB,EAAE,YAAY,gBAAgB,CAAC;AACxD;AAEO,SAAS,YAAY,QAAoD;AAE/E,MAAI,QAAQ,UAAU,CAAC,GAAG,SAAS,QAAQ,QAAQ;AAElD,WAAO,QAAQ,UAAU,CAAC,GAAG,SAAS;AAAA,EACvC;AAEA,MAAI,cAAc,QAAQ;AACzB,QAAI,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,MAAM;AACpE,aAAO,KAAK,UAAU,OAAO,QAAQ;AAAA,IACtC;AAEA,WAAO,OAAO;AAAA,EACf;AACD;;;ACzRO,SAAS,gBAAgB,UAAoB;AACnD,QAAM,aAAa,OAAO,QAAQ;AAClC,MAAI,QAAQ,EAAE,kBAAkB,GAAG,cAAc,EAAE;AACnD,QAAM,mBAA0B,CAAC;AAEjC,SAAO,IAAI,eAA0C;AAAA,IACpD,MAAM,MAAM,YAAY;AACvB,uBAAiB,SAAS,YAAY;AACrC,YAAI,CAAC,MAAM,MAAM;AAChB;AAAA,QACD;AACA,YAAI,MAAM,SAAS,UAAU;AAC5B;AAAA,QACD;AACA,cAAM,QAAQ,KAAK,MAAM,MAAM,IAAI;AACnC,YAAI,MAAM,OAAO;AAChB,kBAAQ,kBAAkB,KAAK;AAAA,QAChC;AACA,YAAI,MAAM,YAAY;AACrB,2BAAiB,KAAK,GAAG,MAAM,UAAU;AAAA,QAC1C;AACA,cAAM,UAAU,UACf,WAAW,QAAQ;AAAA,UAClB,WAAW,MAAM;AAAA,UACjB,MAAM;AAAA,QACP,CAAC;AACF,eAAO,UAAU,CAAC,GAAG,OAAO,mBAAmB,UAC9C,WAAW,QAAQ;AAAA,UAClB,MAAM;AAAA,UACN,WAAW,MAAM,QAAQ,CAAC,EAAE,MAAM;AAAA,QACnC,CAAC;AACF,eAAO,UAAU,CAAC,GAAG,OAAO,SAAS,UACpC,WAAW,QAAQ;AAAA,UAClB,MAAM;AAAA,UACN,WAAW,MAAM,QAAQ,CAAC,EAAE,MAAM;AAAA,QACnC,CAAC;AAAA,MACH;AAEA,UAAI,iBAAiB,SAAS,GAAG;AAChC,cAAM,YAAY,wBAAwB,gBAAgB;AAC1D,kBAAU,IAAI,CAAC,aAAa;AAC3B,qBAAW,QAAQ;AAAA,YAClB,MAAM;AAAA,YACN,GAAG;AAAA,UACJ,CAAC;AAAA,QACF,CAAC;AAAA,MACF;AAEA,iBAAW,QAAQ;AAAA,QAClB,cAAc;AAAA,QACd,MAAM;AAAA,QACN;AAAA,MACD,CAAC;AACD,iBAAW,MAAM;AAAA,IAClB;AAAA,EACD,CAAC;AACF;;;AP1CO,IAAM,2BAAN,MAA0D;AAAA,EAShE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,SAAS;AAAA,QACT,MAAM;AAAA,MACP,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACb,SAAS;AAAA,QACT,MAAM;AAAA,MACP,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,UAAU,+BAA+B,MAAM;AAAA;AAAA,MAE/C,OAAO,KAAK;AAAA,IACb;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,aAAa,KAAK;AAAA,cAClB,MAAM;AAAA,YACP;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,UAAU,KAAK,MAAM,MAAM,WAAW,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAI,8BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,IAAI,+BAA+B,QAAQ,MAAM;AAElE,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ,SAAS;AAAA,MACjD,OAAO,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM,GAAG,IAAI,KAAK,OAAO,EAAE,EAAE,KAAK,MAAM;AAAA,IAC9E,CAAC;AAED,WAAO;AAAA,MACN,cAAc;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,SAAS,OAAO,KAAK,IAAI,CAAC,EAAE,SAAS,UAAU,MAAM,OAAO;AAAA,QAC3D,IAAI;AAAA,QACJ,kBAAkB;AAAA,UACjB,YAAY,EAAE,MAAM;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,QACZ,KAAK;AAAA,MACN,EAAE;AAAA;AAAA,MACF,MAAM,OAAO;AAAA,MACb,WAAW,iBAAiB,MAAM;AAAA,MAClC,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,IAAI,+BAA+B,QAAQ,MAAM;AAElE,UAAM,QAAQ,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM,GAAG,IAAI,KAAK,OAAO,EAAE,EAAE,KAAK,MAAM;AAEpF,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ,SAAS;AAAA,MACnD;AAAA,MACA,QAAQ;AAAA,IACT,CAA
C;AAED,WAAO;AAAA,MACN,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,QAAQ,gBAAgB,QAAQ;AAAA,MAChC;AAAA,IACD;AAAA,EACD;AACD;;;AQ1KA,SAAgC,0CAA0C;AAqBnE,IAAM,0BAAN,MAAkE;AAAA,EA2BxE,YACC,SACA,UACA,QACC;AA1BF;AAAA;AAAA;AAAA;AAAA,wBAAS,wBAAuB;AAChC,wBAAS;AACT,wBAAiB;AACjB,wBAAiB;AAwBhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA;AAAA;AAAA;AAAA,EAtBA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEA,IAAI,uBAA+B;AAElC,UAAM,uBAAuB,KAAK,YAAY,+BAA+B,OAAO;AACpF,WAAO,KAAK,SAAS,wBAAwB;AAAA,EAC9C;AAAA,EAEA,IAAI,wBAAiC;AACpC,WAAO,KAAK,SAAS,yBAAyB;AAAA,EAC/C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACb;AAAA,EACD,GAEE;AACD,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC9C,YAAM,IAAI,mCAAmC;AAAA,QAC5C,sBAAsB,KAAK;AAAA,QAC3B,SAAS,KAAK;AAAA,QACd,UAAU,KAAK;AAAA,QACf;AAAA,MACD,CAAC;AAAA,IACF;AAEA,UAAM,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAEhD,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA;AAAA,MAEL;AAAA,QACC,MAAM;AAAA,MACP;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,WAAO;AAAA;AAAA,MAEN,YAAY,SAAS;AAAA,IACtB;AAAA,EACD;AACD;;;ACxFA;AAAA,EAIC,iCAAAC;AAAA,OACM;;;ACHA,SAAS,yBAAyB,wBAA0D;AAClG,MAAI;AAGJ,MACC,OAAO,2BAA2B,YAClC,2BAA2B,QAC3B,2BAA2B,QAC1B;AACD,mBAAe;AAAA,EAChB,WAAW,OAAO,2BAA2B,YAAY,2BAA2B,MAAM;AACzF,UAAM,WAAW;AAEjB,QACC,aAAa,YACb,MAAM,QAAQ,SAAS,OAAO,KAC9B,SAAS,QAAQ,SAAS,GACzB;AACD,qBAAe,SAAS,QAAQ,CAAC,EAAE;AAAA,IACpC,WAAW,mBAAmB,UAAU;AACvC,qBAAe,SAAS;AAAA,IACzB,OAAO;AACN,qBAAe;AAAA,IAChB;AAAA,EACD;AAEA,UAAQ,cAAc;AAAA,IACrB,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AAAA,IACL,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR;AAEC,aAAO;AAAA,EACT;AACD;;;ADrBO,IAAM,6BAAN,MAA4D;AAAA,EASlE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,SAAS;AAAA,QACT,MAAM;AAAA,MACP,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACb,SAAS;AAAA,QACT,MAAM;AAAA,MACP,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,YAAY;AAAA;AAAA,MAEZ,OAAO,KAAK;AAAA,MACZ,aAAa;AAAA;AAAA,MAGb,aAAa,KAAK,SAAS;AAAA,MAC3B;AAAA,MACA,OAAO;AAAA,IACR;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,aAAa,KAAK;AAAA,cAClB,MAAM;AAAA,YACP;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,UAAU,KAAK,MAAM,MAAM,WAAW,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAIC,+BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAG/C,UAAM,EAAE,SAAS,YAAY,GAAG,mBAAmB,IAAI,KAAK;AAG5D,UAAM,EAAE,UAAU,OAAO,IAAI,+BAA+B,QAAQ,MAAM;AAG1E,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,QACC,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACv
B;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,kBAAkB,gBAAgB;AACrC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,cAAc,yBAAyB,MAAM;AAAA,MAC7C,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD,aAAa,EAAE,MAAM,OAAO;AAAA,MAC5B,MAAM,YAAY,MAAM;AAAA,MACxB,WAAW,iBAAiB,MAAM;AAAA;AAAA,MAElC,WAAW,QAAQ,UAAU,CAAC,GAAG,SAAS;AAAA,MAC1C,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAG/C,UAAM,EAAE,UAAU,OAAO,IAAI,+BAA+B,QAAQ,MAAM;AAK1E,QAAI,KAAK,OAAO,UAAU,mBAAmB,QAAQ,GAAG;AACvD,YAAMC,YAAW,MAAM,KAAK,WAAW,OAAO;AAE9C,UAAIA,qBAAoB,gBAAgB;AACvC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACxC;AAEA,aAAO;AAAA,QACN,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,QAClD,QAAQ,IAAI,eAA0C;AAAA,UACrD,MAAM,MAAM,YAAY;AACvB,gBAAIA,UAAS,MAAM;AAClB,yBAAW,QAAQ;AAAA,gBAClB,WAAWA,UAAS;AAAA,gBACpB,MAAM;AAAA,cACP,CAAC;AAAA,YACF;AACA,gBAAIA,UAAS,WAAW;AACvB,yBAAW,YAAYA,UAAS,WAAW;AAC1C,2BAAW,QAAQ;AAAA,kBAClB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACJ,CAAC;AAAA,cACF;AAAA,YACD;AACA,gBAAIA,UAAS,aAAa,OAAOA,UAAS,cAAc,UAAU;AACjE,yBAAW,QAAQ;AAAA,gBAClB,MAAM;AAAA,gBACN,WAAWA,UAAS;AAAA,cACrB,CAAC;AAAA,YACF;AACA,uBAAW,QAAQ;AAAA,cAClB,cAAc,yBAAyBA,SAAQ;AAAA,cAC/C,MAAM;AAAA,cACN,OAAOA,UAAS;AAAA,YACjB,CAAC;AACD,uBAAW,MAAM;AAAA,UAClB;AAAA,QACD,CAAC;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAGA,UAAM,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAGhD,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,QAAQ;AAAA,QACR,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,EAAE,oBAAoB,iBAAiB;AAC1C,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD,QAAQ,gBAAgB,IAAI,SAAS,QAAQ,CAAC;AAAA,MAC9C;AAAA,IACD;AAAA,EACD;AACD;;;AEnRO,IAAM,sBAAN,MAAkD;AAAA,EAUxD,YACU,SACA,UACA,QACR;AAHQ;AACA;AACA;AAZV,wBAAS,wBAAuB;AAAA,EAa7B;AAAA,EAXH,IAAI,mBAA2B;AAC9B,WAAO,KAAK,SAAS,oBAAoB;AAAA,EAC1C;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAOA,MAAM,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;AAAA,EAGD,GAEE;AACD,UAAM,EAAE,OAAO,OAAO,IAAI,4BAA4B,IAAI;AAE1D,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACxB,eAAS,KAAK;AAAA,QACb,SAAS;AAAA,QACT,SAAS;AAAA,QACT,MAAM;AAAA,MACP,CAAC;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY;AACjC,YAAM,eAA2C,MAAM,KAAK,OAAO,QAAQ;AAAA,QAC1E,KAAK;AAAA,QACL;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAGA,aAAO,mBAAmB,YAAY;AAAA,IACvC;AAEA,UAAM,SAAuB,MAAM,QAAQ;AAAA,MAC1C,MAAM,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAAC;AAAA,IAChD;AAIA,WAAO;AAAA,MACN;AAAA,MACA,UAAU;AAAA,QACT,SAAS,CAAC;AAAA,QACV,SAAS,KAAK;AAAA,QACd,WAAW,oBAAI,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,4BAA4B,MAA0B;AAC9D,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,QAAW,MAAS;AAEjE,SAAO;AAAA,IACN,QAAQ,aAAa,MAAM;AAAA,IAC3B,OAAO,aAAa,KAAK;AAAA,EAC1B;AACD;AAEA,SAAS,aAAa,OAAgB;AACrC,MAAI,UAAU,MAAM,CAAC,MAAO,QAAO;AACnC,QAAM,SAAS,OAAO,KAAK;AAC3B,SAAO,OAAO,UAAU,MAAM,IAAI,SAAS;AAC5C;AAEA,eAAe,mBAAmBC,SAAyD;AAC1F,QAAM,SAASA,QAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AAC9B,MAAI,cAAc;AAGlB,SAAO,MAAM;AACZ,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,WAAO,KAAK,KAAK;AACjB,mBAAe,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC3B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EACjB;AACA,SAAO;AACR;;;AC/BO,SAAS,gBAAgB,SAAuC;
AAGtE,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACpB,cAAU,QAAQ;AAAA,EACnB,OAAO;AACN,UAAM,EAAE,WAAW,OAAO,IAAI;AAC9B,cAAU;AAAA,MACT,KAAK,UAAU,EAAE,WAAW,OAAO,CAAC;AAAA,IACrC;AAAA,EACD;AAEA,MAAI,CAAC,SAAS;AACb,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,kBAAkB,CAAC,SAA+B,WAAkC,CAAC,MAC1F,IAAI,2BAA2B,SAAS,UAAU;AAAA,IACjD;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,EACX,CAAC;AAEF,QAAM,mBAAmB,CACxB,SACA,WAAmC,CAAC,MAEpC,IAAI,oBAAoB,SAAS,UAAU;AAAA,IAC1C;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,EACX,CAAC;AACF,QAAM,uBAAuB,CAC5B,SACA,WAAuC,CAAC,MAExC,IAAI,wBAAwB,SAAS,UAAU;AAAA,IAC9C;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,EACX,CAAC;AAEF,QAAM,WAAW,CAAC,SAA+B,aAAqC;AACrF,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EACzC;AAEA,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,SAAO;AACR;AAiBO,SAAS,cAAc,SAA2C;AACxE,QAAM,UAAU,QAAQ;AAExB,QAAM,kBAAkB,CAAC,WAAgC,CAAC;AAAA;AAAA,IAEzD,IAAI,yBAAyB,4CAA4C,UAAU;AAAA,MAClF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA;AAEF,QAAM,WAAW,CAAC,aAAmC;AACpD,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,QAAQ;AAAA,EAChC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACR;","names":["split","UnsupportedFunctionalityError","UnsupportedFunctionalityError","response","stream"]}
|
package/package.json
CHANGED
@@ -2,7 +2,7 @@
  2   2 |   "name": "workers-ai-provider",
  3   3 |   "description": "Workers AI Provider for the vercel AI SDK",
  4   4 |   "type": "module",
  5     | - "version": "0.7.0",
      5 | + "version": "0.7.2",
  6   6 |   "main": "dist/index.js",
  7   7 |   "types": "dist/index.d.ts",
  8   8 |   "repository": {
@@ -35,7 +35,7 @@
 35  35 |     "@ai-sdk/provider-utils": "^2.2.8"
 36  36 |   },
 37  37 |   "devDependencies": {
 38     | -   "@cloudflare/workers-types": "^4.
     38 | +   "@cloudflare/workers-types": "^4.20250701.0",
 39  39 |     "zod": "^3.25.67"
 40  40 |   },
 41  41 |   "scripts": {
package/src/streaming.ts
CHANGED
@@ -34,6 +34,11 @@ export function getMappedStream(response: Response) {
 34  34 |           type: "reasoning",
 35  35 |           textDelta: chunk.choices[0].delta.reasoning_content,
 36  36 |         });
     37 | +     chunk?.choices?.[0]?.delta?.content?.length &&
     38 | +       controller.enqueue({
     39 | +         type: "text-delta",
     40 | +         textDelta: chunk.choices[0].delta.content,
     41 | +       });
 37  42 |     }
 38  43 |
 39  44 |     if (partialToolCalls.length > 0) {
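
What the added lines do: when a streamed chunk arrives in the OpenAI-compatible shape, its delta.content is now forwarded as a "text-delta" part, next to the existing "reasoning" handling, instead of text being emitted only for legacy response-shaped chunks. The standalone TypeScript sketch below mirrors that mapping; the chunk and part types are spelled out here for illustration only and are not types exported by the package.

// Sketch only: restates the guard added inside getMappedStream() as a pure function.
type OpenAIStyleChunk = {
	choices?: Array<{ delta?: { content?: string; reasoning_content?: string } }>;
};

type MappedPart =
	| { type: "reasoning"; textDelta: string }
	| { type: "text-delta"; textDelta: string };

function mapDelta(chunk: OpenAIStyleChunk): MappedPart[] {
	const parts: MappedPart[] = [];
	const delta = chunk?.choices?.[0]?.delta;
	// Existing behaviour: reasoning tokens become "reasoning" parts.
	if (delta?.reasoning_content?.length) {
		parts.push({ type: "reasoning", textDelta: delta.reasoning_content });
	}
	// New in 0.7.2: assistant text becomes "text-delta" parts as well.
	if (delta?.content?.length) {
		parts.push({ type: "text-delta", textDelta: delta.content });
	}
	return parts;
}

// Hand-written example chunk, not a captured response:
mapDelta({ choices: [{ delta: { content: "Hello" } }] });
// -> [{ type: "text-delta", textDelta: "Hello" }]
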
package/src/utils.ts
CHANGED
@@ -269,3 +269,19 @@ export function processPartialToolCalls(partialToolCalls: any[]) {
269 269 |   const mergedToolCalls = mergePartialToolCalls(partialToolCalls);
270 270 |   return processToolCalls({ tool_calls: mergedToolCalls });
271 271 | }
    272 | +
    273 | + export function processText(output: AiTextGenerationOutput): string | undefined {
    274 | +   // @ts-expect-error OpenAI format not typed yet
    275 | +   if (output?.choices?.[0]?.message?.content.length) {
    276 | +     // @ts-expect-error OpenAI format not typed yet
    277 | +     return output?.choices?.[0]?.message?.content;
    278 | +   }
    279 | +
    280 | +   if ("response" in output) {
    281 | +     if (typeof output.response === "object" && output.response !== null) {
    282 | +       return JSON.stringify(output.response); // ai-sdk expects a string here
    283 | +     }
    284 | +
    285 | +     return output.response;
    286 | +   }
    287 | + }
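
The new processText helper centralises how completion text is extracted: it prefers the OpenAI-compatible shape (choices[0].message.content) and falls back to the legacy response field, stringifying structured responses because the ai-sdk expects text to be a string. A rough restatement with hand-written inputs follows; the real helper is internal to src/utils.ts and typed against the binding's AiTextGenerationOutput, so it is re-declared here rather than imported.

// Sketch only: loosely typed restatement of processText() for illustration.
function processTextSketch(output: any): string | undefined {
	// OpenAI-compatible shape: { choices: [{ message: { content } }] }
	if (output?.choices?.[0]?.message?.content?.length) {
		return output.choices[0].message.content;
	}
	// Legacy Workers AI shape: { response } holding a string or an object
	if (output && "response" in output) {
		return typeof output.response === "object" && output.response !== null
			? JSON.stringify(output.response) // ai-sdk expects a string here
			: output.response;
	}
	return undefined;
}

processTextSketch({ choices: [{ message: { content: "Hi there" } }] }); // "Hi there"
processTextSketch({ response: { answer: 42 } }); // '{"answer":42}'
processTextSketch({ response: "plain text" }); // "plain text"
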
package/src/workersai-chat-language-model.ts
CHANGED
@@ -8,7 +8,12 @@ import { convertToWorkersAIChatMessages } from "./convert-to-workersai-chat-messages";
  8   8 | import { mapWorkersAIFinishReason } from "./map-workersai-finish-reason";
  9   9 | import { mapWorkersAIUsage } from "./map-workersai-usage";
 10  10 | import { getMappedStream } from "./streaming";
 11     | - import {
     11 | + import {
     12 | +   lastMessageWasUser,
     13 | +   prepareToolsAndToolChoice,
     14 | +   processText,
     15 | +   processToolCalls,
     16 | + } from "./utils";
 12  17 | import type { WorkersAIChatSettings } from "./workersai-chat-settings";
 13  18 | import type { TextGenerationModels } from "./workersai-models";
 14  19 |
@@ -172,10 +177,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
172 177 |       finishReason: mapWorkersAIFinishReason(output),
173 178 |       rawCall: { rawPrompt: messages, rawSettings: args },
174 179 |       rawResponse: { body: output },
175     | -     text:
176     | -       typeof output.response === "object" && output.response !== null
177     | -         ? JSON.stringify(output.response) // ai-sdk expects a string here
178     | -         : output.response,
    180 | +     text: processText(output),
179 181 |       toolCalls: processToolCalls(output),
180 182 |       // @ts-ignore: Missing types
181 183 |       reasoning: output?.choices?.[0]?.message?.reasoning_content,
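
Net effect in this file: doGenerate() now resolves its text field through processText() instead of reading output.response inline, so non-streaming calls return a string for either response shape. A minimal consumer sketch follows, assuming a Worker with an AI binding named env.AI, the model id "@cf/meta/llama-3.1-8b-instruct", and the Ai type from @cloudflare/workers-types; none of these come from this diff.

// Sketch only: typical Worker using this provider with the ai-sdk.
import { generateText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

export default {
	async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
		const workersai = createWorkersAI({ binding: env.AI });
		// doGenerate() runs under the hood; `text` is filled in via processText().
		const { text } = await generateText({
			model: workersai("@cf/meta/llama-3.1-8b-instruct"),
			prompt: "Write a haiku about source maps.",
		});
		return new Response(text);
	},
};
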