workers-ai-provider 0.5.2 → 0.6.0

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -378,7 +378,6 @@ function getMappedStream(response) {
 }
 if (chunk.tool_calls) {
   partialToolCalls.push(...chunk.tool_calls);
-  continue;
 }
 chunk.response?.length && controller.enqueue({
   type: "text-delta",
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/autorag-chat-language-model.ts","../src/convert-to-workersai-chat-messages.ts","../src/map-workersai-usage.ts","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/deps/jsr.io/@std/streams/0.221.0/text_line_stream.js","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/utils.js","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/mod.js","../src/utils.ts","../src/streaming.ts","../src/workers-ai-embedding-model.ts","../src/workersai-chat-language-model.ts","../src/workersai-image-model.ts","../src/index.ts"],"sourcesContent":[…],"mappings":"…","names":["split","UnsupportedFunctionalityError","UnsupportedFunctionalityError","response","stream"]}
+ {"version":3,"sources":["../src/autorag-chat-language-model.ts","../src/convert-to-workersai-chat-messages.ts","../src/map-workersai-usage.ts","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/deps/jsr.io/@std/streams/0.221.0/text_line_stream.js","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/utils.js","../../../node_modules/.pnpm/fetch-event-stream@0.1.5/node_modules/fetch-event-stream/esm/mod.js","../src/utils.ts","../src/streaming.ts","../src/workers-ai-embedding-model.ts","../src/workersai-chat-language-model.ts","../src/workersai-image-model.ts","../src/index.ts"],"sourcesContent":[…],"mappings":"…",…}
UnsupportedFunctionalityError({\n\t\t\t\t\tfunctionality: \"object-grammar mode\",\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\t\tthrow new Error(`Unsupported type: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync doGenerate(\n\t\toptions: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst { messages } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\tconst output = await this.config.binding.aiSearch({\n\t\t\tquery: messages.map(({ content, role }) => `${role}: ${content}`).join(\"\\n\\n\"),\n\t\t});\n\n\t\treturn {\n\t\t\ttext: output.response,\n\t\t\ttoolCalls: processToolCalls(output),\n\t\t\tfinishReason: \"stop\", // TODO: mapWorkersAIFinishReason(response.finish_reason),\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\tusage: mapWorkersAIUsage(output),\n\t\t\twarnings,\n\t\t\tsources: output.data.map(({ file_id, filename, score }) => ({\n\t\t\t\tid: file_id,\n\t\t\t\tsourceType: \"url\",\n\t\t\t\turl: filename,\n\t\t\t\tproviderMetadata: {\n\t\t\t\t\tattributes: { score },\n\t\t\t\t},\n\t\t\t})),\n\t\t};\n\t}\n\n\tasync doStream(\n\t\toptions: Parameters<LanguageModelV1[\"doStream\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst { messages } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\tconst query = messages.map(({ content, role }) => `${role}: ${content}`).join(\"\\n\\n\");\n\n\t\tconst response = await this.config.binding.aiSearch({\n\t\t\tquery,\n\t\t\tstream: true,\n\t\t});\n\n\t\treturn {\n\t\t\tstream: getMappedStream(response),\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\twarnings,\n\t\t};\n\t}\n}\n","import type { LanguageModelV1Prompt, LanguageModelV1ProviderMetadata } from \"@ai-sdk/provider\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\nexport function convertToWorkersAIChatMessages(prompt: LanguageModelV1Prompt): {\n\tmessages: WorkersAIChatPrompt;\n\timages: {\n\t\tmimeType: string | undefined;\n\t\timage: Uint8Array;\n\t\tproviderMetadata: LanguageModelV1ProviderMetadata | undefined;\n\t}[];\n} {\n\tconst messages: WorkersAIChatPrompt = [];\n\tconst images: {\n\t\tmimeType: string | undefined;\n\t\timage: Uint8Array;\n\t\tproviderMetadata: LanguageModelV1ProviderMetadata | undefined;\n\t}[] = [];\n\n\tfor (const { role, content } of prompt) {\n\t\tswitch (role) {\n\t\t\tcase \"system\": {\n\t\t\t\tmessages.push({ role: \"system\", content });\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"user\": {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: content\n\t\t\t\t\t\t.map((part) => {\n\t\t\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\t\t\treturn part.text;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcase \"image\": {\n\t\t\t\t\t\t\t\t\t// Extract image from this part\n\t\t\t\t\t\t\t\t\tif (part.image instanceof Uint8Array) {\n\t\t\t\t\t\t\t\t\t\t// Store the image data directly as Uint8Array\n\t\t\t\t\t\t\t\t\t\t// For Llama 3.2 Vision model, which needs array of integers\n\t\t\t\t\t\t\t\t\t\timages.push({\n\t\t\t\t\t\t\t\t\t\t\tmimeType: part.mimeType,\n\t\t\t\t\t\t\t\t\t\t\timage: part.image,\n\t\t\t\t\t\t\t\t\t\t\tproviderMetadata: part.providerMetadata,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\treturn \"\"; // No 
text for the image part\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.join(\"\\n\"),\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"assistant\": {\n\t\t\t\tlet text = \"\";\n\t\t\t\tconst toolCalls: Array<{\n\t\t\t\t\tid: string;\n\t\t\t\t\ttype: \"function\";\n\t\t\t\t\tfunction: { name: string; arguments: string };\n\t\t\t\t}> = [];\n\n\t\t\t\tfor (const part of content) {\n\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\ttext += part.text;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcase \"tool-call\": {\n\t\t\t\t\t\t\ttext = JSON.stringify({\n\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\tparameters: part.args,\n\t\t\t\t\t\t\t});\n\n\t\t\t\t\t\t\ttoolCalls.push({\n\t\t\t\t\t\t\t\tid: part.toolCallId,\n\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\tfunction: {\n\t\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\t\targuments: JSON.stringify(part.args),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdefault: {\n\t\t\t\t\t\t\tconst exhaustiveCheck = part;\n\t\t\t\t\t\t\tthrow new Error(`Unsupported part: ${exhaustiveCheck}`);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\tcontent: text,\n\t\t\t\t\ttool_calls:\n\t\t\t\t\t\ttoolCalls.length > 0\n\t\t\t\t\t\t\t? toolCalls.map(({ function: { name, arguments: args } }) => ({\n\t\t\t\t\t\t\t\t\tid: \"null\",\n\t\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\t\tfunction: { name, arguments: args },\n\t\t\t\t\t\t\t\t}))\n\t\t\t\t\t\t\t: undefined,\n\t\t\t\t});\n\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"tool\": {\n\t\t\t\tfor (const toolResponse of content) {\n\t\t\t\t\tmessages.push({\n\t\t\t\t\t\trole: \"tool\",\n\t\t\t\t\t\tname: toolResponse.toolName,\n\t\t\t\t\t\tcontent: JSON.stringify(toolResponse.result),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = role satisfies never;\n\t\t\t\tthrow new Error(`Unsupported role: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn { messages, images };\n}\n","export function mapWorkersAIUsage(output: AiTextGenerationOutput | AiTextToImageOutput) {\n\tconst usage = (\n\t\toutput as {\n\t\t\tusage: { prompt_tokens: number; completion_tokens: number };\n\t\t}\n\t).usage ?? {\n\t\tprompt_tokens: 0,\n\t\tcompletion_tokens: 0,\n\t};\n\n\treturn {\n\t\tpromptTokens: usage.prompt_tokens,\n\t\tcompletionTokens: usage.completion_tokens,\n\t};\n}\n","// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.\n// This module is browser compatible.\n/**\n * Transform a stream into a stream where each chunk is divided by a newline,\n * be it `\\n` or `\\r\\n`. `\\r` can be enabled via the `allowCR` option.\n *\n * @example\n * ```ts\n * import { TextLineStream } from \"@std/streams/text-line-stream\";\n *\n * const res = await fetch(\"https://example.com\");\n * const lines = res.body!\n * .pipeThrough(new TextDecoderStream())\n * .pipeThrough(new TextLineStream());\n * ```\n */\nexport class TextLineStream extends TransformStream {\n #currentLine = \"\";\n /** Constructs a new instance. */\n constructor(options = { allowCR: false }) {\n super({\n transform: (chars, controller) => {\n chars = this.#currentLine + chars;\n while (true) {\n const lfIndex = chars.indexOf(\"\\n\");\n const crIndex = options.allowCR ? 
chars.indexOf(\"\\r\") : -1;\n if (crIndex !== -1 && crIndex !== (chars.length - 1) &&\n (lfIndex === -1 || (lfIndex - 1) > crIndex)) {\n controller.enqueue(chars.slice(0, crIndex));\n chars = chars.slice(crIndex + 1);\n continue;\n }\n if (lfIndex === -1)\n break;\n const endIndex = chars[lfIndex - 1] === \"\\r\" ? lfIndex - 1 : lfIndex;\n controller.enqueue(chars.slice(0, endIndex));\n chars = chars.slice(lfIndex + 1);\n }\n this.#currentLine = chars;\n },\n flush: (controller) => {\n if (this.#currentLine === \"\")\n return;\n const currentLine = options.allowCR && this.#currentLine.endsWith(\"\\r\")\n ? this.#currentLine.slice(0, -1)\n : this.#currentLine;\n controller.enqueue(currentLine);\n },\n });\n }\n}\n","import { TextLineStream } from './deps/jsr.io/@std/streams/0.221.0/text_line_stream.js';\nexport function stream(input) {\n let decoder = new TextDecoderStream();\n let split = new TextLineStream({ allowCR: true });\n return input.pipeThrough(decoder).pipeThrough(split);\n}\nexport function split(input) {\n let rgx = /[:]\\s*/;\n let match = rgx.exec(input);\n // \": comment\" -> index=0 -> ignore\n let idx = match && match.index;\n if (idx) {\n return [\n input.substring(0, idx),\n input.substring(idx + match[0].length),\n ];\n }\n}\nexport function fallback(headers, key, value) {\n let tmp = headers.get(key);\n if (!tmp)\n headers.set(key, value);\n}\n","import * as utils from './utils.js';\n/**\n * Convert a `Response` body containing Server Sent Events (SSE) into an Async Iterator that yields {@linkcode ServerSentEventMessage} objects.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events}\n *\n * @example\n * ```js\n * // Optional\n * let abort = new AbortController;\n *\n * // Manually fetch a Response\n * let res = await fetch('https://...', {\n * method: 'POST',\n * signal: abort.signal,\n * headers: {\n * 'api-key': 'token <value>',\n * 'content-type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true, // <- hypothetical\n * // ...\n * })\n * });\n *\n * if (res.ok) {\n * let stream = events(res, abort.signal);\n * for await (let event of stream) {\n * console.log('<<', event.data);\n * }\n * }\n * ```\n */\nexport async function* events(res, signal) {\n // TODO: throw error?\n if (!res.body)\n return;\n let iter = utils.stream(res.body);\n let line, reader = iter.getReader();\n let event;\n for (;;) {\n if (signal && signal.aborted) {\n return reader.cancel();\n }\n line = await reader.read();\n if (line.done)\n return;\n if (!line.value) {\n if (event)\n yield event;\n event = undefined;\n continue;\n }\n let [field, value] = utils.split(line.value) || [];\n if (!field)\n continue; // comment or invalid\n if (field === 'data') {\n event ||= {};\n event[field] = event[field] ? 
(event[field] + '\\n' + value) : value;\n }\n else if (field === 'event') {\n event ||= {};\n event[field] = value;\n }\n else if (field === 'id') {\n event ||= {};\n event[field] = +value || value;\n }\n else if (field === 'retry') {\n event ||= {};\n event[field] = +value || undefined;\n }\n }\n}\n/**\n * Convenience function that will `fetch` with the given arguments and, if ok, will return the {@linkcode events} async iterator.\n *\n * If the response is not ok (status 200-299), the `Response` is thrown.\n *\n * @example\n * ```js\n * // NOTE: throws `Response` if not 2xx status\n * let events = await stream('https://api.openai.com/...', {\n * method: 'POST',\n * headers: {\n * 'Authorization': 'Bearer <token>',\n * 'Content-Type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true,\n * // ...\n * })\n * });\n *\n * for await (let event of events) {\n * console.log('<<', JSON.parse(event.data));\n * }\n * ```\n */\nexport async function stream(input, init) {\n let req = new Request(input, init);\n utils.fallback(req.headers, 'Accept', 'text/event-stream');\n utils.fallback(req.headers, 'Content-Type', 'application/json');\n let r = await fetch(req);\n if (!r.ok)\n throw r;\n return events(r, req.signal);\n}\n","import type { LanguageModelV1, LanguageModelV1FunctionToolCall } from \"@ai-sdk/provider\";\n\n/**\n * General AI run interface with overloads to handle distinct return types.\n *\n * The behaviour depends on the combination of parameters:\n * 1. `returnRawResponse: true` => returns the raw Response object.\n * 2. `stream: true` => returns a ReadableStream (if available).\n * 3. Otherwise => returns post-processed AI results.\n */\nexport interface AiRun {\n\t// (1) Return raw Response if `options.returnRawResponse` is `true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions: AiOptions & { returnRawResponse: true },\n\t): Promise<Response>;\n\n\t// (2) Return a stream if the input has `stream: true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"] & { stream: true },\n\t\toptions?: AiOptions,\n\t): Promise<ReadableStream<Uint8Array>>;\n\n\t// (3) Return post-processed outputs by default.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions,\n\t): Promise<AiModels[Name][\"postProcessedOutputs\"]>;\n}\n\nexport type StringLike = string | { toString(): string };\n\n/**\n * Parameters for configuring the Cloudflare-based AI runner.\n */\nexport interface CreateRunConfig {\n\t/** Your Cloudflare account identifier. */\n\taccountId: string;\n\n\t/** Cloudflare API token/key with appropriate permissions. */\n\tapiKey: string;\n}\n\n/**\n * Creates a run method that emulates the Cloudflare Workers AI binding,\n * but uses the Cloudflare REST API under the hood. 
Headers and abort\n * signals are configured at creation time, rather than per-request.\n *\n * @param config An object containing:\n * - `accountId`: Cloudflare account identifier.\n * - `apiKey`: Cloudflare API token/key with suitable permissions.\n * - `headers`: Optional custom headers to merge with defaults.\n * - `signal`: Optional AbortSignal for request cancellation.\n *\n * @returns A function matching the AiRun interface.\n */\nexport function createRun(config: CreateRunConfig): AiRun {\n\tconst { accountId, apiKey } = config;\n\n\t// Return the AiRun-compatible function.\n\treturn async function run<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions & Record<string, StringLike>,\n\t): Promise<Response | ReadableStream<Uint8Array> | AiModels[Name][\"postProcessedOutputs\"]> {\n\t\tconst { gateway, prefix, extraHeaders, returnRawResponse, ...passthroughOptions } =\n\t\t\toptions || {};\n\n\t\tconst urlParams = new URLSearchParams();\n\t\tfor (const [key, value] of Object.entries(passthroughOptions)) {\n\t\t\t// throw a useful error if the value is not to-stringable\n\t\t\ttry {\n\t\t\t\tconst valueStr = value.toString();\n\t\t\t\tif (!valueStr) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\turlParams.append(key, valueStr);\n\t\t\t} catch (error) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Value for option '${key}' is not able to be coerced into a string.`,\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tconst url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${\n\t\t\turlParams ? `?${urlParams}` : \"\"\n\t\t}`;\n\n\t\t// Merge default and custom headers.\n\t\tconst headers = {\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t\tAuthorization: `Bearer ${apiKey}`,\n\t\t};\n\n\t\tconst body = JSON.stringify(inputs);\n\n\t\t// Execute the POST request. The optional AbortSignal is applied here.\n\t\tconst response = await fetch(url, {\n\t\t\tmethod: \"POST\",\n\t\t\theaders,\n\t\t\tbody,\n\t\t});\n\n\t\t// (1) If the user explicitly requests the raw Response, return it as-is.\n\t\tif (returnRawResponse) {\n\t\t\treturn response;\n\t\t}\n\n\t\t// (2) If the AI input requests streaming, return the ReadableStream if available.\n\t\tif ((inputs as AiTextGenerationInput).stream === true) {\n\t\t\tif (response.body) {\n\t\t\t\treturn response.body;\n\t\t\t}\n\t\t\tthrow new Error(\"No readable body available for streaming.\");\n\t\t}\n\n\t\t// (3) In all other cases, parse JSON and return the result field.\n\t\tconst data = await response.json<{\n\t\t\tresult: AiModels[Name][\"postProcessedOutputs\"];\n\t\t}>();\n\t\treturn data.result;\n\t};\n}\n\nexport function prepareToolsAndToolChoice(\n\tmode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n\t\ttype: \"regular\";\n\t},\n) {\n\t// when the tools array is empty, change it to undefined to prevent errors:\n\tconst tools = mode.tools?.length ? 
mode.tools : undefined;\n\n\tif (tools == null) {\n\t\treturn { tools: undefined, tool_choice: undefined };\n\t}\n\n\tconst mappedTools = tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\t// @ts-expect-error - description is not a property of tool\n\t\t\tdescription: tool.description,\n\t\t\t// @ts-expect-error - parameters is not a property of tool\n\t\t\tparameters: tool.parameters,\n\t\t},\n\t}));\n\n\tconst toolChoice = mode.toolChoice;\n\n\tif (toolChoice == null) {\n\t\treturn { tools: mappedTools, tool_choice: undefined };\n\t}\n\n\tconst type = toolChoice.type;\n\n\tswitch (type) {\n\t\tcase \"auto\":\n\t\t\treturn { tools: mappedTools, tool_choice: type };\n\t\tcase \"none\":\n\t\t\treturn { tools: mappedTools, tool_choice: type };\n\t\tcase \"required\":\n\t\t\treturn { tools: mappedTools, tool_choice: \"any\" };\n\n\t\t// workersAI does not support tool mode directly,\n\t\t// so we filter the tools and force the tool choice through 'any'\n\t\tcase \"tool\":\n\t\t\treturn {\n\t\t\t\ttools: mappedTools.filter((tool) => tool.function.name === toolChoice.toolName),\n\t\t\t\ttool_choice: \"any\",\n\t\t\t};\n\t\tdefault: {\n\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\tthrow new Error(`Unsupported tool choice type: ${exhaustiveCheck}`);\n\t\t}\n\t}\n}\n\nexport function lastMessageWasUser<T extends { role: string }>(messages: T[]) {\n\treturn messages.length > 0 && messages[messages.length - 1]!.role === \"user\";\n}\n\nfunction mergePartialToolCalls(partialCalls: any[]) {\n\tconst mergedCallsByIndex: any = {};\n\n\tfor (const partialCall of partialCalls) {\n\t\tconst index = partialCall.index;\n\n\t\tif (!mergedCallsByIndex[index]) {\n\t\t\tmergedCallsByIndex[index] = {\n\t\t\t\tid: partialCall.id || \"\",\n\t\t\t\ttype: partialCall.type || \"\",\n\t\t\t\tfunction: {\n\t\t\t\t\tname: partialCall.function?.name || \"\",\n\t\t\t\t\targuments: \"\",\n\t\t\t\t},\n\t\t\t};\n\t\t} else {\n\t\t\tif (partialCall.id) {\n\t\t\t\tmergedCallsByIndex[index].id = partialCall.id;\n\t\t\t}\n\t\t\tif (partialCall.type) {\n\t\t\t\tmergedCallsByIndex[index].type = partialCall.type;\n\t\t\t}\n\n\t\t\tif (partialCall.function?.name) {\n\t\t\t\tmergedCallsByIndex[index].function.name = partialCall.function.name;\n\t\t\t}\n\t\t}\n\n\t\t// Append arguments if available, this assumes arguments come in the right order\n\t\tif (partialCall.function?.arguments) {\n\t\t\tmergedCallsByIndex[index].function.arguments += partialCall.function.arguments;\n\t\t}\n\t}\n\n\treturn Object.values(mergedCallsByIndex);\n}\n\nfunction processToolCall(toolCall: any): LanguageModelV1FunctionToolCall {\n\t// Check for OpenAI format tool calls first\n\tif (toolCall.function && toolCall.id) {\n\t\treturn {\n\t\t\ttoolCallType: \"function\",\n\t\t\ttoolCallId: toolCall.id,\n\t\t\ttoolName: toolCall.function.name,\n\t\t\targs:\n\t\t\t\ttypeof toolCall.function.arguments === \"string\"\n\t\t\t\t\t? toolCall.function.arguments\n\t\t\t\t\t: JSON.stringify(toolCall.function.arguments || {}),\n\t\t};\n\t}\n\treturn {\n\t\ttoolCallType: \"function\",\n\t\ttoolCallId: toolCall.name,\n\t\ttoolName: toolCall.name,\n\t\targs:\n\t\t\ttypeof toolCall.arguments === \"string\"\n\t\t\t\t? 
toolCall.arguments\n\t\t\t\t: JSON.stringify(toolCall.arguments || {}),\n\t};\n}\n\nexport function processToolCalls(output: any): LanguageModelV1FunctionToolCall[] {\n\tif (output.tool_calls && Array.isArray(output.tool_calls)) {\n\t\treturn output.tool_calls.map((toolCall: any) => {\n\t\t\tconst processedToolCall = processToolCall(toolCall);\n\t\t\treturn processedToolCall;\n\t\t});\n\t}\n\n\treturn [];\n}\n\nexport function processPartialToolCalls(partialToolCalls: any[]) {\n\tconst mergedToolCalls = mergePartialToolCalls(partialToolCalls);\n\treturn processToolCalls({ tool_calls: mergedToolCalls });\n}\n","import { events } from \"fetch-event-stream\";\n\nimport type { LanguageModelV1StreamPart } from \"@ai-sdk/provider\";\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport { processPartialToolCalls } from \"./utils\";\n\nexport function getMappedStream(response: Response) {\n\tconst chunkEvent = events(response);\n\tlet usage = { promptTokens: 0, completionTokens: 0 };\n\tconst partialToolCalls: any[] = [];\n\n\treturn new ReadableStream<LanguageModelV1StreamPart>({\n\t\tasync start(controller) {\n\t\t\tfor await (const event of chunkEvent) {\n\t\t\t\tif (!event.data) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (event.data === \"[DONE]\") {\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tconst chunk = JSON.parse(event.data);\n\t\t\t\tif (chunk.usage) {\n\t\t\t\t\tusage = mapWorkersAIUsage(chunk);\n\t\t\t\t}\n\t\t\t\tif (chunk.tool_calls) {\n\t\t\t\t\tpartialToolCalls.push(...chunk.tool_calls);\n\t\t\t\t}\n\t\t\t\tchunk.response?.length &&\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\ttextDelta: chunk.response,\n\t\t\t\t\t});\n\t\t\t}\n\n\t\t\tif (partialToolCalls.length > 0) {\n\t\t\t\tconst toolCalls = processPartialToolCalls(partialToolCalls);\n\t\t\t\ttoolCalls.map((toolCall) => {\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"tool-call\",\n\t\t\t\t\t\t...toolCall,\n\t\t\t\t\t});\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tcontroller.enqueue({\n\t\t\t\ttype: \"finish\",\n\t\t\t\tfinishReason: \"stop\",\n\t\t\t\tusage: usage,\n\t\t\t});\n\t\t\tcontroller.close();\n\t\t},\n\t});\n}\n","import { TooManyEmbeddingValuesForCallError, type EmbeddingModelV1 } from \"@ai-sdk/provider\";\nimport type { StringLike } from \"./utils\";\nimport type { EmbeddingModels } from \"./workersai-models\";\n\nexport type WorkersAIEmbeddingConfig = {\n\tprovider: string;\n\tbinding: Ai;\n\tgateway?: GatewayOptions;\n};\n\nexport type WorkersAIEmbeddingSettings = {\n\tgateway?: GatewayOptions;\n\tmaxEmbeddingsPerCall?: number;\n\tsupportsParallelCalls?: boolean;\n} & {\n\t/**\n\t * Arbitrary provider-specific options forwarded unmodified.\n\t */\n\t[key: string]: StringLike;\n};\n\nexport class WorkersAIEmbeddingModel implements EmbeddingModelV1<string> {\n\t/**\n\t * Semantic version of the {@link EmbeddingModelV1} specification implemented\n\t * by this class. It never changes.\n\t */\n\treadonly specificationVersion = \"v1\";\n\treadonly modelId: EmbeddingModels;\n\tprivate readonly config: WorkersAIEmbeddingConfig;\n\tprivate readonly settings: WorkersAIEmbeddingSettings;\n\n\t/**\n\t * Provider name exposed for diagnostics and error reporting.\n\t */\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tget maxEmbeddingsPerCall(): number {\n\t\t// https://developers.cloudflare.com/workers-ai/platform/limits/#text-embeddings\n\t\tconst maxEmbeddingsPerCall = this.modelId === \"@cf/baai/bge-large-en-v1.5\" ? 
1500 : 3000;\n\t\treturn this.settings.maxEmbeddingsPerCall ?? maxEmbeddingsPerCall;\n\t}\n\n\tget supportsParallelCalls(): boolean {\n\t\treturn this.settings.supportsParallelCalls ?? true;\n\t}\n\n\tconstructor(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings: WorkersAIEmbeddingSettings,\n\t\tconfig: WorkersAIEmbeddingConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tasync doEmbed({\n\t\tvalues,\n\t}: Parameters<EmbeddingModelV1<string>[\"doEmbed\"]>[0]): Promise<\n\t\tAwaited<ReturnType<EmbeddingModelV1<string>[\"doEmbed\"]>>\n\t> {\n\t\tif (values.length > this.maxEmbeddingsPerCall) {\n\t\t\tthrow new TooManyEmbeddingValuesForCallError({\n\t\t\t\tprovider: this.provider,\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\tmaxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n\t\t\t\tvalues,\n\t\t\t});\n\t\t}\n\n\t\tconst { gateway, ...passthroughOptions } = this.settings;\n\n\t\tconst response = await this.config.binding.run(\n\t\t\tthis.modelId,\n\t\t\t{\n\t\t\t\ttext: values,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? gateway, ...passthroughOptions },\n\t\t);\n\n\t\treturn {\n\t\t\tembeddings: response.data,\n\t\t};\n\t}\n}\n","import {\n\ttype LanguageModelV1,\n\ttype LanguageModelV1CallWarning,\n\ttype LanguageModelV1StreamPart,\n\tUnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport { getMappedStream } from \"./streaming\";\nimport { lastMessageWasUser, prepareToolsAndToolChoice, processToolCalls } from \"./utils\";\n\ntype WorkersAIChatConfig = {\n\tprovider: string;\n\tbinding: Ai;\n\tgateway?: GatewayOptions;\n};\n\nexport class WorkersAIChatLanguageModel implements LanguageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\treadonly defaultObjectGenerationMode = \"json\";\n\n\treadonly modelId: TextGenerationModels;\n\treadonly settings: WorkersAIChatSettings;\n\n\tprivate readonly config: WorkersAIChatConfig;\n\n\tconstructor(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings: WorkersAIChatSettings,\n\t\tconfig: WorkersAIChatConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tprivate getArgs({\n\t\tmode,\n\t\tmaxTokens,\n\t\ttemperature,\n\t\ttopP,\n\t\tfrequencyPenalty,\n\t\tpresencePenalty,\n\t\tseed,\n\t}: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n\t\tconst type = mode.type;\n\n\t\tconst warnings: LanguageModelV1CallWarning[] = [];\n\n\t\tif (frequencyPenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"frequencyPenalty\",\n\t\t\t});\n\t\t}\n\n\t\tif (presencePenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"presencePenalty\",\n\t\t\t});\n\t\t}\n\n\t\tconst baseArgs = {\n\t\t\t// model id:\n\t\t\tmodel: this.modelId,\n\n\t\t\t// model specific settings:\n\t\t\tsafe_prompt: this.settings.safePrompt,\n\n\t\t\t// standardized settings:\n\t\t\tmax_tokens: maxTokens,\n\t\t\ttemperature,\n\t\t\ttop_p: topP,\n\t\t\trandom_seed: seed,\n\t\t};\n\n\t\tswitch (type) {\n\t\t\tcase \"regular\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: { ...baseArgs, ...prepareToolsAndToolChoice(mode) 
},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-json\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\tresponse_format: {\n\t\t\t\t\t\t\ttype: \"json_schema\",\n\t\t\t\t\t\t\tjson_schema: mode.schema,\n\t\t\t\t\t\t},\n\t\t\t\t\t\ttools: undefined,\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-tool\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\ttool_choice: \"any\",\n\t\t\t\t\t\ttools: [{ type: \"function\", function: mode.tool }],\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\t// @ts-expect-error - this is unreachable code\n\t\t\t// TODO: fixme\n\t\t\tcase \"object-grammar\": {\n\t\t\t\tthrow new UnsupportedFunctionalityError({\n\t\t\t\t\tfunctionality: \"object-grammar mode\",\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\t\tthrow new Error(`Unsupported type: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync doGenerate(\n\t\toptions: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst { gateway, safePrompt, ...passthroughOptions } = this.settings;\n\n\t\t// Extract image from messages if present\n\t\tconst { messages, images } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\t// TODO: support for multiple images\n\t\tif (images.length !== 0 && images.length !== 1) {\n\t\t\tthrow new Error(\"Multiple images are not yet supported as input\");\n\t\t}\n\n\t\tconst imagePart = images[0];\n\n\t\tconst output = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmessages: messages,\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// Convert Uint8Array to Array of integers for Llama 3.2 Vision model\n\t\t\t\t// TODO: maybe use the base64 string version?\n\t\t\t\t...(imagePart ? { image: Array.from(imagePart.image) } : {}),\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? gateway, ...passthroughOptions },\n\t\t);\n\n\t\tif (output instanceof ReadableStream) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\treturn {\n\t\t\ttext:\n\t\t\t\ttypeof output.response === \"object\" && output.response !== null\n\t\t\t\t\t? 
JSON.stringify(output.response) // ai-sdk expects a string here\n\t\t\t\t\t: output.response,\n\t\t\ttoolCalls: processToolCalls(output),\n\t\t\tfinishReason: \"stop\", // TODO: mapWorkersAIFinishReason(response.finish_reason),\n\t\t\trawCall: { rawPrompt: messages, rawSettings: args },\n\t\t\tusage: mapWorkersAIUsage(output),\n\t\t\twarnings,\n\t\t};\n\t}\n\n\tasync doStream(\n\t\toptions: Parameters<LanguageModelV1[\"doStream\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\t// Extract image from messages if present\n\t\tconst { messages, images } = convertToWorkersAIChatMessages(options.prompt);\n\n\t\t// [1] When the latest message is not a tool response, we use the regular generate function\n\t\t// and simulate it as a streamed response in order to satisfy the AI SDK's interface for\n\t\t// doStream...\n\t\tif (args.tools?.length && lastMessageWasUser(messages)) {\n\t\t\tconst response = await this.doGenerate(options);\n\n\t\t\tif (response instanceof ReadableStream) {\n\t\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\tstream: new ReadableStream<LanguageModelV1StreamPart>({\n\t\t\t\t\tasync start(controller) {\n\t\t\t\t\t\tif (response.text) {\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\t\t\ttextDelta: response.text,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (response.toolCalls) {\n\t\t\t\t\t\t\tfor (const toolCall of response.toolCalls) {\n\t\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\t\ttype: \"tool-call\",\n\t\t\t\t\t\t\t\t\t...toolCall,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\ttype: \"finish\",\n\t\t\t\t\t\t\tfinishReason: \"stop\",\n\t\t\t\t\t\t\tusage: response.usage,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcontroller.close();\n\t\t\t\t\t},\n\t\t\t\t}),\n\t\t\t\trawCall: { rawPrompt: messages, rawSettings: args },\n\t\t\t\twarnings,\n\t\t\t};\n\t\t}\n\n\t\t// [2] ...otherwise, we just proceed as normal and stream the response directly from the remote model.\n\t\tconst { gateway, ...passthroughOptions } = this.settings;\n\n\t\t// TODO: support for multiple images\n\t\tif (images.length !== 0 && images.length !== 1) {\n\t\t\tthrow new Error(\"Multiple images are not yet supported as input\");\n\t\t}\n\n\t\tconst imagePart = images[0];\n\n\t\tconst response = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmessages: messages,\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\tstream: true,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// Convert Uint8Array to Array of integers for Llama 3.2 Vision model\n\t\t\t\t// TODO: maybe use the base64 string version?\n\t\t\t\t...(imagePart ? { image: Array.from(imagePart.image) } : {}),\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? 
gateway, ...passthroughOptions },\n\t\t);\n\n\t\tif (!(response instanceof ReadableStream)) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\treturn {\n\t\t\tstream: getMappedStream(new Response(response)),\n\t\t\trawCall: { rawPrompt: messages, rawSettings: args },\n\t\t\twarnings,\n\t\t};\n\t}\n}\n","import type { ImageModelV1, ImageModelV1CallWarning } from \"@ai-sdk/provider\";\nimport type { WorkersAIImageConfig } from \"./workersai-image-config\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels } from \"./workersai-models\";\n\nexport class WorkersAIImageModel implements ImageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\n\tget maxImagesPerCall(): number {\n\t\treturn this.settings.maxImagesPerCall ?? 1;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\tconstructor(\n\t\treadonly modelId: ImageGenerationModels,\n\t\treadonly settings: WorkersAIImageSettings,\n\t\treadonly config: WorkersAIImageConfig,\n\t) {}\n\n\tasync doGenerate({\n\t\tprompt,\n\t\tn,\n\t\tsize,\n\t\taspectRatio,\n\t\tseed,\n\t\t// headers,\n\t\t// abortSignal,\n\t}: Parameters<ImageModelV1[\"doGenerate\"]>[0]): Promise<\n\t\tAwaited<ReturnType<ImageModelV1[\"doGenerate\"]>>\n\t> {\n\t\tconst { width, height } = getDimensionsFromSizeString(size);\n\n\t\tconst warnings: Array<ImageModelV1CallWarning> = [];\n\n\t\tif (aspectRatio != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"aspectRatio\",\n\t\t\t\tdetails: \"This model does not support aspect ratio. Use `size` instead.\",\n\t\t\t});\n\t\t}\n\n\t\tconst generateImage = async () => {\n\t\t\tconst outputStream: ReadableStream<Uint8Array> = await this.config.binding.run(\n\t\t\t\tthis.modelId,\n\t\t\t\t{\n\t\t\t\t\tprompt,\n\t\t\t\t\tseed,\n\t\t\t\t\twidth,\n\t\t\t\t\theight,\n\t\t\t\t},\n\t\t\t);\n\n\t\t\t// Convert the output stream to a Uint8Array.\n\t\t\treturn streamToUint8Array(outputStream);\n\t\t};\n\n\t\tconst images: Uint8Array[] = await Promise.all(\n\t\t\tArray.from({ length: n }, () => generateImage()),\n\t\t);\n\n\t\t// type AiTextToImageOutput = ReadableStream<Uint8Array>;\n\n\t\treturn {\n\t\t\timages,\n\t\t\twarnings,\n\t\t\tresponse: {\n\t\t\t\ttimestamp: new Date(),\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\theaders: {},\n\t\t\t},\n\t\t};\n\t}\n}\n\nfunction getDimensionsFromSizeString(size: string | undefined) {\n\tconst [width, height] = size?.split(\"x\") ?? [undefined, undefined];\n\n\treturn {\n\t\twidth: parseInteger(width),\n\t\theight: parseInteger(height),\n\t};\n}\n\nfunction parseInteger(value?: string) {\n\tif (value === \"\" || !value) return undefined;\n\tconst number = Number(value);\n\treturn Number.isInteger(number) ? 
number : undefined;\n}\n\nasync function streamToUint8Array(stream: ReadableStream<Uint8Array>): Promise<Uint8Array> {\n\tconst reader = stream.getReader();\n\tconst chunks: Uint8Array[] = [];\n\tlet totalLength = 0;\n\n\t// Read the stream until it is finished.\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tchunks.push(value);\n\t\ttotalLength += value.length;\n\t}\n\n\t// Allocate a new Uint8Array to hold all the data.\n\tconst result = new Uint8Array(totalLength);\n\tlet offset = 0;\n\tfor (const chunk of chunks) {\n\t\tresult.set(chunk, offset);\n\t\toffset += chunk.length;\n\t}\n\treturn result;\n}\n","import { AutoRAGChatLanguageModel } from \"./autorag-chat-language-model\";\nimport type { AutoRAGChatSettings } from \"./autorag-chat-settings\";\nimport { createRun } from \"./utils\";\nimport {\n\tWorkersAIEmbeddingModel,\n\ttype WorkersAIEmbeddingSettings,\n} from \"./workers-ai-embedding-model\";\nimport { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport { WorkersAIImageModel } from \"./workersai-image-model\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type {\n\tEmbeddingModels,\n\tImageGenerationModels,\n\tTextGenerationModels,\n} from \"./workersai-models\";\n\nexport type WorkersAISettings = (\n\t| {\n\t\t\t/**\n\t\t\t * Provide a Cloudflare AI binding.\n\t\t\t */\n\t\t\tbinding: Ai;\n\n\t\t\t/**\n\t\t\t * Credentials must be absent when a binding is given.\n\t\t\t */\n\t\t\taccountId?: never;\n\t\t\tapiKey?: never;\n\t }\n\t| {\n\t\t\t/**\n\t\t\t * Provide Cloudflare API credentials directly. Must be used if a binding is not specified.\n\t\t\t */\n\t\t\taccountId: string;\n\t\t\tapiKey: string;\n\t\t\t/**\n\t\t\t * Both binding must be absent if credentials are used directly.\n\t\t\t */\n\t\t\tbinding?: never;\n\t }\n) & {\n\t/**\n\t * Optionally specify a gateway.\n\t */\n\tgateway?: GatewayOptions;\n};\n\nexport interface WorkersAI {\n\t(modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings?: WorkersAIChatSettings,\n\t): WorkersAIChatLanguageModel;\n\n\tembedding(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings?: WorkersAIEmbeddingSettings,\n\t): WorkersAIEmbeddingModel;\n\n\ttextEmbedding(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings?: WorkersAIEmbeddingSettings,\n\t): WorkersAIEmbeddingModel;\n\n\ttextEmbeddingModel(\n\t\tmodelId: EmbeddingModels,\n\t\tsettings?: WorkersAIEmbeddingSettings,\n\t): WorkersAIEmbeddingModel;\n\n\t/**\n\t * Creates a model for image generation.\n\t **/\n\timage(modelId: ImageGenerationModels, settings?: WorkersAIImageSettings): WorkersAIImageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createWorkersAI(options: WorkersAISettings): WorkersAI {\n\t// Use a binding if one is directly provided. 
Otherwise use credentials to create\n\t// a `run` method that calls the Cloudflare REST API.\n\tlet binding: Ai | undefined;\n\n\tif (options.binding) {\n\t\tbinding = options.binding;\n\t} else {\n\t\tconst { accountId, apiKey } = options;\n\t\tbinding = {\n\t\t\trun: createRun({ accountId, apiKey }),\n\t\t} as Ai;\n\t}\n\n\tif (!binding) {\n\t\tthrow new Error(\"Either a binding or credentials must be provided.\");\n\t}\n\n\tconst createChatModel = (modelId: TextGenerationModels, settings: WorkersAIChatSettings = {}) =>\n\t\tnew WorkersAIChatLanguageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.chat\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst createImageModel = (\n\t\tmodelId: ImageGenerationModels,\n\t\tsettings: WorkersAIImageSettings = {},\n\t) =>\n\t\tnew WorkersAIImageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.image\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\tconst createEmbeddingModel = (\n\t\tmodelId: EmbeddingModels,\n\t\tsettings: WorkersAIEmbeddingSettings = {},\n\t) =>\n\t\tnew WorkersAIEmbeddingModel(modelId, settings, {\n\t\t\tprovider: \"workersai.embedding\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst provider = (modelId: TextGenerationModels, settings?: WorkersAIChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(modelId, settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\tprovider.embedding = createEmbeddingModel;\n\tprovider.textEmbedding = createEmbeddingModel;\n\tprovider.textEmbeddingModel = createEmbeddingModel;\n\tprovider.image = createImageModel;\n\tprovider.imageModel = createImageModel;\n\n\treturn provider;\n}\n\nexport type AutoRAGSettings = {\n\tbinding: AutoRAG;\n};\n\nexport interface AutoRAGProvider {\n\t(options?: AutoRAGChatSettings): AutoRAGChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(settings?: AutoRAGChatSettings): AutoRAGChatLanguageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createAutoRAG(options: AutoRAGSettings): AutoRAGProvider {\n\tconst binding = options.binding;\n\n\tconst createChatModel = (settings: AutoRAGChatSettings = {}) =>\n\t\tnew AutoRAGChatLanguageModel(\"@cf/meta/llama-3.3-70b-instruct-fp8-fast\", settings, {\n\t\t\tprovider: \"autorag.chat\",\n\t\t\tbinding,\n\t\t});\n\n\tconst provider = (settings?: AutoRAGChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\n\treturn 
provider;\n}\n"],"mappings":";;;;;;;;;;;;AAAA;AAAA,EAGC;AAAA,OACM;;;ACDA,SAAS,+BAA+B,QAO7C;AACD,QAAM,WAAgC,CAAC;AACvC,QAAM,SAIA,CAAC;AAEP,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACvC,YAAQ,MAAM;AAAA,MACb,KAAK,UAAU;AACd,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACD;AAAA,MAEA,KAAK,QAAQ;AACZ,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS,QACP,IAAI,CAAC,SAAS;AACd,oBAAQ,KAAK,MAAM;AAAA,cAClB,KAAK,QAAQ;AACZ,uBAAO,KAAK;AAAA,cACb;AAAA,cACA,KAAK,SAAS;AAEb,oBAAI,KAAK,iBAAiB,YAAY;AAGrC,yBAAO,KAAK;AAAA,oBACX,UAAU,KAAK;AAAA,oBACf,OAAO,KAAK;AAAA,oBACZ,kBAAkB,KAAK;AAAA,kBACxB,CAAC;AAAA,gBACF;AACA,uBAAO;AAAA,cACR;AAAA,YACD;AAAA,UACD,CAAC,EACA,KAAK,IAAI;AAAA,QACZ,CAAC;AACD;AAAA,MACD;AAAA,MAEA,KAAK,aAAa;AACjB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC3B,kBAAQ,KAAK,MAAM;AAAA,YAClB,KAAK,QAAQ;AACZ,sBAAQ,KAAK;AACb;AAAA,YACD;AAAA,YACA,KAAK,aAAa;AACjB,qBAAO,KAAK,UAAU;AAAA,gBACrB,MAAM,KAAK;AAAA,gBACX,YAAY,KAAK;AAAA,cAClB,CAAC;AAED,wBAAU,KAAK;AAAA,gBACd,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACT,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACpC;AAAA,cACD,CAAC;AACD;AAAA,YACD;AAAA,YACA,SAAS;AACR,oBAAM,kBAAkB;AACxB,oBAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,YACvD;AAAA,UACD;AAAA,QACD;AAEA,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACC,UAAU,SAAS,IAChB,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC5D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACnC,EAAE,IACD;AAAA,QACL,CAAC;AAED;AAAA,MACD;AAAA,MACA,KAAK,QAAQ;AACZ,mBAAW,gBAAgB,SAAS;AACnC,mBAAS,KAAK;AAAA,YACb,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC5C,CAAC;AAAA,QACF;AACA;AAAA,MACD;AAAA,MACA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAEA,SAAO,EAAE,UAAU,OAAO;AAC3B;;;AC5HO,SAAS,kBAAkB,QAAsD;AACvF,QAAM,QACL,OAGC,SAAS;AAAA,IACV,eAAe;AAAA,IACf,mBAAmB;AAAA,EACpB;AAEA,SAAO;AAAA,IACN,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,EACzB;AACD;;;ACdA;AAgBO,IAAM,iBAAN,cAA6B,gBAAgB;AAAA;AAAA,EAGhD,YAAY,UAAU,EAAE,SAAS,MAAM,GAAG;AACtC,UAAM;AAAA,MACF,WAAW,CAAC,OAAO,eAAe;AAC9B,gBAAQ,mBAAK,gBAAe;AAC5B,eAAO,MAAM;AACT,gBAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,gBAAM,UAAU,QAAQ,UAAU,MAAM,QAAQ,IAAI,IAAI;AACxD,cAAI,YAAY,MAAM,YAAa,MAAM,SAAS,MAC7C,YAAY,MAAO,UAAU,IAAK,UAAU;AAC7C,uBAAW,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC;AAC1C,oBAAQ,MAAM,MAAM,UAAU,CAAC;AAC/B;AAAA,UACJ;AACA,cAAI,YAAY;AACZ;AACJ,gBAAM,WAAW,MAAM,UAAU,CAAC,MAAM,OAAO,UAAU,IAAI;AAC7D,qBAAW,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC;AAC3C,kBAAQ,MAAM,MAAM,UAAU,CAAC;AAAA,QACnC;AACA,2BAAK,cAAe;AAAA,MACxB;AAAA,MACA,OAAO,CAAC,eAAe;AACnB,YAAI,mBAAK,kBAAiB;AACtB;AACJ,cAAM,cAAc,QAAQ,WAAW,mBAAK,cAAa,SAAS,IAAI,IAChE,mBAAK,cAAa,MAAM,GAAG,EAAE,IAC7B,mBAAK;AACX,mBAAW,QAAQ,WAAW;AAAA,MAClC;AAAA,IACJ,CAAC;AA/BL,qCAAe;AAAA,EAgCf;AACJ;AAjCI;;;AChBG,SAAS,OAAO,OAAO;AAC1B,MAAI,UAAU,IAAI,kBAAkB;AACpC,MAAIA,SAAQ,IAAI,eAAe,EAAE,SAAS,KAAK,CAAC;AAChD,SAAO,MAAM,YAAY,OAAO,EAAE,YAAYA,MAAK;AACvD;AACO,SAAS,MAAM,OAAO;AACzB,MAAI,MAAM;AACV,MAAI,QAAQ,IAAI,KAAK,KAAK;AAE1B,MAAI,MAAM,SAAS,MAAM;AACzB,MAAI,KAAK;AACL,WAAO;AAAA,MACH,MAAM,UAAU,GAAG,GAAG;AAAA,MACtB,MAAM,UAAU,MAAM,MAAM,CAAC,EAAE,MAAM;AAAA,IACzC;AAAA,EACJ;AACJ;;;ACgBA,gBAAuB,OAAO,KAAK,QAAQ;AAEvC,MAAI,CAAC,IAAI;AACL;AACJ,MAAI,OAAa,OAAO,IAAI,IAAI;AAChC,MAAI,MAAM,SAAS,KAAK,UAAU;AAClC,MAAI;AACJ,aAAS;AACL,QAAI,UAAU,OAAO,SAAS;AAC1B,aAAO,OAAO,OAAO;AAAA,IACzB;AACA,WAAO,MAAM,OAAO,KAAK;AACzB,QAAI,KAAK;AACL;AACJ,QAAI,CAAC,KAAK,OAAO;AACb,UAAI;AACA,cAAM;AACV,cAAQ;AACR;AAAA,IACJ;AACA,QAAI,CAAC,OAAO,KAAK,IAAU,MAAM,KAAK,KAAK,KAAK,CAAC;AACjD,QAAI,CAAC;AACD;AACJ,QAAI,UAAU,QAAQ;AAClB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,MAAM,KAAK,IAAK,MAAM,KAAK,IAAI,OAA
O,QAAS;AAAA,IAClE,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI;AAAA,IACnB,WACS,UAAU,MAAM;AACrB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B;AAAA,EACJ;AACJ;;;ACdO,SAAS,UAAU,QAAgC;AACzD,QAAM,EAAE,WAAW,OAAO,IAAI;AAG9B,SAAO,eAAe,IACrB,OACA,QACA,SAC0F;AAC1F,UAAM,EAAE,SAAS,QAAQ,cAAc,mBAAmB,GAAG,mBAAmB,IAC/E,WAAW,CAAC;AAEb,UAAM,YAAY,IAAI,gBAAgB;AACtC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,kBAAkB,GAAG;AAE9D,UAAI;AACH,cAAM,WAAW,MAAM,SAAS;AAChC,YAAI,CAAC,UAAU;AACd;AAAA,QACD;AACA,kBAAU,OAAO,KAAK,QAAQ;AAAA,MAC/B,SAAS,OAAO;AACf,cAAM,IAAI;AAAA,UACT,qBAAqB,GAAG;AAAA,QACzB;AAAA,MACD;AAAA,IACD;AAEA,UAAM,MAAM,iDAAiD,SAAS,WAAW,KAAK,GACrF,YAAY,IAAI,SAAS,KAAK,EAC/B;AAGA,UAAM,UAAU;AAAA,MACf,gBAAgB;AAAA,MAChB,eAAe,UAAU,MAAM;AAAA,IAChC;AAEA,UAAM,OAAO,KAAK,UAAU,MAAM;AAGlC,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MACjC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,IACD,CAAC;AAGD,QAAI,mBAAmB;AACtB,aAAO;AAAA,IACR;AAGA,QAAK,OAAiC,WAAW,MAAM;AACtD,UAAI,SAAS,MAAM;AAClB,eAAO,SAAS;AAAA,MACjB;AACA,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC5D;AAGA,UAAM,OAAO,MAAM,SAAS,KAEzB;AACH,WAAO,KAAK;AAAA,EACb;AACD;AAEO,SAAS,0BACf,MAGC;AAED,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AAClB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACxC,MAAM;AAAA,IACN,UAAU;AAAA,MACT,MAAM,KAAK;AAAA;AAAA,MAEX,aAAa,KAAK;AAAA;AAAA,MAElB,YAAY,KAAK;AAAA,IAClB;AAAA,EACD,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACvB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACrD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA;AAAA;AAAA,IAIjD,KAAK;AACJ,aAAO;AAAA,QACN,OAAO,YAAY,OAAO,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW,QAAQ;AAAA,QAC9E,aAAa;AAAA,MACd;AAAA,IACD,SAAS;AACR,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACnE;AAAA,EACD;AACD;AAEO,SAAS,mBAA+C,UAAe;AAC7E,SAAO,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,CAAC,EAAG,SAAS;AACvE;AAEA,SAAS,sBAAsB,cAAqB;AACnD,QAAM,qBAA0B,CAAC;AAEjC,aAAW,eAAe,cAAc;AACvC,UAAM,QAAQ,YAAY;AAE1B,QAAI,CAAC,mBAAmB,KAAK,GAAG;AAC/B,yBAAmB,KAAK,IAAI;AAAA,QAC3B,IAAI,YAAY,MAAM;AAAA,QACtB,MAAM,YAAY,QAAQ;AAAA,QAC1B,UAAU;AAAA,UACT,MAAM,YAAY,UAAU,QAAQ;AAAA,UACpC,WAAW;AAAA,QACZ;AAAA,MACD;AAAA,IACD,OAAO;AACN,UAAI,YAAY,IAAI;AACnB,2BAAmB,KAAK,EAAE,KAAK,YAAY;AAAA,MAC5C;AACA,UAAI,YAAY,MAAM;AACrB,2BAAmB,KAAK,EAAE,OAAO,YAAY;AAAA,MAC9C;AAEA,UAAI,YAAY,UAAU,MAAM;AAC/B,2BAAmB,KAAK,EAAE,SAAS,OAAO,YAAY,SAAS;AAAA,MAChE;AAAA,IACD;AAGA,QAAI,YAAY,UAAU,WAAW;AACpC,yBAAmB,KAAK,EAAE,SAAS,aAAa,YAAY,SAAS;AAAA,IACtE;AAAA,EACD;AAEA,SAAO,OAAO,OAAO,kBAAkB;AACxC;AAEA,SAAS,gBAAgB,UAAgD;AAExE,MAAI,SAAS,YAAY,SAAS,IAAI;AACrC,WAAO;AAAA,MACN,cAAc;AAAA,MACd,YAAY,SAAS;AAAA,MACrB,UAAU,SAAS,SAAS;AAAA,MAC5B,MACC,OAAO,SAAS,SAAS,cAAc,WACpC,SAAS,SAAS,YAClB,KAAK,UAAU,SAAS,SAAS,aAAa,CAAC,CAAC;AAAA,IACrD;AAAA,EACD;AACA,SAAO;AAAA,IACN,cAAc;AAAA,IACd,YAAY,SAAS;AAAA,IACrB,UAAU,SAAS;AAAA,IACnB,MACC,OAAO,SAAS,cAAc,WAC3B,SAAS,YACT,KAAK,UAAU,SAAS,aAAa,CAAC,CAAC;AAAA,EAC5C;AACD;AAEO,SAAS,iBAAiB,QAAgD;AAChF,MAAI,OAAO,cAAc,MAAM,QAAQ,OAAO,UAAU,GAAG;AAC1D,WAAO,OAAO,WAAW,IAAI,CAAC,aAAkB;AAC/C,YAAM,oBAAoB,gBAAgB,QAAQ;AAClD,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAEA,SAAO,CAAC;AACT;AAEO,SAAS,wBAAwB,kBAAyB;AAChE,QAAM,kBAAkB,sBAAsB,gBAAgB;AAC9D,SAAO,iBAAiB,EAAE,YAAY,gBAAgB,CAAC;AACxD;;;AC7PO,SAAS,gBAAgB,UAAoB;AACnD,QAAM,aAAa,OAAO,QAAQ;AAClC,MAAI,QAAQ,EAAE,cAAc,GAAG,kBAAkB,EAAE;AACnD,QAAM,mBAA0B,CAAC;AAEjC,SAAO,IAAI,eAA0C;AAAA,IACpD,MAAM,MAAM,YAAY;AACvB,uBAAiB,SAAS,YAAY;AACrC,YAAI,CAAC,MAAM,MAAM;AAChB;AAAA,QACD;A
ACA,YAAI,MAAM,SAAS,UAAU;AAC5B;AAAA,QACD;AACA,cAAM,QAAQ,KAAK,MAAM,MAAM,IAAI;AACnC,YAAI,MAAM,OAAO;AAChB,kBAAQ,kBAAkB,KAAK;AAAA,QAChC;AACA,YAAI,MAAM,YAAY;AACrB,2BAAiB,KAAK,GAAG,MAAM,UAAU;AAAA,QAC1C;AACA,cAAM,UAAU,UACf,WAAW,QAAQ;AAAA,UAClB,MAAM;AAAA,UACN,WAAW,MAAM;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,UAAI,iBAAiB,SAAS,GAAG;AAChC,cAAM,YAAY,wBAAwB,gBAAgB;AAC1D,kBAAU,IAAI,CAAC,aAAa;AAC3B,qBAAW,QAAQ;AAAA,YAClB,MAAM;AAAA,YACN,GAAG;AAAA,UACJ,CAAC;AAAA,QACF,CAAC;AAAA,MACF;AAEA,iBAAW,QAAQ;AAAA,QAClB,MAAM;AAAA,QACN,cAAc;AAAA,QACd;AAAA,MACD,CAAC;AACD,iBAAW,MAAM;AAAA,IAClB;AAAA,EACD,CAAC;AACF;;;APjCO,IAAM,2BAAN,MAA0D;AAAA,EAShE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,OAAO,KAAK;AAAA;AAAA,MAGZ,UAAU,+BAA+B,MAAM;AAAA,IAChD;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,MAAM;AAAA,cACN,aAAa,KAAK;AAAA,YACnB;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAI,8BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,IAAI,+BAA+B,QAAQ,MAAM;AAElE,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ,SAAS;AAAA,MACjD,OAAO,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM,GAAG,IAAI,KAAK,OAAO,EAAE,EAAE,KAAK,MAAM;AAAA,IAC9E,CAAC;AAED,WAAO;AAAA,MACN,MAAM,OAAO;AAAA,MACb,WAAW,iBAAiB,MAAM;AAAA,MAClC,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,MACA,SAAS,OAAO,KAAK,IAAI,CAAC,EAAE,SAAS,UAAU,MAAM,OAAO;AAAA,QAC3D,IAAI;AAAA,QACJ,YAAY;AAAA,QACZ,KAAK;AAAA,QACL,kBAAkB;AAAA,UACjB,YAAY,EAAE,MAAM;AAAA,QACrB;AAAA,MACD,EAAE;AAAA,IACH;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,IAAI,+BAA+B,QAAQ,MAAM;AAElE,UAAM,QAAQ,SAAS,IAAI,CAAC,EAAE,SAAS,KAAK,MAAM,GAAG,IAAI,KAAK,OAAO,EAAE,EAAE,KAAK,MAAM;AAEpF,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ,SAAS;AAAA,MACnD;AAAA,MACA,QAAQ;AAAA,IACT,CAAC;AAED,WAAO;AAAA,MACN,QAAQ,gBAAgB,QAAQ;AAAA,MAChC,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AACD;;;AQ3KA,SAAS,0CAAiE;AAqBnE,IAAM,0BAAN,MAAkE;AAAA,EA2BxE,YACC,SACA,UACA,QACC;AA1BF;AAAA;AAAA;AAAA;AAAA,wBAAS,wBAAuB;AAChC,wBAAS;AACT,wBAAiB;AACjB,wBAAiB;AAwBhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA;AAAA;AAAA;AAAA,EAtBA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEA,IAAI,uBAA+B;AAElC,UAAM,uBAAuB,KAAK,YAAY,+BAA+B,OAAO;AACpF,WAAO,KAAK,SAAS,wBAAwB;AAAA,EAC9C;AAAA,EAEA,IAAI,wBAAiC;AACpC,WAAO,KAAK,SAAS,yBAAyB;AAAA,EAC/C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACb;AAAA,EACD,GAEE;AACD,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC9C,YAAM,IAAI,mCAAmC;AAAA,QAC5C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACD,CAAC;AAAA,IACF;AAEA,UAAM
,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAEhD,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC,MAAM;AAAA,MACP;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,WAAO;AAAA,MACN,YAAY,SAAS;AAAA,IACtB;AAAA,EACD;AACD;;;ACtFA;AAAA,EAIC,iCAAAC;AAAA,OACM;AAeA,IAAM,6BAAN,MAA4D;AAAA,EASlE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA,IACd;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,MAAM;AAAA,cACN,aAAa,KAAK;AAAA,YACnB;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAIC,+BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,YAAY,GAAG,mBAAmB,IAAI,KAAK;AAG5D,UAAM,EAAE,UAAU,OAAO,IAAI,+BAA+B,QAAQ,MAAM;AAG1E,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,QACC;AAAA,QACA,YAAY,KAAK;AAAA,QACjB,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,kBAAkB,gBAAgB;AACrC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,MACC,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,OACxD,KAAK,UAAU,OAAO,QAAQ,IAC9B,OAAO;AAAA,MACX,WAAW,iBAAiB,MAAM;AAAA,MAClC,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAG/C,UAAM,EAAE,UAAU,OAAO,IAAI,+BAA+B,QAAQ,MAAM;AAK1E,QAAI,KAAK,OAAO,UAAU,mBAAmB,QAAQ,GAAG;AACvD,YAAMC,YAAW,MAAM,KAAK,WAAW,OAAO;AAE9C,UAAIA,qBAAoB,gBAAgB;AACvC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACxC;AAEA,aAAO;AAAA,QACN,QAAQ,IAAI,eAA0C;AAAA,UACrD,MAAM,MAAM,YAAY;AACvB,gBAAIA,UAAS,MAAM;AAClB,yBAAW,QAAQ;AAAA,gBAClB,MAAM;AAAA,gBACN,WAAWA,UAAS;AAAA,cACrB,CAAC;AAAA,YACF;AACA,gBAAIA,UAAS,WAAW;AACvB,yBAAW,YAAYA,UAAS,WAAW;AAC1C,2BAAW,QAAQ;AAAA,kBAClB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACJ,CAAC;AAAA,cACF;AAAA,YACD;AACA,uBAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAOA,UAAS;AAAA,YACjB,CAAC;AACD,uBAAW,MAAM;AAAA,UAClB;AAAA,QACD,CAAC;AAAA,QACD,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,QAClD;AAAA,MACD;AAAA,IACD;AAGA,UAAM,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAGhD,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC;AAAA
,QACA,YAAY,KAAK;AAAA,QACjB,QAAQ;AAAA,QACR,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,EAAE,oBAAoB,iBAAiB;AAC1C,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,QAAQ,gBAAgB,IAAI,SAAS,QAAQ,CAAC;AAAA,MAC9C,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD;AAAA,IACD;AAAA,EACD;AACD;;;ACxQO,IAAM,sBAAN,MAAkD;AAAA,EAUxD,YACU,SACA,UACA,QACR;AAHQ;AACA;AACA;AAZV,wBAAS,wBAAuB;AAAA,EAa7B;AAAA,EAXH,IAAI,mBAA2B;AAC9B,WAAO,KAAK,SAAS,oBAAoB;AAAA,EAC1C;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAOA,MAAM,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;AAAA,EAGD,GAEE;AACD,UAAM,EAAE,OAAO,OAAO,IAAI,4BAA4B,IAAI;AAE1D,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACxB,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY;AACjC,YAAM,eAA2C,MAAM,KAAK,OAAO,QAAQ;AAAA,QAC1E,KAAK;AAAA,QACL;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAGA,aAAO,mBAAmB,YAAY;AAAA,IACvC;AAEA,UAAM,SAAuB,MAAM,QAAQ;AAAA,MAC1C,MAAM,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAAC;AAAA,IAChD;AAIA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,MACA,UAAU;AAAA,QACT,WAAW,oBAAI,KAAK;AAAA,QACpB,SAAS,KAAK;AAAA,QACd,SAAS,CAAC;AAAA,MACX;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,4BAA4B,MAA0B;AAC9D,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,QAAW,MAAS;AAEjE,SAAO;AAAA,IACN,OAAO,aAAa,KAAK;AAAA,IACzB,QAAQ,aAAa,MAAM;AAAA,EAC5B;AACD;AAEA,SAAS,aAAa,OAAgB;AACrC,MAAI,UAAU,MAAM,CAAC,MAAO,QAAO;AACnC,QAAM,SAAS,OAAO,KAAK;AAC3B,SAAO,OAAO,UAAU,MAAM,IAAI,SAAS;AAC5C;AAEA,eAAe,mBAAmBC,SAAyD;AAC1F,QAAM,SAASA,QAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AAC9B,MAAI,cAAc;AAGlB,SAAO,MAAM;AACZ,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,WAAO,KAAK,KAAK;AACjB,mBAAe,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC3B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EACjB;AACA,SAAO;AACR;;;AC/BO,SAAS,gBAAgB,SAAuC;AAGtE,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACpB,cAAU,QAAQ;AAAA,EACnB,OAAO;AACN,UAAM,EAAE,WAAW,OAAO,IAAI;AAC9B,cAAU;AAAA,MACT,KAAK,UAAU,EAAE,WAAW,OAAO,CAAC;AAAA,IACrC;AAAA,EACD;AAEA,MAAI,CAAC,SAAS;AACb,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,kBAAkB,CAAC,SAA+B,WAAkC,CAAC,MAC1F,IAAI,2BAA2B,SAAS,UAAU;AAAA,IACjD,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,mBAAmB,CACxB,SACA,WAAmC,CAAC,MAEpC,IAAI,oBAAoB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AACF,QAAM,uBAAuB,CAC5B,SACA,WAAuC,CAAC,MAExC,IAAI,wBAAwB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,WAAW,CAAC,SAA+B,aAAqC;AACrF,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EACzC;AAEA,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,SAAO;AACR;AAiBO,SAAS,cAAc,SAA2C;AACxE,QAAM,UAAU,QAAQ;AAExB,QAAM,kBAAkB,CAAC,WAAgC,CAAC,MACzD,IAAI,yBAAyB,4CAA4C,UAAU;AAAA,IAClF,UAAU;AAAA,IACV;AAAA,EACD,CAAC;AAEF,QAAM,WAAW,CAAC,aAAmC;AACpD,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,QAAQ;AAAA,EAChC;AAEA,WAAS,OAAO;AAEhB,SAAO;AACR;","names":["split","UnsupportedFunctionalityError","UnsupportedFunctionalityError","response","stream"]}
package/package.json CHANGED
@@ -2,7 +2,7 @@
  "name": "workers-ai-provider",
  "description": "Workers AI Provider for the vercel AI SDK",
  "type": "module",
- "version": "0.5.2",
+ "version": "0.6.0",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "repository": {
@@ -34,7 +34,8 @@
  "@ai-sdk/provider": "^1.1.3"
  },
  "devDependencies": {
- "@cloudflare/workers-types": "^4.20250521.0"
+ "@cloudflare/workers-types": "^4.20250525.0",
+ "zod": "^3.25.28"
  },
  "scripts": {
  "build": "rm -rf dist && tsup src/index.ts --dts --sourcemap --format esm --target es2020",
package/src/streaming.ts CHANGED
@@ -24,7 +24,6 @@ export function getMappedStream(response: Response) {
  }
  if (chunk.tool_calls) {
  partialToolCalls.push(...chunk.tool_calls);
- continue;
  }
  chunk.response?.length &&
  controller.enqueue({
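
Effect of the change above: with the continue removed, a streamed chunk that carries tool_calls no longer short-circuits the loop, so any text the same chunk carries in its response field is still emitted as a text-delta part instead of being dropped. The sketch below illustrates that behaviour under an assumed chunk shape; WorkersAIChunk and handleChunk are illustrative names for this sketch, not exports of workers-ai-provider.

// A minimal sketch, assuming this chunk shape.
type WorkersAIChunk = {
  response?: string;
  tool_calls?: unknown[];
};

function handleChunk(
  chunk: WorkersAIChunk,
  partialToolCalls: unknown[],
  enqueue: (part: { type: "text-delta"; textDelta: string }) => void,
): void {
  if (chunk.tool_calls) {
    // Accumulate partial tool calls, but no longer skip the rest of the loop body.
    partialToolCalls.push(...chunk.tool_calls);
  }
  if (chunk.response?.length) {
    // Text carried on the same chunk as a tool call is now still emitted.
    enqueue({ type: "text-delta", textDelta: chunk.response });
  }
}

The practical upshot appears to be that models which interleave text and tool calls within a single streamed chunk keep their text output.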
package/src/utils.ts CHANGED
@@ -220,6 +220,7 @@ function mergePartialToolCalls(partialCalls: any[]) {
  }
 
  function processToolCall(toolCall: any): LanguageModelV1FunctionToolCall {
+ // Check for OpenAI format tool calls first
  if (toolCall.function && toolCall.id) {
  return {
  toolCallType: "function",
@@ -243,7 +244,6 @@ function processToolCall(toolCall: any): LanguageModelV1FunctionToolCall {
  }
 
  export function processToolCalls(output: any): LanguageModelV1FunctionToolCall[] {
- // Check for OpenAI format tool calls first
  if (output.tool_calls && Array.isArray(output.tool_calls)) {
  return output.tool_calls.map((toolCall: any) => {
  const processedToolCall = processToolCall(toolCall);
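
The relocated comment now documents where the format check actually happens: processToolCall inspects each entry and handles the OpenAI-style shape ({ id, function: { name, arguments } }) first, while processToolCalls simply maps over output.tool_calls. A rough sketch of that dispatch follows; the fallback branch and the processToolCallSketch name are assumptions made for illustration, not the package's exact code.

import type { LanguageModelV1FunctionToolCall } from "@ai-sdk/provider";

function processToolCallSketch(toolCall: any): LanguageModelV1FunctionToolCall {
  // Check for OpenAI format tool calls first: { id, function: { name, arguments } }.
  if (toolCall.function && toolCall.id) {
    return {
      toolCallType: "function",
      toolCallId: toolCall.id,
      toolName: toolCall.function.name,
      args:
        typeof toolCall.function.arguments === "string"
          ? toolCall.function.arguments
          : JSON.stringify(toolCall.function.arguments ?? {}),
    };
  }
  // Otherwise assume a flat shape such as { name, arguments } (assumption for this sketch).
  return {
    toolCallType: "function",
    toolCallId: toolCall.name,
    toolName: toolCall.name,
    args: JSON.stringify(toolCall.arguments ?? {}),
  };
}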