workers-ai-provider 0.3.1 → 0.4.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -385,12 +385,7 @@ var WorkersAIChatLanguageModel = class {
     }
     return {
       text: typeof output.response === "object" && output.response !== null ? JSON.stringify(output.response) : output.response,
-      toolCalls: output.tool_calls?.map((toolCall) => ({
-        toolCallType: "function",
-        toolCallId: toolCall.name,
-        toolName: toolCall.name,
-        args: JSON.stringify(toolCall.arguments || {})
-      })),
+      toolCalls: processToolCalls(output),
       finishReason: "stop",
       // TODO: mapWorkersAIFinishReason(response.finish_reason),
       rawCall: { rawPrompt: messages, rawSettings: args },
@@ -496,6 +491,27 @@ var WorkersAIChatLanguageModel = class {
       };
   }
 };
+function processToolCalls(output) {
+  if (output.tool_calls && Array.isArray(output.tool_calls)) {
+    return output.tool_calls.map((toolCall) => {
+      if (toolCall.function && toolCall.id) {
+        return {
+          toolCallType: "function",
+          toolCallId: toolCall.id,
+          toolName: toolCall.function.name,
+          args: typeof toolCall.function.arguments === "string" ? toolCall.function.arguments : JSON.stringify(toolCall.function.arguments || {})
+        };
+      }
+      return {
+        toolCallType: "function",
+        toolCallId: toolCall.name,
+        toolName: toolCall.name,
+        args: typeof toolCall.arguments === "string" ? toolCall.arguments : JSON.stringify(toolCall.arguments || {})
+      };
+    });
+  }
+  return [];
+}
 function prepareToolsAndToolChoice(mode) {
   const tools = mode.tools?.length ? mode.tools : void 0;
   if (tools == null) {
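For context on the behavioural change: in 0.3.1 the toolCallId was always set to the tool name and the arguments were always re-stringified, whereas the new processToolCalls helper prefers the OpenAI-style shape (an id plus a nested function object) when the model returns one, and falls back to the legacy flat name/arguments shape otherwise. The sketch below copies the helper from the 0.4.0 dist above and exercises it with two hypothetical model outputs; the getWeather name, the call_123 id, and the Lisbon argument are made up for illustration, not taken from a real response.

// Copied verbatim (reformatted) from the 0.4.0 dist shown above.
function processToolCalls(output) {
  if (output.tool_calls && Array.isArray(output.tool_calls)) {
    return output.tool_calls.map((toolCall) => {
      // New OpenAI-style shape: an `id` plus a nested `function` object.
      if (toolCall.function && toolCall.id) {
        return {
          toolCallType: "function",
          toolCallId: toolCall.id,
          toolName: toolCall.function.name,
          args: typeof toolCall.function.arguments === "string"
            ? toolCall.function.arguments
            : JSON.stringify(toolCall.function.arguments || {})
        };
      }
      // Legacy flat shape: `name` and `arguments` at the top level.
      return {
        toolCallType: "function",
        toolCallId: toolCall.name,
        toolName: toolCall.name,
        args: typeof toolCall.arguments === "string"
          ? toolCall.arguments
          : JSON.stringify(toolCall.arguments || {})
      };
    });
  }
  return [];
}

// Hypothetical model outputs illustrating the two accepted shapes.
const openAiStyle = {
  tool_calls: [
    { id: "call_123", function: { name: "getWeather", arguments: '{"city":"Lisbon"}' } }
  ]
};
const legacyStyle = {
  tool_calls: [
    { name: "getWeather", arguments: { city: "Lisbon" } }
  ]
};

console.log(processToolCalls(openAiStyle));
// -> [{ toolCallType: "function", toolCallId: "call_123", toolName: "getWeather", args: '{"city":"Lisbon"}' }]
console.log(processToolCalls(legacyStyle));
// -> [{ toolCallType: "function", toolCallId: "getWeather", toolName: "getWeather", args: '{"city":"Lisbon"}' }]

Either way the result conforms to the ai-sdk tool-call shape, so callers of doGenerate and doStream see a consistent toolCalls array regardless of which format the Workers AI model emitted.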
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- [single-line minified source map for 0.3.1 (mappings plus embedded sourcesContent) omitted]
+ [single-line minified source map for 0.4.0 omitted; regenerated so the embedded workersai-chat-language-model.ts source calls processToolCalls(output) in doGenerate and defines the processToolCalls helper ahead of prepareToolsAndToolChoice]
[];\n\n\t\t\t\tfor (const part of content) {\n\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\ttext += part.text;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcase \"tool-call\": {\n\t\t\t\t\t\t\ttext = JSON.stringify({\n\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\tparameters: part.args,\n\t\t\t\t\t\t\t});\n\n\t\t\t\t\t\t\ttoolCalls.push({\n\t\t\t\t\t\t\t\tid: part.toolCallId,\n\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\tfunction: {\n\t\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\t\targuments: JSON.stringify(part.args),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdefault: {\n\t\t\t\t\t\t\tconst exhaustiveCheck = part;\n\t\t\t\t\t\t\tthrow new Error(\n\t\t\t\t\t\t\t\t`Unsupported part: ${exhaustiveCheck}`,\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\tcontent: text,\n\t\t\t\t\ttool_calls:\n\t\t\t\t\t\ttoolCalls.length > 0\n\t\t\t\t\t\t\t? toolCalls.map(\n\t\t\t\t\t\t\t\t\t({\n\t\t\t\t\t\t\t\t\t\tfunction: { name, arguments: args },\n\t\t\t\t\t\t\t\t\t}) => ({\n\t\t\t\t\t\t\t\t\t\tid: \"null\",\n\t\t\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\t\t\tfunction: { name, arguments: args },\n\t\t\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t: undefined,\n\t\t\t\t});\n\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"tool\": {\n\t\t\t\tfor (const toolResponse of content) {\n\t\t\t\t\tmessages.push({\n\t\t\t\t\t\trole: \"tool\",\n\t\t\t\t\t\tname: toolResponse.toolName,\n\t\t\t\t\t\tcontent: JSON.stringify(toolResponse.result),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = role satisfies never;\n\t\t\t\tthrow new Error(`Unsupported role: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn { messages, images };\n}\n","// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.\n// This module is browser compatible.\n/**\n * Transform a stream into a stream where each chunk is divided by a newline,\n * be it `\\n` or `\\r\\n`. `\\r` can be enabled via the `allowCR` option.\n *\n * @example\n * ```ts\n * import { TextLineStream } from \"@std/streams/text-line-stream\";\n *\n * const res = await fetch(\"https://example.com\");\n * const lines = res.body!\n * .pipeThrough(new TextDecoderStream())\n * .pipeThrough(new TextLineStream());\n * ```\n */\nexport class TextLineStream extends TransformStream {\n #currentLine = \"\";\n /** Constructs a new instance. */\n constructor(options = { allowCR: false }) {\n super({\n transform: (chars, controller) => {\n chars = this.#currentLine + chars;\n while (true) {\n const lfIndex = chars.indexOf(\"\\n\");\n const crIndex = options.allowCR ? chars.indexOf(\"\\r\") : -1;\n if (crIndex !== -1 && crIndex !== (chars.length - 1) &&\n (lfIndex === -1 || (lfIndex - 1) > crIndex)) {\n controller.enqueue(chars.slice(0, crIndex));\n chars = chars.slice(crIndex + 1);\n continue;\n }\n if (lfIndex === -1)\n break;\n const endIndex = chars[lfIndex - 1] === \"\\r\" ? lfIndex - 1 : lfIndex;\n controller.enqueue(chars.slice(0, endIndex));\n chars = chars.slice(lfIndex + 1);\n }\n this.#currentLine = chars;\n },\n flush: (controller) => {\n if (this.#currentLine === \"\")\n return;\n const currentLine = options.allowCR && this.#currentLine.endsWith(\"\\r\")\n ? 
this.#currentLine.slice(0, -1)\n : this.#currentLine;\n controller.enqueue(currentLine);\n },\n });\n }\n}\n","import { TextLineStream } from './deps/jsr.io/@std/streams/0.221.0/text_line_stream.js';\nexport function stream(input) {\n let decoder = new TextDecoderStream();\n let split = new TextLineStream({ allowCR: true });\n return input.pipeThrough(decoder).pipeThrough(split);\n}\nexport function split(input) {\n let rgx = /[:]\\s*/;\n let match = rgx.exec(input);\n // \": comment\" -> index=0 -> ignore\n let idx = match && match.index;\n if (idx) {\n return [\n input.substring(0, idx),\n input.substring(idx + match[0].length),\n ];\n }\n}\nexport function fallback(headers, key, value) {\n let tmp = headers.get(key);\n if (!tmp)\n headers.set(key, value);\n}\n","import * as utils from './utils.js';\n/**\n * Convert a `Response` body containing Server Sent Events (SSE) into an Async Iterator that yields {@linkcode ServerSentEventMessage} objects.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events}\n *\n * @example\n * ```js\n * // Optional\n * let abort = new AbortController;\n *\n * // Manually fetch a Response\n * let res = await fetch('https://...', {\n * method: 'POST',\n * signal: abort.signal,\n * headers: {\n * 'api-key': 'token <value>',\n * 'content-type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true, // <- hypothetical\n * // ...\n * })\n * });\n *\n * if (res.ok) {\n * let stream = events(res, abort.signal);\n * for await (let event of stream) {\n * console.log('<<', event.data);\n * }\n * }\n * ```\n */\nexport async function* events(res, signal) {\n // TODO: throw error?\n if (!res.body)\n return;\n let iter = utils.stream(res.body);\n let line, reader = iter.getReader();\n let event;\n for (;;) {\n if (signal && signal.aborted) {\n return reader.cancel();\n }\n line = await reader.read();\n if (line.done)\n return;\n if (!line.value) {\n if (event)\n yield event;\n event = undefined;\n continue;\n }\n let [field, value] = utils.split(line.value) || [];\n if (!field)\n continue; // comment or invalid\n if (field === 'data') {\n event ||= {};\n event[field] = event[field] ? 
(event[field] + '\\n' + value) : value;\n }\n else if (field === 'event') {\n event ||= {};\n event[field] = value;\n }\n else if (field === 'id') {\n event ||= {};\n event[field] = +value || value;\n }\n else if (field === 'retry') {\n event ||= {};\n event[field] = +value || undefined;\n }\n }\n}\n/**\n * Convenience function that will `fetch` with the given arguments and, if ok, will return the {@linkcode events} async iterator.\n *\n * If the response is not ok (status 200-299), the `Response` is thrown.\n *\n * @example\n * ```js\n * // NOTE: throws `Response` if not 2xx status\n * let events = await stream('https://api.openai.com/...', {\n * method: 'POST',\n * headers: {\n * 'Authorization': 'Bearer <token>',\n * 'Content-Type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true,\n * // ...\n * })\n * });\n *\n * for await (let event of events) {\n * console.log('<<', JSON.parse(event.data));\n * }\n * ```\n */\nexport async function stream(input, init) {\n let req = new Request(input, init);\n utils.fallback(req.headers, 'Accept', 'text/event-stream');\n utils.fallback(req.headers, 'Content-Type', 'application/json');\n let r = await fetch(req);\n if (!r.ok)\n throw r;\n return events(r, req.signal);\n}\n","export function mapWorkersAIUsage(output: AiTextGenerationOutput | AiTextToImageOutput) {\n\tconst usage = (\n\t\toutput as {\n\t\t\tusage: { prompt_tokens: number; completion_tokens: number };\n\t\t}\n\t).usage ?? {\n\t\tprompt_tokens: 0,\n\t\tcompletion_tokens: 0,\n\t};\n\n\treturn {\n\t\tpromptTokens: usage.prompt_tokens,\n\t\tcompletionTokens: usage.completion_tokens,\n\t};\n}\n","import type { ImageModelV1, ImageModelV1CallWarning } from \"@ai-sdk/provider\";\nimport type { WorkersAIImageConfig } from \"./workersai-image-config\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels } from \"./workersai-models\";\n\nexport class WorkersAIImageModel implements ImageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\n\tget maxImagesPerCall(): number {\n\t\treturn this.settings.maxImagesPerCall ?? 1;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\tconstructor(\n\t\treadonly modelId: ImageGenerationModels,\n\t\treadonly settings: WorkersAIImageSettings,\n\t\treadonly config: WorkersAIImageConfig,\n\t) {}\n\n\tasync doGenerate({\n\t\tprompt,\n\t\tn,\n\t\tsize,\n\t\taspectRatio,\n\t\tseed,\n\t\t// headers,\n\t\t// abortSignal,\n\t}: Parameters<ImageModelV1[\"doGenerate\"]>[0]): Promise<\n\t\tAwaited<ReturnType<ImageModelV1[\"doGenerate\"]>>\n\t> {\n\t\tconst { width, height } = getDimensionsFromSizeString(size);\n\n\t\tconst warnings: Array<ImageModelV1CallWarning> = [];\n\n\t\tif (aspectRatio != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"aspectRatio\",\n\t\t\t\tdetails: \"This model does not support aspect ratio. 
Use `size` instead.\",\n\t\t\t});\n\t\t}\n\n\t\tconst generateImage = async () => {\n\t\t\tconst outputStream: ReadableStream<Uint8Array> = await this.config.binding.run(\n\t\t\t\tthis.modelId,\n\t\t\t\t{\n\t\t\t\t\tprompt,\n\t\t\t\t\tseed,\n\t\t\t\t\twidth,\n\t\t\t\t\theight,\n\t\t\t\t},\n\t\t\t);\n\n\t\t\t// Convert the output stream to a Uint8Array.\n\t\t\treturn streamToUint8Array(outputStream);\n\t\t};\n\n\t\tconst images: Uint8Array[] = await Promise.all(\n\t\t\tArray.from({ length: n }, () => generateImage()),\n\t\t);\n\n\t\t// type AiTextToImageOutput = ReadableStream<Uint8Array>;\n\n\t\treturn {\n\t\t\timages,\n\t\t\twarnings,\n\t\t\tresponse: {\n\t\t\t\ttimestamp: new Date(),\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\theaders: {},\n\t\t\t},\n\t\t};\n\t}\n}\n\nfunction getDimensionsFromSizeString(size: string | undefined) {\n\tconst [width, height] = size?.split(\"x\") ?? [undefined, undefined];\n\n\treturn {\n\t\twidth: parseInteger(width),\n\t\theight: parseInteger(height),\n\t};\n}\n\nfunction parseInteger(value?: string) {\n\tif (value === \"\" || !value) return undefined;\n\tconst number = Number(value);\n\treturn Number.isInteger(number) ? number : undefined;\n}\n\nasync function streamToUint8Array(stream: ReadableStream<Uint8Array>): Promise<Uint8Array> {\n\tconst reader = stream.getReader();\n\tconst chunks: Uint8Array[] = [];\n\tlet totalLength = 0;\n\n\t// Read the stream until it is finished.\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tchunks.push(value);\n\t\ttotalLength += value.length;\n\t}\n\n\t// Allocate a new Uint8Array to hold all the data.\n\tconst result = new Uint8Array(totalLength);\n\tlet offset = 0;\n\tfor (const chunk of chunks) {\n\t\tresult.set(chunk, offset);\n\t\toffset += chunk.length;\n\t}\n\treturn result;\n}\n","import { createRun } from \"./utils\";\nimport { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport { WorkersAIImageModel } from \"./workersai-image-model\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels, TextGenerationModels } from \"./workersai-models\";\n\nexport type WorkersAISettings = (\n\t| {\n\t\t\t/**\n\t\t\t * Provide a Cloudflare AI binding.\n\t\t\t */\n\t\t\tbinding: Ai;\n\n\t\t\t/**\n\t\t\t * Credentials must be absent when a binding is given.\n\t\t\t */\n\t\t\taccountId?: never;\n\t\t\tapiKey?: never;\n\t }\n\t| {\n\t\t\t/**\n\t\t\t * Provide Cloudflare API credentials directly. 
Must be used if a binding is not specified.\n\t\t\t */\n\t\t\taccountId: string;\n\t\t\tapiKey: string;\n\t\t\t/**\n\t\t\t * Both binding must be absent if credentials are used directly.\n\t\t\t */\n\t\t\tbinding?: never;\n\t }\n) & {\n\t/**\n\t * Optionally specify a gateway.\n\t */\n\tgateway?: GatewayOptions;\n};\n\nexport interface WorkersAI {\n\t(modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings?: WorkersAIChatSettings,\n\t): WorkersAIChatLanguageModel;\n\n\t/**\n\t * Creates a model for image generation.\n\t **/\n\timage(modelId: ImageGenerationModels, settings?: WorkersAIImageSettings): WorkersAIImageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createWorkersAI(options: WorkersAISettings): WorkersAI {\n\t// Use a binding if one is directly provided. Otherwise use credentials to create\n\t// a `run` method that calls the Cloudflare REST API.\n\tlet binding: Ai | undefined;\n\n\tif (options.binding) {\n\t\tbinding = options.binding;\n\t} else {\n\t\tconst { accountId, apiKey } = options;\n\t\tbinding = {\n\t\t\trun: createRun({ accountId, apiKey }),\n\t\t} as Ai;\n\t}\n\n\tif (!binding) {\n\t\tthrow new Error(\"Either a binding or credentials must be provided.\");\n\t}\n\n\tconst createChatModel = (modelId: TextGenerationModels, settings: WorkersAIChatSettings = {}) =>\n\t\tnew WorkersAIChatLanguageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.chat\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst createImageModel = (\n\t\tmodelId: ImageGenerationModels,\n\t\tsettings: WorkersAIImageSettings = {},\n\t) =>\n\t\tnew WorkersAIImageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.image\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst provider = (modelId: TextGenerationModels, settings?: WorkersAIChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(modelId, settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\tprovider.image = createImageModel;\n\tprovider.imageModel = createImageModel;\n\n\treturn 
provider;\n}\n"],"mappings":";;;;;;;;;;;;AAyDO,SAAS,UAAU,QAAgC;AACzD,QAAM,EAAE,WAAW,OAAO,IAAI;AAG9B,SAAO,eAAe,IACrB,OACA,QACA,SAC0F;AAC1F,UAAM,EAAE,SAAS,QAAQ,cAAc,mBAAmB,GAAG,mBAAmB,IAC/E,WAAW,CAAC;AAEb,UAAM,YAAY,IAAI,gBAAgB;AACtC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,kBAAkB,GAAG;AAE9D,UAAI;AACH,cAAM,WAAW,MAAM,SAAS;AAChC,YAAI,CAAC,UAAU;AACd;AAAA,QACD;AACA,kBAAU,OAAO,KAAK,QAAQ;AAAA,MAC/B,SAAS,OAAO;AACf,cAAM,IAAI;AAAA,UACT,qBAAqB,GAAG;AAAA,QACzB;AAAA,MACD;AAAA,IACD;AAEA,UAAM,MAAM,iDAAiD,SAAS,WAAW,KAAK,GAAG,YAAY,IAAI,SAAS,KAAK,EAAE;AAGzH,UAAM,UAAU;AAAA,MACf,gBAAgB;AAAA,MAChB,eAAe,UAAU,MAAM;AAAA,IAChC;AAEA,UAAM,OAAO,KAAK,UAAU,MAAM;AAGlC,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MACjC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,IACD,CAAC;AAGD,QAAI,mBAAmB;AACtB,aAAO;AAAA,IACR;AAGA,QAAK,OAAiC,WAAW,MAAM;AACtD,UAAI,SAAS,MAAM;AAClB,eAAO,SAAS;AAAA,MACjB;AACA,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC5D;AAGA,UAAM,OAAO,MAAM,SAAS,KAEzB;AACH,WAAO,KAAK;AAAA,EACb;AACD;;;ACzHA;AAAA,EAIC;AAAA,OACM;;;ACCA,SAAS,+BAA+B,QAO7C;AACD,QAAM,WAAgC,CAAC;AACvC,QAAM,SAIA,CAAC;AAEP,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACvC,YAAQ,MAAM;AAAA,MACb,KAAK,UAAU;AACd,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACD;AAAA,MAEA,KAAK,QAAQ;AACZ,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS,QACP,IAAI,CAAC,SAAS;AACd,oBAAQ,KAAK,MAAM;AAAA,cAClB,KAAK,QAAQ;AACZ,uBAAO,KAAK;AAAA,cACb;AAAA,cACA,KAAK,SAAS;AAEb,oBAAI,KAAK,iBAAiB,YAAY;AAGrC,yBAAO,KAAK;AAAA,oBACX,UAAU,KAAK;AAAA,oBACf,OAAO,KAAK;AAAA,oBACZ,kBACC,KAAK;AAAA,kBACP,CAAC;AAAA,gBACF;AACA,uBAAO;AAAA,cACR;AAAA,YACD;AAAA,UACD,CAAC,EACA,KAAK,IAAI;AAAA,QACZ,CAAC;AACD;AAAA,MACD;AAAA,MAEA,KAAK,aAAa;AACjB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC3B,kBAAQ,KAAK,MAAM;AAAA,YAClB,KAAK,QAAQ;AACZ,sBAAQ,KAAK;AACb;AAAA,YACD;AAAA,YACA,KAAK,aAAa;AACjB,qBAAO,KAAK,UAAU;AAAA,gBACrB,MAAM,KAAK;AAAA,gBACX,YAAY,KAAK;AAAA,cAClB,CAAC;AAED,wBAAU,KAAK;AAAA,gBACd,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACT,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACpC;AAAA,cACD,CAAC;AACD;AAAA,YACD;AAAA,YACA,SAAS;AACR,oBAAM,kBAAkB;AACxB,oBAAM,IAAI;AAAA,gBACT,qBAAqB,eAAe;AAAA,cACrC;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAEA,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACC,UAAU,SAAS,IAChB,UAAU;AAAA,YACV,CAAC;AAAA,cACA,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,YACnC,OAAO;AAAA,cACN,IAAI;AAAA,cACJ,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,YACnC;AAAA,UACD,IACC;AAAA,QACL,CAAC;AAED;AAAA,MACD;AAAA,MACA,KAAK,QAAQ;AACZ,mBAAW,gBAAgB,SAAS;AACnC,mBAAS,KAAK;AAAA,YACb,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC5C,CAAC;AAAA,QACF;AACA;AAAA,MACD;AAAA,MACA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAEA,SAAO,EAAE,UAAU,OAAO;AAC3B;;;ACtIA;AAgBO,IAAM,iBAAN,cAA6B,gBAAgB;AAAA;AAAA,EAGhD,YAAY,UAAU,EAAE,SAAS,MAAM,GAAG;AACtC,UAAM;AAAA,MACF,WAAW,CAAC,OAAO,eAAe;AAC9B,gBAAQ,mBAAK,gBAAe;AAC5B,eAAO,MAAM;AACT,gBAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,gBAAM,UAAU,QAAQ,UAAU,MAAM,QAAQ,IAAI,IAAI;AACxD,cAAI,YAAY,MAAM,YAAa,MAAM,SAAS,MAC7C,YAAY,MAAO,UAAU,IAAK,UAAU;AAC7C,uBAAW,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC;AAC1C,oBAAQ,MAAM,MAAM,UAAU,CAAC;AAC/B;AAAA,UACJ;AACA,cAAI,YAAY;AACZ;AACJ,gBAAM,WAAW,MAAM,UAAU,CAAC,MAAM,OAAO,UAAU,IAAI;AAC7D,qBAAW,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC;AAC3C,kBAAQ,MAAM,MAAM,UAAU,CAAC;AAAA,QACnC;AACA,2BAAK,cAAe;AAAA,MACxB;AAAA,MACA,OAAO,CAAC,eAAe;AACnB,YAAI,mBAAK,kBAAiB;AACtB;AACJ,cAAM,cAAc,QAAQ,WAAW,mBAAK,cAAa,SAAS,IAAI,IAChE,mBAAK,cAAa,MAAM,GAAG,EAAE,IAC7B,mBAAK;AACX,mBAAW,QAAQ,WAAW;AAAA,MAClC;AAAA,IACJ,CAAC;AA/BL,qCAAe;AAAA,EAgCf;AACJ;AAjCI;;;AChBG,SAAS,OAAO,OAAO;AAC1B,MAAI,UAAU,IAAI,kBAAkB;AACpC,MAAIA,SAAQ,IAAI,eAAe,EAAE,SAAS,KA
AK,CAAC;AAChD,SAAO,MAAM,YAAY,OAAO,EAAE,YAAYA,MAAK;AACvD;AACO,SAAS,MAAM,OAAO;AACzB,MAAI,MAAM;AACV,MAAI,QAAQ,IAAI,KAAK,KAAK;AAE1B,MAAI,MAAM,SAAS,MAAM;AACzB,MAAI,KAAK;AACL,WAAO;AAAA,MACH,MAAM,UAAU,GAAG,GAAG;AAAA,MACtB,MAAM,UAAU,MAAM,MAAM,CAAC,EAAE,MAAM;AAAA,IACzC;AAAA,EACJ;AACJ;;;ACgBA,gBAAuB,OAAO,KAAK,QAAQ;AAEvC,MAAI,CAAC,IAAI;AACL;AACJ,MAAI,OAAa,OAAO,IAAI,IAAI;AAChC,MAAI,MAAM,SAAS,KAAK,UAAU;AAClC,MAAI;AACJ,aAAS;AACL,QAAI,UAAU,OAAO,SAAS;AAC1B,aAAO,OAAO,OAAO;AAAA,IACzB;AACA,WAAO,MAAM,OAAO,KAAK;AACzB,QAAI,KAAK;AACL;AACJ,QAAI,CAAC,KAAK,OAAO;AACb,UAAI;AACA,cAAM;AACV,cAAQ;AACR;AAAA,IACJ;AACA,QAAI,CAAC,OAAO,KAAK,IAAU,MAAM,KAAK,KAAK,KAAK,CAAC;AACjD,QAAI,CAAC;AACD;AACJ,QAAI,UAAU,QAAQ;AAClB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,MAAM,KAAK,IAAK,MAAM,KAAK,IAAI,OAAO,QAAS;AAAA,IAClE,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI;AAAA,IACnB,WACS,UAAU,MAAM;AACrB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B;AAAA,EACJ;AACJ;;;ACzEO,SAAS,kBAAkB,QAAsD;AACvF,QAAM,QACL,OAGC,SAAS;AAAA,IACV,eAAe;AAAA,IACf,mBAAmB;AAAA,EACpB;AAEA,SAAO;AAAA,IACN,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,EACzB;AACD;;;ALMO,IAAM,6BAAN,MAA4D;AAAA,EASlE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA,IACd;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,MAAM;AAAA,cACN,aAAa,KAAK;AAAA,YACnB;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAI,8BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,YAAY,GAAG,mBAAmB,IAAI,KAAK;AAG5D,UAAM,EAAE,UAAU,OAAO,IAAI;AAAA,MAC5B,QAAQ;AAAA,IACT;AAGA,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,QACC;AAAA,QACA,YAAY,KAAK;AAAA,QACjB,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,kBAAkB,gBAAgB;AACrC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,MACC,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,OACxD,KAAK,UAAU,OAAO,QAAQ,IAC9B,OAAO;AAAA,MACX,WAAW,iBAAiB,MAAM;AAAA,MAClC,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAG/C,UAAM,EAAE,UAAU,O
AAO,IAAI;AAAA,MAC5B,QAAQ;AAAA,IACT;AAKA,QAAI,KAAK,OAAO,UAAU,mBAAmB,QAAQ,GAAG;AACvD,YAAMC,YAAW,MAAM,KAAK,WAAW,OAAO;AAE9C,UAAIA,qBAAoB,gBAAgB;AACvC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACxC;AAEA,aAAO;AAAA,QACN,QAAQ,IAAI,eAA0C;AAAA,UACrD,MAAM,MAAM,YAAY;AACvB,gBAAIA,UAAS,MAAM;AAClB,yBAAW,QAAQ;AAAA,gBAClB,MAAM;AAAA,gBACN,WAAWA,UAAS;AAAA,cACrB,CAAC;AAAA,YACF;AACA,gBAAIA,UAAS,WAAW;AACvB,yBAAW,YAAYA,UAAS,WAAW;AAC1C,2BAAW,QAAQ;AAAA,kBAClB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACJ,CAAC;AAAA,cACF;AAAA,YACD;AACA,uBAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAOA,UAAS;AAAA,YACjB,CAAC;AACD,uBAAW,MAAM;AAAA,UAClB;AAAA,QACD,CAAC;AAAA,QACD,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,QAClD;AAAA,MACD;AAAA,IACD;AAGA,UAAM,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAGhD,QAAI,OAAO,WAAW,KAAK,OAAO,WAAW,GAAG;AAC/C,YAAM,IAAI,MAAM,gDAAgD;AAAA,IACjE;AAEA,UAAM,YAAY,OAAO,CAAC;AAE1B,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC;AAAA,QACA,YAAY,KAAK;AAAA,QACjB,QAAQ;AAAA,QACR,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA;AAAA,QAGZ,GAAI,YAAY,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,EAAE,IAAI,CAAC;AAAA;AAAA,QAE1D,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,EAAE,oBAAoB,iBAAiB;AAC1C,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,UAAM,aAAa,OAAO,IAAI,SAAS,QAAQ,CAAC;AAChD,QAAI,QAAQ,EAAE,cAAc,GAAG,kBAAkB,EAAE;AAEnD,WAAO;AAAA,MACN,QAAQ,IAAI,eAA0C;AAAA,QACrD,MAAM,MAAM,YAAY;AACvB,2BAAiB,SAAS,YAAY;AACrC,gBAAI,CAAC,MAAM,MAAM;AAChB;AAAA,YACD;AACA,gBAAI,MAAM,SAAS,UAAU;AAC5B;AAAA,YACD;AACA,kBAAM,QAAQ,KAAK,MAAM,MAAM,IAAI;AACnC,gBAAI,MAAM,OAAO;AAChB,sBAAQ,kBAAkB,KAAK;AAAA,YAChC;AACA,kBAAM,UAAU,UACf,WAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,WAAW,MAAM;AAAA,YAClB,CAAC;AAAA,UACH;AACA,qBAAW,QAAQ;AAAA,YAClB,MAAM;AAAA,YACN,cAAc;AAAA,YACd;AAAA,UACD,CAAC;AACD,qBAAW,MAAM;AAAA,QAClB;AAAA,MACD,CAAC;AAAA,MACD,SAAS,EAAE,WAAW,UAAU,aAAa,KAAK;AAAA,MAClD;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,iBAAiB,QAAa;AAEtC,MAAI,OAAO,cAAc,MAAM,QAAQ,OAAO,UAAU,GAAG;AAC1D,WAAO,OAAO,WAAW,IAAI,CAAC,aAAkB;AAE/C,UAAI,SAAS,YAAY,SAAS,IAAI;AACrC,eAAO;AAAA,UACN,cAAc;AAAA,UACd,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,MACC,OAAO,SAAS,SAAS,cAAc,WACpC,SAAS,SAAS,YAClB,KAAK,UAAU,SAAS,SAAS,aAAa,CAAC,CAAC;AAAA,QACrD;AAAA,MACD;AACA,aAAO;AAAA,QACN,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS;AAAA,QACnB,MACC,OAAO,SAAS,cAAc,WAC3B,SAAS,YACT,KAAK,UAAU,SAAS,aAAa,CAAC,CAAC;AAAA,MAC5C;AAAA,IACD,CAAC;AAAA,EACF;AAEA,SAAO,CAAC;AACT;AAEA,SAAS,0BACR,MAGC;AAED,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AAClB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACxC,MAAM;AAAA,IACN,UAAU;AAAA,MACT,MAAM,KAAK;AAAA;AAAA,MAEX,aAAa,KAAK;AAAA;AAAA,MAElB,YAAY,KAAK;AAAA,IAClB;AAAA,EACD,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACvB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACrD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA;AAAA;AAAA,IAIjD,KAAK;AACJ,aAAO;AAAA,QACN,OAAO,YAAY;AAAA,UAClB,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW;AAAA,QAC7C;AAAA,QACA,aAAa;AAAA,MACd;AAAA,IACD,SAAS;AACR,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACnE;AAAA,EACD;AACD;AAEA,SAAS,mBAAmB,UAA+B;AAC1D,SAAO,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,CAAC,EAAE,SAAS;AACtE;;;AMnYO,IAAM,sBAAN,MAAkD;AAAA,EAUxD,YACU,SACA,UACA,QACR;AAHQ;AACA;AACA;AAZV,wBAAS,wBAAuB;AAAA,EAa7B;AAAA,EAXH,IAAI,mBAA2B;AAC9B,WAAO,KAAK,SAAS,oBAAoB;AAAA,EAC1C;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAOA,MAAM,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IA
CA;AAAA;AAAA;AAAA,EAGD,GAEE;AACD,UAAM,EAAE,OAAO,OAAO,IAAI,4BAA4B,IAAI;AAE1D,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACxB,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY;AACjC,YAAM,eAA2C,MAAM,KAAK,OAAO,QAAQ;AAAA,QAC1E,KAAK;AAAA,QACL;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAGA,aAAO,mBAAmB,YAAY;AAAA,IACvC;AAEA,UAAM,SAAuB,MAAM,QAAQ;AAAA,MAC1C,MAAM,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAAC;AAAA,IAChD;AAIA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,MACA,UAAU;AAAA,QACT,WAAW,oBAAI,KAAK;AAAA,QACpB,SAAS,KAAK;AAAA,QACd,SAAS,CAAC;AAAA,MACX;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,4BAA4B,MAA0B;AAC9D,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,QAAW,MAAS;AAEjE,SAAO;AAAA,IACN,OAAO,aAAa,KAAK;AAAA,IACzB,QAAQ,aAAa,MAAM;AAAA,EAC5B;AACD;AAEA,SAAS,aAAa,OAAgB;AACrC,MAAI,UAAU,MAAM,CAAC,MAAO,QAAO;AACnC,QAAM,SAAS,OAAO,KAAK;AAC3B,SAAO,OAAO,UAAU,MAAM,IAAI,SAAS;AAC5C;AAEA,eAAe,mBAAmBC,SAAyD;AAC1F,QAAM,SAASA,QAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AAC9B,MAAI,cAAc;AAGlB,SAAO,MAAM;AACZ,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,WAAO,KAAK,KAAK;AACjB,mBAAe,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC3B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EACjB;AACA,SAAO;AACR;;;ACxDO,SAAS,gBAAgB,SAAuC;AAGtE,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACpB,cAAU,QAAQ;AAAA,EACnB,OAAO;AACN,UAAM,EAAE,WAAW,OAAO,IAAI;AAC9B,cAAU;AAAA,MACT,KAAK,UAAU,EAAE,WAAW,OAAO,CAAC;AAAA,IACrC;AAAA,EACD;AAEA,MAAI,CAAC,SAAS;AACb,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,kBAAkB,CAAC,SAA+B,WAAkC,CAAC,MAC1F,IAAI,2BAA2B,SAAS,UAAU;AAAA,IACjD,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,mBAAmB,CACxB,SACA,WAAmC,CAAC,MAEpC,IAAI,oBAAoB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,WAAW,CAAC,SAA+B,aAAqC;AACrF,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EACzC;AAEA,WAAS,OAAO;AAChB,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,SAAO;AACR;","names":["split","response","stream"]}
package/package.json CHANGED
@@ -2,7 +2,7 @@
  "name": "workers-ai-provider",
  "description": "Workers AI Provider for the vercel AI SDK",
  "type": "module",
- "version": "0.3.1",
+ "version": "0.4.0",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "repository": {
@@ -31,8 +31,8 @@
  "serverless"
  ],
  "dependencies": {
- "@cloudflare/workers-types": "^4.20250313.0",
- "@ai-sdk/provider": "^1.1.2"
+ "@cloudflare/workers-types": "^4.20250509.0",
+ "@ai-sdk/provider": "^1.1.3"
  },
  "scripts": {
  "build": "rm -rf dist && tsup src/index.ts --dts --sourcemap --format esm --target es2020",
@@ -175,12 +175,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
  typeof output.response === "object" && output.response !== null
  ? JSON.stringify(output.response) // ai-sdk expects a string here
  : output.response,
- toolCalls: output.tool_calls?.map((toolCall) => ({
- toolCallType: "function",
- toolCallId: toolCall.name,
- toolName: toolCall.name,
- args: JSON.stringify(toolCall.arguments || {}),
- })),
+ toolCalls: processToolCalls(output),
  finishReason: "stop", // TODO: mapWorkersAIFinishReason(response.finish_reason),
  rawCall: { rawPrompt: messages, rawSettings: args },
  usage: mapWorkersAIUsage(output),
@@ -307,6 +302,37 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
  }
  }

+ function processToolCalls(output: any) {
+ // Check for OpenAI format tool calls first
+ if (output.tool_calls && Array.isArray(output.tool_calls)) {
+ return output.tool_calls.map((toolCall: any) => {
+ // Handle new format
+ if (toolCall.function && toolCall.id) {
+ return {
+ toolCallType: "function",
+ toolCallId: toolCall.id,
+ toolName: toolCall.function.name,
+ args:
+ typeof toolCall.function.arguments === "string"
+ ? toolCall.function.arguments
+ : JSON.stringify(toolCall.function.arguments || {}),
+ };
+ }
+ return {
+ toolCallType: "function",
+ toolCallId: toolCall.name,
+ toolName: toolCall.name,
+ args:
+ typeof toolCall.arguments === "string"
+ ? toolCall.arguments
+ : JSON.stringify(toolCall.arguments || {}),
+ };
+ });
+ }
+
+ return [];
+ }
+
  function prepareToolsAndToolChoice(
  mode: Parameters<LanguageModelV1["doGenerate"]>[0]["mode"] & {
  type: "regular";