workers-ai-provider 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../node_modules/secure-json-parse/index.js","../src/workersai-chat-language-model.ts","../src/convert-to-workersai-chat-messages.ts","../node_modules/@ai-sdk/provider-utils/node_modules/nanoid/non-secure/index.js","../node_modules/@ai-sdk/provider-utils/src/combine-headers.ts","../node_modules/@ai-sdk/provider-utils/src/convert-async-generator-to-readable-stream.ts","../node_modules/@ai-sdk/provider-utils/src/extract-response-headers.ts","../node_modules/@ai-sdk/provider-utils/src/generate-id.ts","../node_modules/@ai-sdk/provider-utils/src/get-error-message.ts","../node_modules/@ai-sdk/provider-utils/src/is-abort-error.ts","../node_modules/@ai-sdk/provider-utils/src/load-api-key.ts","../node_modules/@ai-sdk/provider-utils/src/load-setting.ts","../node_modules/@ai-sdk/provider-utils/src/parse-json.ts","../node_modules/@ai-sdk/provider-utils/src/validate-types.ts","../node_modules/@ai-sdk/provider-utils/src/validator.ts","../node_modules/@ai-sdk/provider-utils/src/post-to-api.ts","../node_modules/@ai-sdk/provider-utils/src/remove-undefined-entries.ts","../node_modules/@ai-sdk/provider-utils/src/response-handler.ts","../node_modules/@ai-sdk/provider-utils/src/uint8-utils.ts","../node_modules/@ai-sdk/provider-utils/src/without-trailing-slash.ts","../src/workersai-error.ts","../src/index.ts"],"sourcesContent":["'use strict'\n\nconst hasBuffer = typeof Buffer !== 'undefined'\nconst suspectProtoRx = /\"(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])(?:p|\\\\u0070)(?:r|\\\\u0072)(?:o|\\\\u006[Ff])(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])\"\\s*:/\nconst suspectConstructorRx = /\"(?:c|\\\\u0063)(?:o|\\\\u006[Ff])(?:n|\\\\u006[Ee])(?:s|\\\\u0073)(?:t|\\\\u0074)(?:r|\\\\u0072)(?:u|\\\\u0075)(?:c|\\\\u0063)(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:r|\\\\u0072)\"\\s*:/\n\nfunction _parse (text, reviver, options) {\n // Normalize arguments\n if (options == null) {\n if (reviver !== null && typeof reviver === 'object') {\n options = reviver\n reviver = undefined\n }\n }\n\n if (hasBuffer && Buffer.isBuffer(text)) {\n text = text.toString()\n }\n\n // BOM checker\n if (text && text.charCodeAt(0) === 0xFEFF) {\n text = text.slice(1)\n }\n\n // Parse normally, allowing exceptions\n const obj = JSON.parse(text, reviver)\n\n // Ignore null and non-objects\n if (obj === null || typeof obj !== 'object') {\n return obj\n }\n\n const protoAction = (options && options.protoAction) || 'error'\n const constructorAction = (options && options.constructorAction) || 'error'\n\n // options: 'error' (default) / 'remove' / 'ignore'\n if (protoAction === 'ignore' && constructorAction === 'ignore') {\n return obj\n }\n\n if (protoAction !== 'ignore' && constructorAction !== 'ignore') {\n if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {\n return obj\n }\n } else if (protoAction !== 'ignore' && constructorAction === 'ignore') {\n if (suspectProtoRx.test(text) === false) {\n return obj\n }\n } else {\n if (suspectConstructorRx.test(text) === false) {\n return obj\n }\n }\n\n // Scan result for proto keys\n return filter(obj, { protoAction, constructorAction, safe: options && options.safe })\n}\n\nfunction filter (obj, { protoAction = 'error', constructorAction = 'error', safe } = {}) {\n let next = [obj]\n\n while (next.length) {\n const nodes = next\n next = []\n\n for (const node of nodes) {\n if (protoAction !== 'ignore' && Object.prototype.hasOwnProperty.call(node, '__proto__')) { // Avoid calling node.hasOwnProperty directly\n if (safe === true) 
{\n return null\n } else if (protoAction === 'error') {\n throw new SyntaxError('Object contains forbidden prototype property')\n }\n\n delete node.__proto__ // eslint-disable-line no-proto\n }\n\n if (constructorAction !== 'ignore' &&\n Object.prototype.hasOwnProperty.call(node, 'constructor') &&\n Object.prototype.hasOwnProperty.call(node.constructor, 'prototype')) { // Avoid calling node.hasOwnProperty directly\n if (safe === true) {\n return null\n } else if (constructorAction === 'error') {\n throw new SyntaxError('Object contains forbidden prototype property')\n }\n\n delete node.constructor\n }\n\n for (const key in node) {\n const value = node[key]\n if (value && typeof value === 'object') {\n next.push(value)\n }\n }\n }\n }\n return obj\n}\n\nfunction parse (text, reviver, options) {\n const stackTraceLimit = Error.stackTraceLimit\n Error.stackTraceLimit = 0\n try {\n return _parse(text, reviver, options)\n } finally {\n Error.stackTraceLimit = stackTraceLimit\n }\n}\n\nfunction safeParse (text, reviver) {\n const stackTraceLimit = Error.stackTraceLimit\n Error.stackTraceLimit = 0\n try {\n return _parse(text, reviver, { safe: true })\n } catch (_e) {\n return null\n } finally {\n Error.stackTraceLimit = stackTraceLimit\n }\n}\n\nmodule.exports = parse\nmodule.exports.default = parse\nmodule.exports.parse = parse\nmodule.exports.safeParse = safeParse\nmodule.exports.scan = filter\n","import {\n type LanguageModelV1,\n type LanguageModelV1CallWarning,\n type LanguageModelV1FinishReason,\n type LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport type {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from \"@ai-sdk/provider-utils\";\nimport { z } from \"zod\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport { mapWorkersAIFinishReason } from \"./map-workersai-finish-reason\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport { workersAIFailedResponseHandler } from \"./workersai-error\";\n\nimport { events } from \"fetch-event-stream\";\n\ntype WorkersAIChatConfig = {\n provider: string;\n binding: Ai;\n};\n\nexport class WorkersAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = \"v1\";\n readonly defaultObjectGenerationMode = \"json\";\n\n readonly modelId: BaseAiTextGenerationModels;\n readonly settings: WorkersAIChatSettings;\n\n private readonly config: WorkersAIChatConfig;\n\n constructor(\n modelId: BaseAiTextGenerationModels,\n settings: WorkersAIChatSettings,\n config: WorkersAIChatConfig\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"frequencyPenalty\",\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"presencePenalty\",\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n 
random_seed: seed,\n\n // messages:\n messages: convertToWorkersAIChatMessages(prompt),\n };\n\n switch (type) {\n case \"regular\": {\n return {\n args: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },\n warnings,\n };\n }\n\n case \"object-json\": {\n return {\n args: {\n ...baseArgs,\n response_format: { type: \"json_object\" },\n },\n warnings,\n };\n }\n\n case \"object-tool\": {\n return {\n args: {\n ...baseArgs,\n tool_choice: \"any\",\n tools: [{ type: \"function\", function: mode.tool }],\n },\n warnings,\n };\n }\n\n // @ts-expect-error - this is unreachable code\n // TODO: fixme\n case \"object-grammar\": {\n throw new UnsupportedFunctionalityError({\n functionality: \"object-grammar mode\",\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1[\"doGenerate\"]>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n const { args, warnings } = this.getArgs(options);\n\n console.warn(warnings);\n\n const response = (await this.config.binding.run(args.model, {\n messages: args.messages,\n })) as { response: string };\n\n return {\n text: response.response,\n finishReason: \"stop\",\n rawCall: { rawPrompt: args.messages, rawSettings: args },\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1[\"doStream\"]>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = (await this.config.binding.run(args.model, {\n messages: args.messages,\n stream: true,\n })) as ReadableStream;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof workersAIChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n async transform(chunk, controller) {\n const chunkToText = new TextDecoder().decode(\n chunk as unknown as Uint8Array\n );\n const chunks = events(new Response(chunkToText));\n for await (const singleChunk of chunks) {\n if (!singleChunk.data) {\n continue;\n }\n if (singleChunk.data === \"[DONE]\") {\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\",\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n },\n });\n return;\n }\n const data = JSON.parse(singleChunk.data);\n console.log(\"data\", data);\n controller.enqueue({\n type: \"text-delta\",\n textDelta: data.response ?? \"DATALOSS\",\n });\n }\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\",\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n },\n });\n },\n })\n ),\n rawCall: { rawPrompt: args.messages, rawSettings: args },\n warnings,\n };\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst workersAIChatResponseSchema = z.object({\n response: z.string(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst workersAIChatChunkSchema = z.instanceof(Uint8Array);\n\nfunction prepareToolsAndToolChoice(\n mode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n type: \"regular\";\n }\n) {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined };\n }\n\n const mappedTools = tools.map((tool) => ({\n type: \"function\",\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n }));\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mappedTools, tool_choice: undefined };\n }\n\n const type = toolChoice.type;\n\n console.log(mode, type);\n\n switch (type) {\n case \"auto\":\n return { tools: mappedTools, tool_choice: type };\n case \"none\":\n return { tools: mappedTools, tool_choice: type };\n case \"required\":\n return { tools: mappedTools, tool_choice: \"any\" };\n\n // workersAI does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case \"tool\":\n return {\n tools: mappedTools.filter(\n (tool) => tool.function.name === toolChoice.toolName\n ),\n tool_choice: \"any\",\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);\n }\n }\n}\n","import {\n type LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\n// TODO\nexport function convertToWorkersAIChatMessages(\n prompt: LanguageModelV1Prompt\n): WorkersAIChatPrompt {\n const messages: WorkersAIChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case \"system\": {\n messages.push({ role: \"system\", content });\n break;\n }\n\n case \"user\": {\n messages.push({\n role: \"user\",\n content: content\n .map((part) => {\n switch (part.type) {\n case \"text\": {\n return part.text;\n }\n case \"image\": {\n throw new UnsupportedFunctionalityError({\n functionality: \"image-part\",\n });\n }\n }\n })\n .join(\"\"),\n });\n break;\n }\n\n case \"assistant\": {\n let text = \"\";\n const toolCalls: Array<{\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case \"text\": {\n text += part.text;\n break;\n }\n case \"tool-call\": {\n toolCalls.push({\n id: part.toolCallId,\n type: \"function\",\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: \"assistant\",\n content: text,\n tool_calls:\n toolCalls.length > 0\n ? 
toolCalls.map(({ function: { name, arguments: args } }) => ({\n id: \"null\",\n type: \"function\",\n function: { name, arguments: args },\n }))\n : undefined,\n });\n\n break;\n }\n case \"tool\": {\n for (const toolResponse of content) {\n messages.push({\n role: \"tool\",\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","let urlAlphabet =\n 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict'\nlet customAlphabet = (alphabet, defaultSize = 21) => {\n return (size = defaultSize) => {\n let id = ''\n let i = size\n while (i--) {\n id += alphabet[(Math.random() * alphabet.length) | 0]\n }\n return id\n }\n}\nlet nanoid = (size = 21) => {\n let id = ''\n let i = size\n while (i--) {\n id += urlAlphabet[(Math.random() * 64) | 0]\n }\n return id\n}\nexport { nanoid, customAlphabet }\n","export function combineHeaders(\n ...headers: Array<Record<string, string | undefined> | undefined>\n): Record<string, string | undefined> {\n return headers.reduce(\n (combinedHeaders, currentHeaders) => ({\n ...combinedHeaders,\n ...(currentHeaders ?? {}),\n }),\n {},\n ) as Record<string, string | undefined>;\n}\n","/**\n * Converts an AsyncGenerator to a ReadableStream.\n *\n * @template T - The type of elements produced by the AsyncGenerator.\n * @param {AsyncGenerator<T>} stream - The AsyncGenerator to convert.\n * @returns {ReadableStream<T>} - A ReadableStream that provides the same data as the AsyncGenerator.\n */\nexport function convertAsyncGeneratorToReadableStream<T>(\n stream: AsyncGenerator<T>,\n): ReadableStream<T> {\n return new ReadableStream<T>({\n /**\n * Called when the consumer wants to pull more data from the stream.\n *\n * @param {ReadableStreamDefaultController<T>} controller - The controller to enqueue data into the stream.\n * @returns {Promise<void>}\n */\n async pull(controller) {\n try {\n const { value, done } = await stream.next();\n if (done) {\n controller.close();\n } else {\n controller.enqueue(value);\n }\n } catch (error) {\n controller.error(error);\n }\n },\n /**\n * Called when the consumer cancels the stream.\n */\n cancel() {},\n });\n}\n","/**\nExtracts the headers from a response object and returns them as a key-value object.\n\n@param response - The response object to extract headers from.\n@returns The headers as a key-value object.\n*/\nexport function extractResponseHeaders(\n response: Response,\n): Record<string, string> {\n const headers: Record<string, string> = {};\n response.headers.forEach((value, key) => {\n headers[key] = value;\n });\n return headers;\n}\n","import { customAlphabet } from 'nanoid/non-secure';\n\n/**\n * Generates a 7-character random string to use for IDs. 
Not secure.\n */\nexport const generateId = customAlphabet(\n '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',\n 7,\n);\n","export function getErrorMessage(error: unknown | undefined) {\n if (error == null) {\n return 'unknown error';\n }\n\n if (typeof error === 'string') {\n return error;\n }\n\n if (error instanceof Error) {\n return error.message;\n }\n\n return JSON.stringify(error);\n}\n","export function isAbortError(error: unknown): error is Error {\n return (\n error instanceof Error &&\n (error.name === 'AbortError' || error.name === 'TimeoutError')\n );\n}\n","import { LoadAPIKeyError } from '@ai-sdk/provider';\n\nexport function loadApiKey({\n apiKey,\n environmentVariableName,\n apiKeyParameterName = 'apiKey',\n description,\n}: {\n apiKey: string | undefined;\n environmentVariableName: string;\n apiKeyParameterName?: string;\n description: string;\n}): string {\n if (typeof apiKey === 'string') {\n return apiKey;\n }\n\n if (apiKey != null) {\n throw new LoadAPIKeyError({\n message: `${description} API key must be a string.`,\n });\n }\n\n if (typeof process === 'undefined') {\n throw new LoadAPIKeyError({\n message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`,\n });\n }\n\n apiKey = process.env[environmentVariableName];\n\n if (apiKey == null) {\n throw new LoadAPIKeyError({\n message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`,\n });\n }\n\n if (typeof apiKey !== 'string') {\n throw new LoadAPIKeyError({\n message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`,\n });\n }\n\n return apiKey;\n}\n","import { LoadSettingError } from '@ai-sdk/provider';\n\nexport function loadSetting({\n settingValue,\n environmentVariableName,\n settingName,\n description,\n}: {\n settingValue: string | undefined;\n environmentVariableName: string;\n settingName: string;\n description: string;\n}): string {\n if (typeof settingValue === 'string') {\n return settingValue;\n }\n\n if (settingValue != null) {\n throw new LoadSettingError({\n message: `${description} setting must be a string.`,\n });\n }\n\n if (typeof process === 'undefined') {\n throw new LoadSettingError({\n message: `${description} setting is missing. Pass it using the '${settingName}' parameter. Environment variables is not supported in this environment.`,\n });\n }\n\n settingValue = process.env[environmentVariableName];\n\n if (settingValue == null) {\n throw new LoadSettingError({\n message: `${description} setting is missing. Pass it using the '${settingName}' parameter or the ${environmentVariableName} environment variable.`,\n });\n }\n\n if (typeof settingValue !== 'string') {\n throw new LoadSettingError({\n message: `${description} setting must be a string. 
The value of the ${environmentVariableName} environment variable is not a string.`,\n });\n }\n\n return settingValue;\n}\n","import { JSONParseError, TypeValidationError } from '@ai-sdk/provider';\nimport SecureJSON from 'secure-json-parse';\nimport { ZodSchema } from 'zod';\nimport { safeValidateTypes, validateTypes } from './validate-types';\nimport { Validator } from './validator';\n\n/**\n * Parses a JSON string into an unknown object.\n *\n * @param text - The JSON string to parse.\n * @returns {unknown} - The parsed JSON object.\n */\nexport function parseJSON({ text }: { text: string }): unknown;\n/**\n * Parses a JSON string into a strongly-typed object using the provided schema.\n *\n * @template T - The type of the object to parse the JSON into.\n * @param {string} text - The JSON string to parse.\n * @param {Validator<T>} schema - The schema to use for parsing the JSON.\n * @returns {T} - The parsed object.\n */\nexport function parseJSON<T>({\n text,\n schema,\n}: {\n text: string;\n schema: ZodSchema<T> | Validator<T>;\n}): T;\nexport function parseJSON<T>({\n text,\n schema,\n}: {\n text: string;\n schema?: ZodSchema<T> | Validator<T>;\n}): T {\n try {\n const value = SecureJSON.parse(text);\n\n if (schema == null) {\n return value;\n }\n\n return validateTypes({ value, schema });\n } catch (error) {\n if (\n JSONParseError.isJSONParseError(error) ||\n TypeValidationError.isTypeValidationError(error)\n ) {\n throw error;\n }\n\n throw new JSONParseError({ text, cause: error });\n }\n}\n\nexport type ParseResult<T> =\n | { success: true; value: T }\n | { success: false; error: JSONParseError | TypeValidationError };\n\n/**\n * Safely parses a JSON string and returns the result as an object of type `unknown`.\n *\n * @param text - The JSON string to parse.\n * @returns {object} Either an object with `success: true` and the parsed data, or an object with `success: false` and the error that occurred.\n */\nexport function safeParseJSON({ text }: { text: string }): ParseResult<unknown>;\n/**\n * Safely parses a JSON string into a strongly-typed object, using a provided schema to validate the object.\n *\n * @template T - The type of the object to parse the JSON into.\n * @param {string} text - The JSON string to parse.\n * @param {Validator<T>} schema - The schema to use for parsing the JSON.\n * @returns An object with either a `success` flag and the parsed and typed data, or a `success` flag and an error object.\n */\nexport function safeParseJSON<T>({\n text,\n schema,\n}: {\n text: string;\n schema: ZodSchema<T> | Validator<T>;\n}): ParseResult<T>;\nexport function safeParseJSON<T>({\n text,\n schema,\n}: {\n text: string;\n schema?: ZodSchema<T> | Validator<T>;\n}):\n | { success: true; value: T }\n | { success: false; error: JSONParseError | TypeValidationError } {\n try {\n const value = SecureJSON.parse(text);\n\n if (schema == null) {\n return {\n success: true,\n value: value as T,\n };\n }\n\n return safeValidateTypes({ value, schema });\n } catch (error) {\n return {\n success: false,\n error: JSONParseError.isJSONParseError(error)\n ? 
error\n : new JSONParseError({ text, cause: error }),\n };\n }\n}\n\nexport function isParsableJson(input: string): boolean {\n try {\n SecureJSON.parse(input);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n@deprecated Use `isParsableJson` instead.\n */\nexport const isParseableJson = isParsableJson;\n","import { TypeValidationError } from '@ai-sdk/provider';\nimport { ZodSchema } from 'zod';\nimport { Validator, isValidator, zodValidator } from './validator';\n\n/**\n * Validates the types of an unknown object using a schema and\n * return a strongly-typed object.\n *\n * @template T - The type of the object to validate.\n * @param {string} options.value - The object to validate.\n * @param {Validator<T>} options.schema - The schema to use for validating the JSON.\n * @returns {T} - The typed object.\n */\nexport function validateTypes<T>({\n value,\n schema: inputSchema,\n}: {\n value: unknown;\n schema: ZodSchema<T> | Validator<T>;\n}): T {\n const result = safeValidateTypes({ value, schema: inputSchema });\n\n if (!result.success) {\n throw new TypeValidationError({ value, cause: result.error });\n }\n\n return result.value;\n}\n\n/**\n * Safely validates the types of an unknown object using a schema and\n * return a strongly-typed object.\n *\n * @template T - The type of the object to validate.\n * @param {string} options.value - The JSON object to validate.\n * @param {Validator<T>} options.schema - The schema to use for validating the JSON.\n * @returns An object with either a `success` flag and the parsed and typed data, or a `success` flag and an error object.\n */\nexport function safeValidateTypes<T>({\n value,\n schema: inputSchema,\n}: {\n value: unknown;\n schema: ZodSchema<T> | Validator<T>;\n}):\n | { success: true; value: T }\n | { success: false; error: TypeValidationError } {\n const schema = isValidator(inputSchema)\n ? inputSchema\n : zodValidator(inputSchema);\n\n try {\n if (schema.validate == null) {\n return { success: true, value: value as T };\n }\n\n const validationResult = schema.validate(value);\n\n if (validationResult.success) {\n return validationResult;\n }\n\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: validationResult.error,\n }),\n };\n } catch (error) {\n return {\n success: false,\n error: TypeValidationError.isTypeValidationError(error)\n ? error\n : new TypeValidationError({ value, cause: error }),\n };\n }\n}\n","import { z } from 'zod';\n\n/**\n * Used to mark validator functions so we can support both Zod and custom schemas.\n */\nexport const validatorSymbol = Symbol('vercel.ai.validator');\n\nexport type Validator<OBJECT = unknown> = {\n /**\n * Used to mark validator functions so we can support both Zod and custom schemas.\n */\n [validatorSymbol]: true;\n\n /**\n * Optional. 
Validates that the structure of a value matches this schema,\n * and returns a typed version of the value if it does.\n */\n readonly validate?: (\n value: unknown,\n ) => { success: true; value: OBJECT } | { success: false; error: Error };\n};\n\n/**\n * Create a validator.\n *\n * @param validate A validation function for the schema.\n */\nexport function validator<OBJECT>(\n validate: (\n value: unknown,\n ) => { success: true; value: OBJECT } | { success: false; error: Error },\n): Validator<OBJECT> {\n return { [validatorSymbol]: true, validate };\n}\n\nexport function isValidator(value: unknown): value is Validator {\n return (\n typeof value === 'object' &&\n value !== null &&\n validatorSymbol in value &&\n value[validatorSymbol] === true &&\n 'validate' in value\n );\n}\n\nexport function zodValidator<OBJECT>(\n zodSchema: z.Schema<OBJECT>,\n): Validator<OBJECT> {\n return validator(value => {\n const result = zodSchema.safeParse(value);\n return result.success\n ? { success: true, value: result.data }\n : { success: false, error: result.error };\n });\n}\n","import { APICallError } from '@ai-sdk/provider';\nimport { extractResponseHeaders } from './extract-response-headers';\nimport { isAbortError } from './is-abort-error';\nimport { ResponseHandler } from './response-handler';\nimport { removeUndefinedEntries } from './remove-undefined-entries';\n\n// use function to allow for mocking in tests:\nconst getOriginalFetch = () => fetch;\n\nexport const postJsonToApi = async <T>({\n url,\n headers,\n body,\n failedResponseHandler,\n successfulResponseHandler,\n abortSignal,\n fetch,\n}: {\n url: string;\n headers?: Record<string, string | undefined>;\n body: unknown;\n failedResponseHandler: ResponseHandler<APICallError>;\n successfulResponseHandler: ResponseHandler<T>;\n abortSignal?: AbortSignal;\n fetch?: ReturnType<typeof getOriginalFetch>;\n}) =>\n postToApi({\n url,\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: {\n content: JSON.stringify(body),\n values: body,\n },\n failedResponseHandler,\n successfulResponseHandler,\n abortSignal,\n fetch,\n });\n\nexport const postToApi = async <T>({\n url,\n headers = {},\n body,\n successfulResponseHandler,\n failedResponseHandler,\n abortSignal,\n fetch = getOriginalFetch(),\n}: {\n url: string;\n headers?: Record<string, string | undefined>;\n body: {\n content: string | FormData | Uint8Array;\n values: unknown;\n };\n failedResponseHandler: ResponseHandler<Error>;\n successfulResponseHandler: ResponseHandler<T>;\n abortSignal?: AbortSignal;\n fetch?: ReturnType<typeof getOriginalFetch>;\n}) => {\n try {\n const response = await fetch(url, {\n method: 'POST',\n headers: removeUndefinedEntries(headers),\n body: body.content,\n signal: abortSignal,\n });\n\n const responseHeaders = extractResponseHeaders(response);\n\n if (!response.ok) {\n let errorInformation: {\n value: Error;\n responseHeaders?: Record<string, string> | undefined;\n };\n\n try {\n errorInformation = await failedResponseHandler({\n response,\n url,\n requestBodyValues: body.values,\n });\n } catch (error) {\n if (isAbortError(error) || APICallError.isAPICallError(error)) {\n throw error;\n }\n\n throw new APICallError({\n message: 'Failed to process error response',\n cause: error,\n statusCode: response.status,\n url,\n responseHeaders,\n requestBodyValues: body.values,\n });\n }\n\n throw errorInformation.value;\n }\n\n try {\n return await successfulResponseHandler({\n response,\n url,\n requestBodyValues: body.values,\n });\n } catch 
(error) {\n if (error instanceof Error) {\n if (isAbortError(error) || APICallError.isAPICallError(error)) {\n throw error;\n }\n }\n\n throw new APICallError({\n message: 'Failed to process successful response',\n cause: error,\n statusCode: response.status,\n url,\n responseHeaders,\n requestBodyValues: body.values,\n });\n }\n } catch (error) {\n if (isAbortError(error)) {\n throw error;\n }\n\n // unwrap original error when fetch failed (for easier debugging):\n if (error instanceof TypeError && error.message === 'fetch failed') {\n const cause = (error as any).cause;\n\n if (cause != null) {\n // Failed to connect to server:\n throw new APICallError({\n message: `Cannot connect to API: ${cause.message}`,\n cause,\n url,\n requestBodyValues: body.values,\n isRetryable: true, // retry when network error\n });\n }\n }\n\n throw error;\n }\n};\n","export function removeUndefinedEntries<T>(\n record: Record<string, T | undefined>,\n): Record<string, T> {\n return Object.fromEntries(\n Object.entries(record).filter(([_key, value]) => value != null),\n ) as Record<string, T>;\n}\n","import { APICallError, EmptyResponseBodyError } from '@ai-sdk/provider';\nimport {\n EventSourceParserStream,\n ParsedEvent,\n} from 'eventsource-parser/stream';\nimport { ZodSchema } from 'zod';\nimport { extractResponseHeaders } from './extract-response-headers';\nimport { ParseResult, parseJSON, safeParseJSON } from './parse-json';\n\nexport type ResponseHandler<RETURN_TYPE> = (options: {\n url: string;\n requestBodyValues: unknown;\n response: Response;\n}) => PromiseLike<{\n value: RETURN_TYPE;\n responseHeaders?: Record<string, string>;\n}>;\n\nexport const createJsonErrorResponseHandler =\n <T>({\n errorSchema,\n errorToMessage,\n isRetryable,\n }: {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n }): ResponseHandler<APICallError> =>\n async ({ response, url, requestBodyValues }) => {\n const responseBody = await response.text();\n const responseHeaders = extractResponseHeaders(response);\n\n // Some providers return an empty response body for some errors:\n if (responseBody.trim() === '') {\n return {\n responseHeaders,\n value: new APICallError({\n message: response.statusText,\n url,\n requestBodyValues,\n statusCode: response.status,\n responseHeaders,\n responseBody,\n isRetryable: isRetryable?.(response),\n }),\n };\n }\n\n // resilient parsing in case the response is not JSON or does not match the schema:\n try {\n const parsedError = parseJSON({\n text: responseBody,\n schema: errorSchema,\n });\n\n return {\n responseHeaders,\n value: new APICallError({\n message: errorToMessage(parsedError),\n url,\n requestBodyValues,\n statusCode: response.status,\n responseHeaders,\n responseBody,\n data: parsedError,\n isRetryable: isRetryable?.(response, parsedError),\n }),\n };\n } catch (parseError) {\n return {\n responseHeaders,\n value: new APICallError({\n message: response.statusText,\n url,\n requestBodyValues,\n statusCode: response.status,\n responseHeaders,\n responseBody,\n isRetryable: isRetryable?.(response),\n }),\n };\n }\n };\n\nexport const createEventSourceResponseHandler =\n <T>(\n chunkSchema: ZodSchema<T>,\n ): ResponseHandler<ReadableStream<ParseResult<T>>> =>\n async ({ response }: { response: Response }) => {\n const responseHeaders = extractResponseHeaders(response);\n\n if (response.body == null) {\n throw new EmptyResponseBodyError({});\n }\n\n return {\n responseHeaders,\n value: response.body\n 
.pipeThrough(new TextDecoderStream())\n .pipeThrough(new EventSourceParserStream())\n .pipeThrough(\n new TransformStream<ParsedEvent, ParseResult<T>>({\n transform({ data }, controller) {\n // ignore the 'DONE' event that e.g. OpenAI sends:\n if (data === '[DONE]') {\n return;\n }\n\n controller.enqueue(\n safeParseJSON({\n text: data,\n schema: chunkSchema,\n }),\n );\n },\n }),\n ),\n };\n };\n\nexport const createJsonStreamResponseHandler =\n <T>(\n chunkSchema: ZodSchema<T>,\n ): ResponseHandler<ReadableStream<ParseResult<T>>> =>\n async ({ response }: { response: Response }) => {\n const responseHeaders = extractResponseHeaders(response);\n\n if (response.body == null) {\n throw new EmptyResponseBodyError({});\n }\n\n let buffer = '';\n\n return {\n responseHeaders,\n value: response.body.pipeThrough(new TextDecoderStream()).pipeThrough(\n new TransformStream<string, ParseResult<T>>({\n transform(chunkText, controller) {\n if (chunkText.endsWith('\\n')) {\n controller.enqueue(\n safeParseJSON({\n text: buffer + chunkText,\n schema: chunkSchema,\n }),\n );\n buffer = '';\n } else {\n buffer += chunkText;\n }\n },\n }),\n ),\n };\n };\n\nexport const createJsonResponseHandler =\n <T>(responseSchema: ZodSchema<T>): ResponseHandler<T> =>\n async ({ response, url, requestBodyValues }) => {\n const responseBody = await response.text();\n\n const parsedResult = safeParseJSON({\n text: responseBody,\n schema: responseSchema,\n });\n\n const responseHeaders = extractResponseHeaders(response);\n\n if (!parsedResult.success) {\n throw new APICallError({\n message: 'Invalid JSON response',\n cause: parsedResult.error,\n statusCode: response.status,\n responseHeaders,\n responseBody,\n url,\n requestBodyValues,\n });\n }\n\n return {\n responseHeaders,\n value: parsedResult.value,\n };\n };\n","export function convertBase64ToUint8Array(base64String: string) {\n const base64Url = base64String.replace(/-/g, '+').replace(/_/g, '/');\n const latin1string = globalThis.atob(base64Url);\n return Uint8Array.from(latin1string, byte => byte.codePointAt(0)!);\n}\n\nexport function convertUint8ArrayToBase64(array: Uint8Array): string {\n let latin1string = '';\n\n // Note: regular for loop to support older JavaScript versions that\n // do not support for..of on Uint8Array\n for (let i = 0; i < array.length; i++) {\n latin1string += String.fromCodePoint(array[i]);\n }\n\n return globalThis.btoa(latin1string);\n}\n","export function withoutTrailingSlash(url: string | undefined) {\n return url?.replace(/\\/$/, '');\n}\n","import { createJsonErrorResponseHandler } from \"@ai-sdk/provider-utils\";\nimport { z } from \"zod\";\n\nconst workersAIErrorDataSchema = z.object({\n object: z.literal(\"error\"),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type WorkersAIErrorData = z.infer<typeof workersAIErrorDataSchema>;\n\nexport const workersAIFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: workersAIErrorDataSchema,\n errorToMessage: (data) => data.message,\n});\n","import { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\n\nexport interface WorkersAI {\n (\n modelId: BaseAiTextGenerationModels,\n settings?: WorkersAIChatSettings\n ): WorkersAIChatLanguageModel;\n\n /**\n Creates a model for text generation.\n */\n chat(\n modelId: BaseAiTextGenerationModels,\n settings?: WorkersAIChatSettings\n ): 
WorkersAIChatLanguageModel;\n}\n\nexport interface WorkersAISettings {\n /**\n Provide an `env.AI` binding to use for the AI inference. \n You can set up an AI bindings in your Workers project \n by adding the following this to `wrangler.toml`: \n \n ```toml\n[ai]\nbinding = \"AI\"\n ``` */\n binding: Ai;\n}\n\n/**\n Create a Workers AI provider instance.\n */\nexport function createWorkersAIProvider(options: WorkersAISettings): WorkersAI {\n const createChatModel = (\n modelId: BaseAiTextGenerationModels,\n settings: WorkersAIChatSettings = {}\n ) =>\n new WorkersAIChatLanguageModel(modelId, settings, {\n provider: \"workerai.chat\",\n binding: options.binding,\n });\n\n const provider = function (\n modelId: BaseAiTextGenerationModels,\n settings?: WorkersAIChatSettings\n ) {\n if (new.target) {\n throw new Error(\n \"The WorkersAI model function cannot be called with the new keyword.\"\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.chat = createChatModel;\n\n return provider as WorkersAI;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAEA,QAAM,YAAY,OAAO,WAAW;AACpC,QAAM,iBAAiB;AACvB,QAAM,uBAAuB;AAE7B,aAAS,OAAQ,MAAM,SAAS,SAAS;AAEvC,UAAI,WAAW,MAAM;AACnB,YAAI,YAAY,QAAQ,OAAO,YAAY,UAAU;AACnD,oBAAU;AACV,oBAAU;AAAA,QACZ;AAAA,MACF;AAEA,UAAI,aAAa,OAAO,SAAS,IAAI,GAAG;AACtC,eAAO,KAAK,SAAS;AAAA,MACvB;AAGA,UAAI,QAAQ,KAAK,WAAW,CAAC,MAAM,OAAQ;AACzC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAGA,YAAM,MAAM,KAAK,MAAM,MAAM,OAAO;AAGpC,UAAI,QAAQ,QAAQ,OAAO,QAAQ,UAAU;AAC3C,eAAO;AAAA,MACT;AAEA,YAAM,cAAe,WAAW,QAAQ,eAAgB;AACxD,YAAM,oBAAqB,WAAW,QAAQ,qBAAsB;AAGpE,UAAI,gBAAgB,YAAY,sBAAsB,UAAU;AAC9D,eAAO;AAAA,MACT;AAEA,UAAI,gBAAgB,YAAY,sBAAsB,UAAU;AAC9D,YAAI,eAAe,KAAK,IAAI,MAAM,SAAS,qBAAqB,KAAK,IAAI,MAAM,OAAO;AACpF,iBAAO;AAAA,QACT;AAAA,MACF,WAAW,gBAAgB,YAAY,sBAAsB,UAAU;AACrE,YAAI,eAAe,KAAK,IAAI,MAAM,OAAO;AACvC,iBAAO;AAAA,QACT;AAAA,MACF,OAAO;AACL,YAAI,qBAAqB,KAAK,IAAI,MAAM,OAAO;AAC7C,iBAAO;AAAA,QACT;AAAA,MACF;AAGA,aAAO,OAAO,KAAK,EAAE,aAAa,mBAAmB,MAAM,WAAW,QAAQ,KAAK,CAAC;AAAA,IACtF;AAEA,aAAS,OAAQ,KAAK,EAAE,cAAc,SAAS,oBAAoB,SAAS,KAAK,IAAI,CAAC,GAAG;AACvF,UAAI,OAAO,CAAC,GAAG;AAEf,aAAO,KAAK,QAAQ;AAClB,cAAM,QAAQ;AACd,eAAO,CAAC;AAER,mBAAW,QAAQ,OAAO;AACxB,cAAI,gBAAgB,YAAY,OAAO,UAAU,eAAe,KAAK,MAAM,WAAW,GAAG;AACvF,gBAAI,SAAS,MAAM;AACjB,qBAAO;AAAA,YACT,WAAW,gBAAgB,SAAS;AAClC,oBAAM,IAAI,YAAY,8CAA8C;AAAA,YACtE;AAEA,mBAAO,KAAK;AAAA,UACd;AAEA,cAAI,sBAAsB,YACtB,OAAO,UAAU,eAAe,KAAK,MAAM,aAAa,KACxD,OAAO,UAAU,eAAe,KAAK,KAAK,aAAa,WAAW,GAAG;AACvE,gBAAI,SAAS,MAAM;AACjB,qBAAO;AAAA,YACT,WAAW,sBAAsB,SAAS;AACxC,oBAAM,IAAI,YAAY,8CAA8C;AAAA,YACtE;AAEA,mBAAO,KAAK;AAAA,UACd;AAEA,qBAAW,OAAO,MAAM;AACtB,kBAAM,QAAQ,KAAK,GAAG;AACtB,gBAAI,SAAS,OAAO,UAAU,UAAU;AACtC,mBAAK,KAAK,KAAK;AAAA,YACjB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAEA,aAAS,MAAO,MAAM,SAAS,SAAS;AACtC,YAAM,kBAAkB,MAAM;AAC9B,YAAM,kBAAkB;AACxB,UAAI;AACF,eAAO,OAAO,MAAM,SAAS,OAAO;AAAA,MACtC,UAAE;AACA,cAAM,kBAAkB;AAAA,MAC1B;AAAA,IACF;AAEA,aAAS,UAAW,MAAM,SAAS;AACjC,YAAM,kBAAkB,MAAM;AAC9B,YAAM,kBAAkB;AACxB,UAAI;AACF,eAAO,OAAO,MAAM,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,MAC7C,SAAS,IAAI;AACX,eAAO;AAAA,MACT,UAAE;AACA,cAAM,kBAAkB;AAAA,MAC1B;AAAA,IACF;AAEA,WAAO,UAAU;AACjB,WAAO,QAAQ,UAAU;AACzB,WAAO,QAAQ,QAAQ;AACvB,WAAO,QAAQ,YAAY;AAC3B,WAAO,QAAQ,OAAO;AAAA;AAAA;;;AC7HtB;AAAA,EAKE,iCAAAA;AAAA,OACK;AAOP,SAAS,KAAAC,UAAS;;;ACblB;AAAA,EAEE;AAAA,OACK;AAIA,SAAS,+BACd,QACqB;AACrB,QAAM,WAAgC,CAAC;AAEvC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,U
ACN,SAAS,QACN,IAAI,CAAC,SAAS;AACb,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACE,UAAU,SAAS,IACf,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC1D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACpC,EAAE,IACF;AAAA,QACR,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvGA,IAAI,iBAAiB,CAAC,UAAU,cAAc,OAAO;AACnD,SAAO,CAAC,OAAO,gBAAgB;AAC7B,QAAI,KAAK;AACT,QAAI,IAAI;AACR,WAAO,KAAK;AACV,YAAM,SAAU,KAAK,OAAO,IAAI,SAAS,SAAU,CAAC;AAAA,IACtD;AACA,WAAO;AAAA,EACT;AACF;;;ASVA,+BAAuB;AFDvB,SAAS,uBAAuB;ACAhC,SAAS,wBAAwB;ACAjC,SAAS,gBAAgB,uBAAAC,4BAA2B;ACApD,SAAS,2BAA2B;AEApC,SAAS,oBAAoB;AEA7B,SAAS,gBAAAC,eAAc,8BAA8B;AXM9C,SAAS,uBACd,UACwB;AACxB,QAAM,UAAkC,CAAC;AACzC,WAAS,QAAQ,QAAQ,CAAC,OAAO,QAAQ;AACvC,YAAQ,GAAG,IAAI;EACjB,CAAC;AACD,SAAO;AACT;ACTO,IAAM,aAAa;EACxB;EACA;AACF;AOHO,IAAM,kBAAkB,OAAO,qBAAqB;AAsBpD,SAAS,UACd,UAGmB;AACnB,SAAO,EAAE,CAAC,eAAe,GAAG,MAAM,SAAS;AAC7C;AAEO,SAAS,YAAY,OAAoC;AAC9D,SACE,OAAO,UAAU,YACjB,UAAU,QACV,mBAAmB,SACnB,MAAM,eAAe,MAAM,QAC3B,cAAc;AAElB;AAEO,SAAS,aACd,WACmB;AACnB,SAAO,UAAU,CAAA,UAAS;AACxB,UAAM,SAAS,UAAU,UAAU,KAAK;AACxC,WAAO,OAAO,UACV,EAAE,SAAS,MAAM,OAAO,OAAO,KAAK,IACpC,EAAE,SAAS,OAAO,OAAO,OAAO,MAAM;EAC5C,CAAC;AACH;ADzCO,SAAS,cAAiB;EAC/B;EACA,QAAQ;AACV,GAGM;AACJ,QAAM,SAAS,kBAAkB,EAAE,OAAO,QAAQ,YAAY,CAAC;AAE/D,MAAI,CAAC,OAAO,SAAS;AACnB,UAAM,IAAI,oBAAoB,EAAE,OAAO,OAAO,OAAO,MAAM,CAAC;EAC9D;AAEA,SAAO,OAAO;AAChB;AAWO,SAAS,kBAAqB;EACnC;EACA,QAAQ;AACV,GAKmD;AACjD,QAAM,SAAS,YAAY,WAAW,IAClC,cACA,aAAa,WAAW;AAE5B,MAAI;AACF,QAAI,OAAO,YAAY,MAAM;AAC3B,aAAO,EAAE,SAAS,MAAM,MAAkB;IAC5C;AAEA,UAAM,mBAAmB,OAAO,SAAS,KAAK;AAE9C,QAAI,iBAAiB,SAAS;AAC5B,aAAO;IACT;AAEA,WAAO;MACL,SAAS;MACT,OAAO,IAAI,oBAAoB;QAC7B;QACA,OAAO,iBAAiB;MAC1B,CAAC;IACH;EACF,SAAS,OAAO;AACd,WAAO;MACL,SAAS;MACT,OAAO,oBAAoB,sBAAsB,KAAK,IAClD,QACA,IAAI,oBAAoB,EAAE,OAAO,OAAO,MAAM,CAAC;IACrD;EACF;AACF;ADjDO,SAAS,UAAa;EAC3B;EACA;AACF,GAGM;AACJ,MAAI;AACF,UAAM,QAAQ,yBAAAC,QAAW,MAAM,IAAI;AAEnC,QAAI,UAAU,MAAM;AAClB,aAAO;IACT;AAEA,WAAO,cAAc,EAAE,OAAO,OAAO,CAAC;EACxC,SAAS,OAAO;AACd,QACE,eAAe,iBAAiB,KAAK,KACrCC,qBAAoB,sBAAsB,KAAK,GAC/C;AACA,YAAM;IACR;AAEA,UAAM,IAAI,eAAe,EAAE,MAAM,OAAO,MAAM,CAAC;EACjD;AACF;AKnCO,IAAM,iCACX,CAAI;EACF;EACA;EACA;AACF,MAKA,OAAO,EAAE,UAAU,KAAK,kBAAkB,MAAM;AAC9C,QAAM,eAAe,MAAM,SAAS,KAAK;AACzC,QAAM,kBAAkB,uBAAuB,QAAQ;AAGvD,MAAI,aAAa,KAAK,MAAM,IAAI;AAC9B,WAAO;MACL;MACA,OAAO,IAAIC,cAAa;QACtB,SAAS,SAAS;QAClB;QACA;QACA,YAAY,SAAS;QACrB;QACA;QACA,aAAa,eAAA,OAAA,SAAA,YAAc,QAAA;MAC7B,CAAC;IACH;EACF;AAGA,MAAI;AACF,UAAM,cAAc,UAAU;MAC5B,MAAM;MACN,QAAQ;IACV,CAAC;AAED,WAAO;MACL;MACA,OAAO,IAAIA,cAAa;QACtB,SAAS,eAAe,WAAW;QACnC;QACA;QACA,YAAY,SAAS;QACrB;QACA;QACA,MAAM;QACN,aAA
a,eAAA,OAAA,SAAA,YAAc,UAAU,WAAA;MACvC,CAAC;IACH;EACF,SAAS,YAAY;AACnB,WAAO;MACL;MACA,OAAO,IAAIA,cAAa;QACtB,SAAS,SAAS;QAClB;QACA;QACA,YAAY,SAAS;QACrB;QACA;QACA,aAAa,eAAA,OAAA,SAAA,YAAc,QAAA;MAC7B,CAAC;IACH;EACF;AACF;;;AGjFF,SAAS,SAAS;AAElB,IAAM,2BAA2B,EAAE,OAAO;AAAA,EACxC,QAAQ,EAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,EAAE,OAAO;AAAA,EAClB,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,EAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,iCAAiC,+BAA+B;AAAA,EAC3E,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC,CAAC;;;AnBGD,SAAS,cAAc;AAOhB,IAAM,6BAAN,MAA4D;AAAA,EASjE,YACE,SACA,UACA,QACA;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOf,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,+BAA+B,MAAM;AAAA,IACjD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAIA,KAAK,kBAAkB;AACrB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,YAAQ,KAAK,QAAQ;AAErB,UAAM,WAAY,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO;AAAA,MAC1D,UAAU,KAAK;AAAA,IACjB,CAAC;AAED,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,cAAc;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,OAAO;AAAA,QACL,cAAc;AAAA,QACd,kBAAkB;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAY,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO;AAAA,MAC1D,UAAU,KAAK;AAAA,MACf,QAAQ;AAAA,IACV,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,UAAU,OAAO,YAAY;AACjC,kBAAM,cAAc,IAAI,YAAY,EAAE;AAAA,cACpC;AAAA,YACF;AACA,kBAAM,SAAS,OAAO,IAAI,SAAS,WAAW,CAAC;AAC/C,6BAAiB,eAAe,QAAQ;AACtC,kBAAI,CAAC,YAAY,MAAM;AACrB;AAAA,cACF;AACA,kBAAI,YAAY,SAAS,UAAU;AACjC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,OAAO;AAAA,oBACL,cAAc;AAAA,oBACd,kBAAkB;AAAA,kBACpB;AAAA,gBACF,CAAC;AACD;AAAA,cACF;AACA,oBAAM,OAAO,KAAK,MAAM,YAAY,IAAI;AACxC,sBAAQ,IAAI,QAAQ,IAAI;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,KAAK,YAAY;AAAA,cAC9B,CAAC;AAAA,YACH;AACA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAO;AAAA,gBACL,cAAc;AAAA,gBACd,kBAAkB;AAAA,cACpB;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AACF;AAGA,IAAM,8BAA8BC,GAAE,OAAO;AAAA,EAC3C,UAAUA,GAAE,OAAO;AACrB,CAAC;AAID,IAAM,2BAA2BA,GAAE,WAAW,UAAU;AAExD,SAAS,0BACP,MAGA;AAEA,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACpD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACvC,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KA
AK;AAAA,MAClB,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACtD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,IAAI,MAAM,IAAI;AAEtB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA,IAIlD,KAAK;AACH,aAAO;AAAA,QACL,OAAO,YAAY;AAAA,UACjB,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW;AAAA,QAC9C;AAAA,QACA,aAAa;AAAA,MACf;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,MAAM,iCAAiC,gBAAgB,EAAE;AAAA,IACrE;AAAA,EACF;AACF;;;AoBxPO,SAAS,wBAAwB,SAAuC;AAC7E,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAEnC,IAAI,2BAA2B,SAAS,UAAU;AAAA,IAChD,UAAU;AAAA,IACV,SAAS,QAAQ;AAAA,EACnB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,SAAO;AACT;","names":["UnsupportedFunctionalityError","z","TypeValidationError","APICallError","SecureJSON","TypeValidationError","APICallError","UnsupportedFunctionalityError","z"]}
package/package.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "name": "workers-ai-provider",
+   "description": "Workers AI Provider for the vercel AI SDK",
+   "type": "module",
+   "version": "0.0.1",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "scripts": {
+     "build": "rm -rf dist && tsup src/index.ts --dts --sourcemap --format esm --target es2020"
+   },
+   "dependencies": {
+     "@ai-sdk/provider": "^0.0.14",
+     "ai": "^3.2.40",
+     "fetch-event-stream": "^0.1.5",
+     "zod": "^3.23.8"
+   },
+   "files": [
+     "dist",
+     "src",
+     "README.md",
+     "LICENSE",
+     "package.json"
+   ],
+   "devDependencies": {
+     "@changesets/changelog-github": "^0.5.0",
+     "@changesets/cli": "^2.27.5",
+     "@cloudflare/workers-types": "^4.20240725.0",
+     "@types/bun": "^1.1.6",
+     "tsup": "^8.2.3",
+     "typescript": "^5.5.4"
+   }
+ }
package/src/convert-to-workersai-chat-messages.ts ADDED
@@ -0,0 +1,106 @@
+ import {
+   type LanguageModelV1Prompt,
+   UnsupportedFunctionalityError,
+ } from "@ai-sdk/provider";
+ import type { WorkersAIChatPrompt } from "./workersai-chat-prompt";
+
+ // TODO
+ export function convertToWorkersAIChatMessages(
+   prompt: LanguageModelV1Prompt
+ ): WorkersAIChatPrompt {
+   const messages: WorkersAIChatPrompt = [];
+
+   for (const { role, content } of prompt) {
+     switch (role) {
+       case "system": {
+         messages.push({ role: "system", content });
+         break;
+       }
+
+       case "user": {
+         messages.push({
+           role: "user",
+           content: content
+             .map((part) => {
+               switch (part.type) {
+                 case "text": {
+                   return part.text;
+                 }
+                 case "image": {
+                   throw new UnsupportedFunctionalityError({
+                     functionality: "image-part",
+                   });
+                 }
+               }
+             })
+             .join(""),
+         });
+         break;
+       }
+
+       case "assistant": {
+         let text = "";
+         const toolCalls: Array<{
+           id: string;
+           type: "function";
+           function: { name: string; arguments: string };
+         }> = [];
+
+         for (const part of content) {
+           switch (part.type) {
+             case "text": {
+               text += part.text;
+               break;
+             }
+             case "tool-call": {
+               toolCalls.push({
+                 id: part.toolCallId,
+                 type: "function",
+                 function: {
+                   name: part.toolName,
+                   arguments: JSON.stringify(part.args),
+                 },
+               });
+               break;
+             }
+             default: {
+               const _exhaustiveCheck: never = part;
+               throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
+             }
+           }
+         }
+
+         messages.push({
+           role: "assistant",
+           content: text,
+           tool_calls:
+             toolCalls.length > 0
+               ? toolCalls.map(({ function: { name, arguments: args } }) => ({
+                   id: "null",
+                   type: "function",
+                   function: { name, arguments: args },
+                 }))
+               : undefined,
+         });
+
+         break;
+       }
+       case "tool": {
+         for (const toolResponse of content) {
+           messages.push({
+             role: "tool",
+             name: toolResponse.toolName,
+             content: JSON.stringify(toolResponse.result),
+           });
+         }
+         break;
+       }
+       default: {
+         const _exhaustiveCheck: never = role;
+         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+       }
+     }
+   }
+
+   return messages;
+ }
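
As an illustration of the conversion above, here is a small sketch (the prompt literal is hypothetical; only the converter itself comes from this package):

```ts
import type { LanguageModelV1Prompt } from "@ai-sdk/provider";
import { convertToWorkersAIChatMessages } from "./convert-to-workersai-chat-messages";

// A hypothetical AI SDK prompt: a system message plus one user text part.
const prompt: LanguageModelV1Prompt = [
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: [{ type: "text", text: "What is Workers AI?" }] },
];

// Produces plain-string messages:
// [
//   { role: "system", content: "You are a helpful assistant." },
//   { role: "user", content: "What is Workers AI?" },
// ]
const messages = convertToWorkersAIChatMessages(prompt);
```
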
package/src/index.ts ADDED
@@ -0,0 +1,62 @@
+ import { WorkersAIChatLanguageModel } from "./workersai-chat-language-model";
+ import type { WorkersAIChatSettings } from "./workersai-chat-settings";
+
+ export interface WorkersAI {
+   (
+     modelId: BaseAiTextGenerationModels,
+     settings?: WorkersAIChatSettings
+   ): WorkersAIChatLanguageModel;
+
+   /**
+    * Creates a model for text generation.
+    **/
+   chat(
+     modelId: BaseAiTextGenerationModels,
+     settings?: WorkersAIChatSettings
+   ): WorkersAIChatLanguageModel;
+ }
+
+ export interface WorkersAISettings {
+   /**
+    * Provide an `env.AI` binding to use for the AI inference.
+    * You can set up an AI binding in your Workers project
+    * by adding the following to `wrangler.toml`:
+
+   ```toml
+   [ai]
+   binding = "AI"
+   ```
+    **/
+   binding: Ai;
+ }
+
+ /**
+  * Create a Workers AI provider instance.
+  **/
+ export function createWorkersAI(options: WorkersAISettings): WorkersAI {
+   const createChatModel = (
+     modelId: BaseAiTextGenerationModels,
+     settings: WorkersAIChatSettings = {}
+   ) =>
+     new WorkersAIChatLanguageModel(modelId, settings, {
+       provider: "workerai.chat",
+       binding: options.binding,
+     });
+
+   const provider = function (
+     modelId: BaseAiTextGenerationModels,
+     settings?: WorkersAIChatSettings
+   ) {
+     if (new.target) {
+       throw new Error(
+         "The WorkersAI model function cannot be called with the new keyword."
+       );
+     }
+
+     return createChatModel(modelId, settings);
+   };
+
+   provider.chat = createChatModel;
+
+   return provider as WorkersAI;
+ }
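
For context, a minimal sketch of how a Worker might consume `createWorkersAI` together with the AI SDK's `generateText` (the `Env` shape and the model id are illustrative assumptions, not something this package prescribes):

```ts
import { generateText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

// Assumes an `AI` binding configured via the `[ai]` block in wrangler.toml.
interface Env {
  AI: Ai;
}

export default {
  async fetch(_request: Request, env: Env): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    // "@cf/meta/llama-2-7b-chat-int8" is just an example model id.
    const { text } = await generateText({
      model: workersai("@cf/meta/llama-2-7b-chat-int8"),
      prompt: "Write a haiku about Cloudflare Workers.",
    });

    return new Response(text);
  },
};
```
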
package/src/map-workersai-finish-reason.ts ADDED
@@ -0,0 +1,17 @@
+ import type { LanguageModelV1FinishReason } from "@ai-sdk/provider";
+
+ export function mapWorkersAIFinishReason(
+   finishReason: string | null | undefined
+ ): LanguageModelV1FinishReason {
+   switch (finishReason) {
+     case "stop":
+       return "stop";
+     case "length":
+     case "model_length":
+       return "length";
+     case "tool_calls":
+       return "tool-calls";
+     default:
+       return "other";
+   }
+ }
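
A few quick sanity checks of the mapping (note that the chat model below does not call this helper yet; it hard-codes `finishReason: "stop"`):

```ts
import { mapWorkersAIFinishReason } from "./map-workersai-finish-reason";

mapWorkersAIFinishReason("stop");         // "stop"
mapWorkersAIFinishReason("model_length"); // "length"
mapWorkersAIFinishReason("tool_calls");   // "tool-calls"
mapWorkersAIFinishReason(undefined);      // "other"
```
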
package/src/workersai-chat-language-model.ts ADDED
@@ -0,0 +1,283 @@
+ import {
+   type LanguageModelV1,
+   type LanguageModelV1CallWarning,
+   type LanguageModelV1FinishReason,
+   type LanguageModelV1StreamPart,
+   UnsupportedFunctionalityError,
+ } from "@ai-sdk/provider";
+ import type {
+   ParseResult,
+   createEventSourceResponseHandler,
+   createJsonResponseHandler,
+   postJsonToApi,
+ } from "@ai-sdk/provider-utils";
+ import { z } from "zod";
+ import { convertToWorkersAIChatMessages } from "./convert-to-workersai-chat-messages";
+ import { mapWorkersAIFinishReason } from "./map-workersai-finish-reason";
+ import type { WorkersAIChatSettings } from "./workersai-chat-settings";
+ import { workersAIFailedResponseHandler } from "./workersai-error";
+
+ import { events } from "fetch-event-stream";
+
+ type WorkersAIChatConfig = {
+   provider: string;
+   binding: Ai;
+ };
+
+ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
+   readonly specificationVersion = "v1";
+   readonly defaultObjectGenerationMode = "json";
+
+   readonly modelId: BaseAiTextGenerationModels;
+   readonly settings: WorkersAIChatSettings;
+
+   private readonly config: WorkersAIChatConfig;
+
+   constructor(
+     modelId: BaseAiTextGenerationModels,
+     settings: WorkersAIChatSettings,
+     config: WorkersAIChatConfig
+   ) {
+     this.modelId = modelId;
+     this.settings = settings;
+     this.config = config;
+   }
+
+   get provider(): string {
+     return this.config.provider;
+   }
+
+   private getArgs({
+     mode,
+     prompt,
+     maxTokens,
+     temperature,
+     topP,
+     frequencyPenalty,
+     presencePenalty,
+     seed,
+   }: Parameters<LanguageModelV1["doGenerate"]>[0]) {
+     const type = mode.type;
+
+     const warnings: LanguageModelV1CallWarning[] = [];
+
+     if (frequencyPenalty != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "frequencyPenalty",
+       });
+     }
+
+     if (presencePenalty != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "presencePenalty",
+       });
+     }
+
+     const baseArgs = {
+       // model id:
+       model: this.modelId,
+
+       // model specific settings:
+       safe_prompt: this.settings.safePrompt,
+
+       // standardized settings:
+       max_tokens: maxTokens,
+       temperature,
+       top_p: topP,
+       random_seed: seed,
+
+       // messages:
+       messages: convertToWorkersAIChatMessages(prompt),
+     };
+
+     switch (type) {
+       case "regular": {
+         return {
+           args: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },
+           warnings,
+         };
+       }
+
+       case "object-json": {
+         return {
+           args: {
+             ...baseArgs,
+             response_format: { type: "json_object" },
+           },
+           warnings,
+         };
+       }
+
+       case "object-tool": {
+         return {
+           args: {
+             ...baseArgs,
+             tool_choice: "any",
+             tools: [{ type: "function", function: mode.tool }],
+           },
+           warnings,
+         };
+       }
+
+       // @ts-expect-error - this is unreachable code
+       // TODO: fixme
+       case "object-grammar": {
+         throw new UnsupportedFunctionalityError({
+           functionality: "object-grammar mode",
+         });
+       }
+
+       default: {
+         const _exhaustiveCheck: never = type;
+         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
+       }
+     }
+   }
+
+   async doGenerate(
+     options: Parameters<LanguageModelV1["doGenerate"]>[0]
+   ): Promise<Awaited<ReturnType<LanguageModelV1["doGenerate"]>>> {
+     const { args, warnings } = this.getArgs(options);
+
+     console.warn(warnings);
+
+     const response = (await this.config.binding.run(args.model, {
+       messages: args.messages,
+     })) as { response: string };
+
+     return {
+       text: response.response,
+       finishReason: "stop",
+       rawCall: { rawPrompt: args.messages, rawSettings: args },
+       usage: {
+         promptTokens: 0,
+         completionTokens: 0,
+       },
+     };
+   }
+
+   async doStream(
+     options: Parameters<LanguageModelV1["doStream"]>[0]
+   ): Promise<Awaited<ReturnType<LanguageModelV1["doStream"]>>> {
+     const { args, warnings } = this.getArgs(options);
+
+     const response = (await this.config.binding.run(args.model, {
+       messages: args.messages,
+       stream: true,
+     })) as ReadableStream;
+
+     return {
+       stream: response.pipeThrough(
+         new TransformStream<
+           ParseResult<z.infer<typeof workersAIChatChunkSchema>>,
+           LanguageModelV1StreamPart
+         >({
+           async transform(chunk, controller) {
+             const chunkToText = new TextDecoder().decode(
+               chunk as unknown as Uint8Array
+             );
+             const chunks = events(new Response(chunkToText));
+             for await (const singleChunk of chunks) {
+               if (!singleChunk.data) {
+                 continue;
+               }
+               if (singleChunk.data === "[DONE]") {
+                 controller.enqueue({
+                   type: "finish",
+                   finishReason: "stop",
+                   usage: {
+                     promptTokens: 0,
+                     completionTokens: 0,
+                   },
+                 });
+                 return;
+               }
+               const data = JSON.parse(singleChunk.data);
+               console.log("data", data);
+               controller.enqueue({
+                 type: "text-delta",
+                 textDelta: data.response ?? "DATALOSS",
+               });
+             }
+             controller.enqueue({
+               type: "finish",
+               finishReason: "stop",
+               usage: {
+                 promptTokens: 0,
+                 completionTokens: 0,
+               },
+             });
+           },
+         })
+       ),
+       rawCall: { rawPrompt: args.messages, rawSettings: args },
+       warnings,
+     };
+   }
+ }
+ // limited version of the schema, focussed on what is needed for the implementation
+ // this approach limits breakages when the API changes and increases efficiency
+ const workersAIChatResponseSchema = z.object({
+   response: z.string(),
+ });
+
+ // limited version of the schema, focussed on what is needed for the implementation
+ // this approach limits breakages when the API changes and increases efficiency
+ const workersAIChatChunkSchema = z.instanceof(Uint8Array);
+
+ function prepareToolsAndToolChoice(
+   mode: Parameters<LanguageModelV1["doGenerate"]>[0]["mode"] & {
+     type: "regular";
+   }
+ ) {
+   // when the tools array is empty, change it to undefined to prevent errors:
+   const tools = mode.tools?.length ? mode.tools : undefined;
+
+   if (tools == null) {
+     return { tools: undefined, tool_choice: undefined };
+   }
+
+   const mappedTools = tools.map((tool) => ({
+     type: "function",
+     function: {
+       name: tool.name,
+       description: tool.description,
+       parameters: tool.parameters,
+     },
+   }));
+
+   const toolChoice = mode.toolChoice;
+
+   if (toolChoice == null) {
+     return { tools: mappedTools, tool_choice: undefined };
+   }
+
+   const type = toolChoice.type;
+
+   console.log(mode, type);
+
+   switch (type) {
+     case "auto":
+       return { tools: mappedTools, tool_choice: type };
+     case "none":
+       return { tools: mappedTools, tool_choice: type };
+     case "required":
+       return { tools: mappedTools, tool_choice: "any" };
+
+     // workersAI does not support tool mode directly,
+     // so we filter the tools and force the tool choice through 'any'
+     case "tool":
+       return {
+         tools: mappedTools.filter(
+           (tool) => tool.function.name === toolChoice.toolName
+         ),
+         tool_choice: "any",
+       };
+     default: {
+       const _exhaustiveCheck: never = type;
+       throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
+     }
+   }
+ }
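
A sketch of exercising `doStream` indirectly through the AI SDK's `streamText` (again, the model id and the Worker shape are illustrative assumptions):

```ts
import { streamText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    // streamText drives doStream above; each SSE chunk emitted by the binding
    // is decoded and surfaced as a "text-delta" stream part.
    const result = await streamText({
      model: workersai("@cf/meta/llama-2-7b-chat-int8"),
      prompt: "Tell me a short story about an origami crane.",
    });

    return result.toTextStreamResponse();
  },
};
```
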
package/src/workersai-chat-prompt.ts ADDED
@@ -0,0 +1,33 @@
+ export type WorkersAIChatPrompt = Array<WorkersAIChatMessage>;
+
+ export type WorkersAIChatMessage =
+   | WorkersAISystemMessage
+   | WorkersAIUserMessage
+   | WorkersAIAssistantMessage
+   | WorkersAIToolMessage;
+
+ export interface WorkersAISystemMessage {
+   role: "system";
+   content: string;
+ }
+
+ export interface WorkersAIUserMessage {
+   role: "user";
+   content: string;
+ }
+
+ export interface WorkersAIAssistantMessage {
+   role: "assistant";
+   content: string;
+   tool_calls?: Array<{
+     id: string;
+     type: "function";
+     function: { name: string; arguments: string };
+   }>;
+ }
+
+ export interface WorkersAIToolMessage {
+   role: "tool";
+   name: string;
+   content: string;
+ }
package/src/workersai-chat-settings.ts ADDED
@@ -0,0 +1,8 @@
+ export interface WorkersAIChatSettings {
+   /**
+   Whether to inject a safety prompt before all conversations.
+
+   Defaults to `false`.
+   */
+   safePrompt?: boolean;
+ }
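
Settings are passed as the second argument when creating a model; a sketch (model id illustrative):

```ts
import { createWorkersAI } from "workers-ai-provider";

export function makeSafeChatModel(env: { AI: Ai }) {
  const workersai = createWorkersAI({ binding: env.AI });
  // `safePrompt: true` is forwarded to the Workers AI binding as `safe_prompt`.
  return workersai("@cf/meta/llama-2-7b-chat-int8", { safePrompt: true });
}
```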