workers-ai-provider 0.2.2 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -1,6 +1,10 @@
  import { LanguageModelV1, ImageModelV1 } from '@ai-sdk/provider';
 
- interface WorkersAIChatSettings {
+ type StringLike = string | {
+   toString(): string;
+ };
+
+ type WorkersAIChatSettings = {
    /**
     * Whether to inject a safety prompt before all conversations.
     * Defaults to `false`.
@@ -11,7 +15,12 @@ interface WorkersAIChatSettings {
     * @deprecated
     */
    gateway?: GatewayOptions;
- }
+ } & {
+   /**
+    * Passthrough settings that are provided directly to the run function.
+    */
+   [key: string]: StringLike;
+ };
 
  /**
   * The names of the BaseAiTextGeneration models.
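
The practical effect of this typing change: `WorkersAIChatSettings` is now the documented fields intersected with a `StringLike` index signature, so any extra key whose value is a string (or coercible to one via `toString()`) type-checks and is treated as a passthrough setting. A minimal sketch of what this accepts — the extra keys below are illustrative, not documented options, and it assumes the type is exported from the package entry point:

```ts
import type { WorkersAIChatSettings } from "workers-ai-provider";

// Illustrative passthrough keys; only `safePrompt` and `gateway` are documented fields.
const settings: WorkersAIChatSettings = {
  safePrompt: true,                                    // documented field
  queueRequest: "true",                                // plain string
  maxBatchSize: 8,                                     // numbers have toString()
  requestTag: { toString: () => `run-${Date.now()}` }, // any object with a toString()
};
```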
package/dist/index.js CHANGED
@@ -13,7 +13,22 @@ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "
  function createRun(config) {
    const { accountId, apiKey } = config;
    return async function run(model, inputs, options) {
-     const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}`;
+     const { gateway, prefix, extraHeaders, returnRawResponse, ...passthroughOptions } = options || {};
+     const urlParams = new URLSearchParams();
+     for (const [key, value] of Object.entries(passthroughOptions)) {
+       try {
+         const valueStr = value.toString();
+         if (!valueStr) {
+           continue;
+         }
+         urlParams.append(key, valueStr);
+       } catch (error) {
+         throw new Error(
+           `Value for option '${key}' is not able to be coerced into a string.`
+         );
+       }
+     }
+     const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${urlParams ? `?${urlParams}` : ""}`;
      const headers = {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`
@@ -24,7 +39,7 @@ function createRun(config) {
        headers,
        body
      });
-     if (options?.returnRawResponse) {
+     if (returnRawResponse) {
        return response;
      }
      if (inputs.stream === true) {
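
In other words, the REST fallback now separates the options it understands (`gateway`, `prefix`, `extraHeaders`, `returnRawResponse`) from everything else, coerces each remaining value with `toString()`, and appends the non-empty results to the run URL as query parameters. A standalone sketch of that URL construction — the helper name and example values are illustrative, not the package's internals:

```ts
// Sketch of the query-string behaviour; `buildRunUrl` is not part of the package.
type StringLike = string | { toString(): string };

function buildRunUrl(
  accountId: string,
  model: string,
  passthrough: Record<string, StringLike>,
): string {
  const params = new URLSearchParams();
  for (const [key, value] of Object.entries(passthrough)) {
    const str = value.toString();
    if (str) params.append(key, str); // empty values are skipped
  }
  const query = params.toString();
  return `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${query ? `?${query}` : ""}`;
}

// buildRunUrl("abc123", "@cf/meta/llama-3.1-8b-instruct", { queueRequest: "true" })
// => "https://api.cloudflare.com/client/v4/accounts/abc123/ai/run/@cf/meta/llama-3.1-8b-instruct?queueRequest=true"
```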
@@ -444,6 +459,7 @@ var WorkersAIChatLanguageModel = class {
    }
    async doGenerate(options) {
      const { args, warnings } = this.getArgs(options);
+     const { gateway, safePrompt, ...passthroughOptions } = this.settings;
      const output = await this.config.binding.run(
        args.model,
        {
@@ -455,7 +471,7 @@ var WorkersAIChatLanguageModel = class {
          // @ts-expect-error response_format not yet added to types
          response_format: args.response_format
        },
-       { gateway: this.config.gateway ?? this.settings.gateway }
+       { gateway: this.config.gateway ?? gateway, ...passthroughOptions }
      );
      if (output instanceof ReadableStream) {
        throw new Error("This shouldn't happen");
@@ -511,6 +527,7 @@ var WorkersAIChatLanguageModel = class {
          warnings
        };
      }
+     const { gateway, ...passthroughOptions } = this.settings;
      const response = await this.config.binding.run(
        args.model,
        {
@@ -523,7 +540,7 @@ var WorkersAIChatLanguageModel = class {
          // @ts-expect-error response_format not yet added to types
          response_format: args.response_format
        },
-       { gateway: this.config.gateway ?? this.settings.gateway }
+       { gateway: this.config.gateway ?? gateway, ...passthroughOptions }
      );
      if (!(response instanceof ReadableStream)) {
        throw new Error("This shouldn't happen");
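
Both `doGenerate` and `doStream` now pull `gateway` (and, in the non-streaming path, `safePrompt`) out of the settings and spread every remaining key into the options passed to `binding.run`, so passthrough settings reach the Workers AI binding or, via the REST fallback above, the run URL. A hedged end-to-end sketch with the Vercel AI SDK inside a Worker — the `queueRequest` key is a made-up stand-in for any passthrough setting, and the model ID is only an example:

```ts
import { generateText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    // `queueRequest` is illustrative: with 0.3.0 it is forwarded untouched
    // to binding.run (or serialised onto the REST URL as a query parameter).
    const model = workersai("@cf/meta/llama-3.1-8b-instruct", {
      queueRequest: "true",
    });

    const { text } = await generateText({ model, prompt: "Say hello." });
    return new Response(text);
  },
};
```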
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/utils.ts","../../../node_modules/@ai-sdk/provider/src/errors/ai-sdk-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/api-call-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/empty-response-body-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/get-error-message.ts","../../../node_modules/@ai-sdk/provider/src/errors/invalid-argument-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/invalid-prompt-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/invalid-response-data-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/json-parse-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/load-api-key-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/load-setting-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/no-content-generated-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/no-such-model-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/too-many-embedding-values-for-call-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/type-validation-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/unsupported-functionality-error.ts","../../../node_modules/@ai-sdk/provider/src/json-value/is-json.ts","../src/convert-to-workersai-chat-messages.ts","../../../node_modules/fetch-event-stream/esm/deps/jsr.io/@std/streams/0.221.0/text_line_stream.js","../../../node_modules/fetch-event-stream/esm/utils.js","../../../node_modules/fetch-event-stream/esm/mod.js","../src/map-workersai-usage.ts","../src/workersai-chat-language-model.ts","../src/workersai-image-model.ts","../src/index.ts"],"sourcesContent":["/**\n * General AI run interface with overloads to handle distinct return types.\n *\n * The behaviour depends on the combination of parameters:\n * 1. `returnRawResponse: true` => returns the raw Response object.\n * 2. `stream: true` => returns a ReadableStream (if available).\n * 3. Otherwise => returns post-processed AI results.\n */\nexport interface AiRun {\n\t// (1) Return raw Response if `options.returnRawResponse` is `true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions: AiOptions & { returnRawResponse: true },\n\t): Promise<Response>;\n\n\t// (2) Return a stream if the input has `stream: true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"] & { stream: true },\n\t\toptions?: AiOptions,\n\t): Promise<ReadableStream<Uint8Array>>;\n\n\t// (3) Return post-processed outputs by default.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions,\n\t): Promise<AiModels[Name][\"postProcessedOutputs\"]>;\n}\n\n/**\n * Parameters for configuring the Cloudflare-based AI runner.\n */\nexport interface CreateRunConfig {\n\t/** Your Cloudflare account identifier. */\n\taccountId: string;\n\n\t/** Cloudflare API token/key with appropriate permissions. */\n\tapiKey: string;\n}\n\n/**\n * Creates a run method that emulates the Cloudflare Workers AI binding,\n * but uses the Cloudflare REST API under the hood. 
Headers and abort\n * signals are configured at creation time, rather than per-request.\n *\n * @param config An object containing:\n * - `accountId`: Cloudflare account identifier.\n * - `apiKey`: Cloudflare API token/key with suitable permissions.\n * - `headers`: Optional custom headers to merge with defaults.\n * - `signal`: Optional AbortSignal for request cancellation.\n *\n * @returns A function matching the AiRun interface.\n */\nexport function createRun(config: CreateRunConfig): AiRun {\n\tconst { accountId, apiKey } = config;\n\n\t// Return the AiRun-compatible function.\n\treturn async function run<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions,\n\t): Promise<Response | ReadableStream<Uint8Array> | AiModels[Name][\"postProcessedOutputs\"]> {\n\t\tconst url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}`;\n\n\t\t// Merge default and custom headers.\n\t\tconst headers = {\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t\tAuthorization: `Bearer ${apiKey}`,\n\t\t};\n\n\t\tconst body = JSON.stringify(inputs);\n\n\t\t// Execute the POST request. The optional AbortSignal is applied here.\n\t\tconst response = await fetch(url, {\n\t\t\tmethod: \"POST\",\n\t\t\theaders,\n\t\t\tbody,\n\t\t});\n\n\t\t// (1) If the user explicitly requests the raw Response, return it as-is.\n\t\tif (options?.returnRawResponse) {\n\t\t\treturn response;\n\t\t}\n\n\t\t// (2) If the AI input requests streaming, return the ReadableStream if available.\n\t\tif ((inputs as AiTextGenerationInput).stream === true) {\n\t\t\tif (response.body) {\n\t\t\t\treturn response.body;\n\t\t\t}\n\t\t\tthrow new Error(\"No readable body available for streaming.\");\n\t\t}\n\n\t\t// (3) In all other cases, parse JSON and return the result field.\n\t\tconst data = await response.json<{\n\t\t\tresult: AiModels[Name][\"postProcessedOutputs\"];\n\t\t}>();\n\t\treturn data.result;\n\t};\n}\n","/**\n * Symbol used for identifying AI SDK Error instances.\n * Enables checking if an error is an instance of AISDKError across package versions.\n */\nconst marker = 'vercel.ai.error';\nconst symbol = Symbol.for(marker);\n\n/**\n * Custom error class for AI SDK related errors.\n * @extends Error\n */\nexport class AISDKError extends Error {\n private readonly [symbol] = true; // used in isInstance\n\n /**\n * The underlying cause of the error, if any.\n */\n readonly cause?: unknown;\n\n /**\n * Creates an AI SDK Error.\n *\n * @param {Object} params - The parameters for creating the error.\n * @param {string} params.name - The name of the error.\n * @param {string} params.message - The error message.\n * @param {unknown} [params.cause] - The underlying cause of the error.\n */\n constructor({\n name,\n message,\n cause,\n }: {\n name: string;\n message: string;\n cause?: unknown;\n }) {\n super(message);\n\n this.name = name;\n this.cause = cause;\n }\n\n /**\n * Checks if the given error is an AI SDK Error.\n * @param {unknown} error - The error to check.\n * @returns {boolean} True if the error is an AI SDK Error, false otherwise.\n */\n static isInstance(error: unknown): error is AISDKError {\n return AISDKError.hasMarker(error, marker);\n }\n\n protected static hasMarker(error: unknown, marker: string): boolean {\n const markerSymbol = Symbol.for(marker);\n return (\n error != null &&\n typeof error === 'object' &&\n markerSymbol in error &&\n typeof error[markerSymbol] === 'boolean' &&\n error[markerSymbol] === true\n );\n 
}\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_APICallError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class APICallError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly url: string;\n readonly requestBodyValues: unknown;\n readonly statusCode?: number;\n\n readonly responseHeaders?: Record<string, string>;\n readonly responseBody?: string;\n\n readonly isRetryable: boolean;\n readonly data?: unknown;\n\n constructor({\n message,\n url,\n requestBodyValues,\n statusCode,\n responseHeaders,\n responseBody,\n cause,\n isRetryable = statusCode != null &&\n (statusCode === 408 || // request timeout\n statusCode === 409 || // conflict\n statusCode === 429 || // too many requests\n statusCode >= 500), // server error\n data,\n }: {\n message: string;\n url: string;\n requestBodyValues: unknown;\n statusCode?: number;\n responseHeaders?: Record<string, string>;\n responseBody?: string;\n cause?: unknown;\n isRetryable?: boolean;\n data?: unknown;\n }) {\n super({ name, message, cause });\n\n this.url = url;\n this.requestBodyValues = requestBodyValues;\n this.statusCode = statusCode;\n this.responseHeaders = responseHeaders;\n this.responseBody = responseBody;\n this.isRetryable = isRetryable;\n this.data = data;\n }\n\n static isInstance(error: unknown): error is APICallError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_EmptyResponseBodyError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class EmptyResponseBodyError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({ message = 'Empty response body' }: { message?: string } = {}) {\n super({ name, message });\n }\n\n static isInstance(error: unknown): error is EmptyResponseBodyError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","export function getErrorMessage(error: unknown | undefined) {\n if (error == null) {\n return 'unknown error';\n }\n\n if (typeof error === 'string') {\n return error;\n }\n\n if (error instanceof Error) {\n return error.message;\n }\n\n return JSON.stringify(error);\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_InvalidArgumentError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\n * A function argument is invalid.\n */\nexport class InvalidArgumentError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly argument: string;\n\n constructor({\n message,\n cause,\n argument,\n }: {\n argument: string;\n message: string;\n cause?: unknown;\n }) {\n super({ name, message, cause });\n\n this.argument = argument;\n }\n\n static isInstance(error: unknown): error is InvalidArgumentError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_InvalidPromptError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\n * A prompt is invalid. 
This error should be thrown by providers when they cannot\n * process a prompt.\n */\nexport class InvalidPromptError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly prompt: unknown;\n\n constructor({\n prompt,\n message,\n cause,\n }: {\n prompt: unknown;\n message: string;\n cause?: unknown;\n }) {\n super({ name, message: `Invalid prompt: ${message}`, cause });\n\n this.prompt = prompt;\n }\n\n static isInstance(error: unknown): error is InvalidPromptError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_InvalidResponseDataError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\n * Server returned a response with invalid data content.\n * This should be thrown by providers when they cannot parse the response from the API.\n */\nexport class InvalidResponseDataError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly data: unknown;\n\n constructor({\n data,\n message = `Invalid response data: ${JSON.stringify(data)}.`,\n }: {\n data: unknown;\n message?: string;\n }) {\n super({ name, message });\n\n this.data = data;\n }\n\n static isInstance(error: unknown): error is InvalidResponseDataError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\nimport { getErrorMessage } from './get-error-message';\n\nconst name = 'AI_JSONParseError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n// TODO v5: rename to ParseError\nexport class JSONParseError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly text: string;\n\n constructor({ text, cause }: { text: string; cause: unknown }) {\n super({\n name,\n message:\n `JSON parsing failed: ` +\n `Text: ${text}.\\n` +\n `Error message: ${getErrorMessage(cause)}`,\n cause,\n });\n\n this.text = text;\n }\n\n static isInstance(error: unknown): error is JSONParseError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_LoadAPIKeyError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class LoadAPIKeyError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({ message }: { message: string }) {\n super({ name, message });\n }\n\n static isInstance(error: unknown): error is LoadAPIKeyError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_LoadSettingError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class LoadSettingError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({ message }: { message: string }) {\n super({ name, message });\n }\n\n static isInstance(error: unknown): error is LoadSettingError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_NoContentGeneratedError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\nThrown when the AI provider fails to generate any content.\n */\nexport class NoContentGeneratedError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({\n message = 'No content generated.',\n }: { message?: string } = {}) {\n super({ name, message 
});\n }\n\n static isInstance(error: unknown): error is NoContentGeneratedError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_NoSuchModelError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class NoSuchModelError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly modelId: string;\n readonly modelType: 'languageModel' | 'textEmbeddingModel' | 'imageModel';\n\n constructor({\n errorName = name,\n modelId,\n modelType,\n message = `No such ${modelType}: ${modelId}`,\n }: {\n errorName?: string;\n modelId: string;\n modelType: 'languageModel' | 'textEmbeddingModel' | 'imageModel';\n message?: string;\n }) {\n super({ name: errorName, message });\n\n this.modelId = modelId;\n this.modelType = modelType;\n }\n\n static isInstance(error: unknown): error is NoSuchModelError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_TooManyEmbeddingValuesForCallError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class TooManyEmbeddingValuesForCallError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly provider: string;\n readonly modelId: string;\n readonly maxEmbeddingsPerCall: number;\n readonly values: Array<unknown>;\n\n constructor(options: {\n provider: string;\n modelId: string;\n maxEmbeddingsPerCall: number;\n values: Array<unknown>;\n }) {\n super({\n name,\n message:\n `Too many values for a single embedding call. ` +\n `The ${options.provider} model \"${options.modelId}\" can only embed up to ` +\n `${options.maxEmbeddingsPerCall} values per call, but ${options.values.length} values were provided.`,\n });\n\n this.provider = options.provider;\n this.modelId = options.modelId;\n this.maxEmbeddingsPerCall = options.maxEmbeddingsPerCall;\n this.values = options.values;\n }\n\n static isInstance(\n error: unknown,\n ): error is TooManyEmbeddingValuesForCallError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\nimport { getErrorMessage } from './get-error-message';\n\nconst name = 'AI_TypeValidationError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class TypeValidationError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly value: unknown;\n\n constructor({ value, cause }: { value: unknown; cause: unknown }) {\n super({\n name,\n message:\n `Type validation failed: ` +\n `Value: ${JSON.stringify(value)}.\\n` +\n `Error message: ${getErrorMessage(cause)}`,\n cause,\n });\n\n this.value = value;\n }\n\n static isInstance(error: unknown): error is TypeValidationError {\n return AISDKError.hasMarker(error, marker);\n }\n\n /**\n * Wraps an error into a TypeValidationError.\n * If the cause is already a TypeValidationError with the same value, it returns the cause.\n * Otherwise, it creates a new TypeValidationError.\n *\n * @param {Object} params - The parameters for wrapping the error.\n * @param {unknown} params.value - The value that failed validation.\n * @param {unknown} params.cause - The original error or cause of the validation failure.\n * @returns {TypeValidationError} A TypeValidationError instance.\n */\n static wrap({\n value,\n cause,\n }: {\n value: unknown;\n cause: unknown;\n }): TypeValidationError {\n return 
TypeValidationError.isInstance(cause) && cause.value === value\n ? cause\n : new TypeValidationError({ value, cause });\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_UnsupportedFunctionalityError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class UnsupportedFunctionalityError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly functionality: string;\n\n constructor({\n functionality,\n message = `'${functionality}' functionality not supported.`,\n }: {\n functionality: string;\n message?: string;\n }) {\n super({ name, message });\n this.functionality = functionality;\n }\n\n static isInstance(error: unknown): error is UnsupportedFunctionalityError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { JSONArray, JSONObject, JSONValue } from './json-value';\n\nexport function isJSONValue(value: unknown): value is JSONValue {\n if (\n value === null ||\n typeof value === 'string' ||\n typeof value === 'number' ||\n typeof value === 'boolean'\n ) {\n return true;\n }\n\n if (Array.isArray(value)) {\n return value.every(isJSONValue);\n }\n\n if (typeof value === 'object') {\n return Object.entries(value).every(\n ([key, val]) => typeof key === 'string' && isJSONValue(val),\n );\n }\n\n return false;\n}\n\nexport function isJSONArray(value: unknown): value is JSONArray {\n return Array.isArray(value) && value.every(isJSONValue);\n}\n\nexport function isJSONObject(value: unknown): value is JSONObject {\n return (\n value != null &&\n typeof value === 'object' &&\n Object.entries(value).every(\n ([key, val]) => typeof key === 'string' && isJSONValue(val),\n )\n );\n}\n","import { type LanguageModelV1Prompt, UnsupportedFunctionalityError } from \"@ai-sdk/provider\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\nexport function convertToWorkersAIChatMessages(prompt: LanguageModelV1Prompt): WorkersAIChatPrompt {\n\tconst messages: WorkersAIChatPrompt = [];\n\n\tfor (const { role, content } of prompt) {\n\t\tswitch (role) {\n\t\t\tcase \"system\": {\n\t\t\t\tmessages.push({ role: \"system\", content });\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"user\": {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: content\n\t\t\t\t\t\t.map((part) => {\n\t\t\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\t\t\treturn part.text;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcase \"image\": {\n\t\t\t\t\t\t\t\t\tthrow new UnsupportedFunctionalityError({\n\t\t\t\t\t\t\t\t\t\tfunctionality: \"image-part\",\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.join(\"\"),\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"assistant\": {\n\t\t\t\tlet text = \"\";\n\t\t\t\tconst toolCalls: Array<{\n\t\t\t\t\tid: string;\n\t\t\t\t\ttype: \"function\";\n\t\t\t\t\tfunction: { name: string; arguments: string };\n\t\t\t\t}> = [];\n\n\t\t\t\tfor (const part of content) {\n\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\ttext += part.text;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcase \"tool-call\": {\n\t\t\t\t\t\t\ttext = JSON.stringify({\n\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\tparameters: part.args,\n\t\t\t\t\t\t\t});\n\n\t\t\t\t\t\t\ttoolCalls.push({\n\t\t\t\t\t\t\t\tid: part.toolCallId,\n\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\tfunction: {\n\t\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\t\targuments: 
JSON.stringify(part.args),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdefault: {\n\t\t\t\t\t\t\tconst exhaustiveCheck = part;\n\t\t\t\t\t\t\tthrow new Error(`Unsupported part: ${exhaustiveCheck}`);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\tcontent: text,\n\t\t\t\t\ttool_calls:\n\t\t\t\t\t\ttoolCalls.length > 0\n\t\t\t\t\t\t\t? toolCalls.map(({ function: { name, arguments: args } }) => ({\n\t\t\t\t\t\t\t\t\tid: \"null\",\n\t\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\t\tfunction: { name, arguments: args },\n\t\t\t\t\t\t\t\t}))\n\t\t\t\t\t\t\t: undefined,\n\t\t\t\t});\n\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"tool\": {\n\t\t\t\tfor (const toolResponse of content) {\n\t\t\t\t\tmessages.push({\n\t\t\t\t\t\trole: \"tool\",\n\t\t\t\t\t\tname: toolResponse.toolName,\n\t\t\t\t\t\tcontent: JSON.stringify(toolResponse.result),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = role satisfies never;\n\t\t\t\tthrow new Error(`Unsupported role: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn messages;\n}\n","// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.\n// This module is browser compatible.\n/**\n * Transform a stream into a stream where each chunk is divided by a newline,\n * be it `\\n` or `\\r\\n`. `\\r` can be enabled via the `allowCR` option.\n *\n * @example\n * ```ts\n * import { TextLineStream } from \"@std/streams/text-line-stream\";\n *\n * const res = await fetch(\"https://example.com\");\n * const lines = res.body!\n * .pipeThrough(new TextDecoderStream())\n * .pipeThrough(new TextLineStream());\n * ```\n */\nexport class TextLineStream extends TransformStream {\n #currentLine = \"\";\n /** Constructs a new instance. */\n constructor(options = { allowCR: false }) {\n super({\n transform: (chars, controller) => {\n chars = this.#currentLine + chars;\n while (true) {\n const lfIndex = chars.indexOf(\"\\n\");\n const crIndex = options.allowCR ? chars.indexOf(\"\\r\") : -1;\n if (crIndex !== -1 && crIndex !== (chars.length - 1) &&\n (lfIndex === -1 || (lfIndex - 1) > crIndex)) {\n controller.enqueue(chars.slice(0, crIndex));\n chars = chars.slice(crIndex + 1);\n continue;\n }\n if (lfIndex === -1)\n break;\n const endIndex = chars[lfIndex - 1] === \"\\r\" ? lfIndex - 1 : lfIndex;\n controller.enqueue(chars.slice(0, endIndex));\n chars = chars.slice(lfIndex + 1);\n }\n this.#currentLine = chars;\n },\n flush: (controller) => {\n if (this.#currentLine === \"\")\n return;\n const currentLine = options.allowCR && this.#currentLine.endsWith(\"\\r\")\n ? 
this.#currentLine.slice(0, -1)\n : this.#currentLine;\n controller.enqueue(currentLine);\n },\n });\n }\n}\n","import { TextLineStream } from './deps/jsr.io/@std/streams/0.221.0/text_line_stream.js';\nexport function stream(input) {\n let decoder = new TextDecoderStream();\n let split = new TextLineStream({ allowCR: true });\n return input.pipeThrough(decoder).pipeThrough(split);\n}\nexport function split(input) {\n let rgx = /[:]\\s*/;\n let match = rgx.exec(input);\n // \": comment\" -> index=0 -> ignore\n let idx = match && match.index;\n if (idx) {\n return [\n input.substring(0, idx),\n input.substring(idx + match[0].length),\n ];\n }\n}\nexport function fallback(headers, key, value) {\n let tmp = headers.get(key);\n if (!tmp)\n headers.set(key, value);\n}\n","import * as utils from './utils.js';\n/**\n * Convert a `Response` body containing Server Sent Events (SSE) into an Async Iterator that yields {@linkcode ServerSentEventMessage} objects.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events}\n *\n * @example\n * ```js\n * // Optional\n * let abort = new AbortController;\n *\n * // Manually fetch a Response\n * let res = await fetch('https://...', {\n * method: 'POST',\n * signal: abort.signal,\n * headers: {\n * 'api-key': 'token <value>',\n * 'content-type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true, // <- hypothetical\n * // ...\n * })\n * });\n *\n * if (res.ok) {\n * let stream = events(res, abort.signal);\n * for await (let event of stream) {\n * console.log('<<', event.data);\n * }\n * }\n * ```\n */\nexport async function* events(res, signal) {\n // TODO: throw error?\n if (!res.body)\n return;\n let iter = utils.stream(res.body);\n let line, reader = iter.getReader();\n let event;\n for (;;) {\n if (signal && signal.aborted) {\n return reader.cancel();\n }\n line = await reader.read();\n if (line.done)\n return;\n if (!line.value) {\n if (event)\n yield event;\n event = undefined;\n continue;\n }\n let [field, value] = utils.split(line.value) || [];\n if (!field)\n continue; // comment or invalid\n if (field === 'data') {\n event ||= {};\n event[field] = event[field] ? 
(event[field] + '\\n' + value) : value;\n }\n else if (field === 'event') {\n event ||= {};\n event[field] = value;\n }\n else if (field === 'id') {\n event ||= {};\n event[field] = +value || value;\n }\n else if (field === 'retry') {\n event ||= {};\n event[field] = +value || undefined;\n }\n }\n}\n/**\n * Convenience function that will `fetch` with the given arguments and, if ok, will return the {@linkcode events} async iterator.\n *\n * If the response is not ok (status 200-299), the `Response` is thrown.\n *\n * @example\n * ```js\n * // NOTE: throws `Response` if not 2xx status\n * let events = await stream('https://api.openai.com/...', {\n * method: 'POST',\n * headers: {\n * 'Authorization': 'Bearer <token>',\n * 'Content-Type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true,\n * // ...\n * })\n * });\n *\n * for await (let event of events) {\n * console.log('<<', JSON.parse(event.data));\n * }\n * ```\n */\nexport async function stream(input, init) {\n let req = new Request(input, init);\n utils.fallback(req.headers, 'Accept', 'text/event-stream');\n utils.fallback(req.headers, 'Content-Type', 'application/json');\n let r = await fetch(req);\n if (!r.ok)\n throw r;\n return events(r, req.signal);\n}\n","export function mapWorkersAIUsage(output: AiTextGenerationOutput | AiTextToImageOutput) {\n\tconst usage = (\n\t\toutput as {\n\t\t\tusage: { prompt_tokens: number; completion_tokens: number };\n\t\t}\n\t).usage ?? {\n\t\tprompt_tokens: 0,\n\t\tcompletion_tokens: 0,\n\t};\n\n\treturn {\n\t\tpromptTokens: usage.prompt_tokens,\n\t\tcompletionTokens: usage.completion_tokens,\n\t};\n}\n","import {\n\ttype LanguageModelV1,\n\ttype LanguageModelV1CallWarning,\n\ttype LanguageModelV1StreamPart,\n\tUnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\nimport { events } from \"fetch-event-stream\";\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\ntype WorkersAIChatConfig = {\n\tprovider: string;\n\tbinding: Ai;\n\tgateway?: GatewayOptions;\n};\n\nexport class WorkersAIChatLanguageModel implements LanguageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\treadonly defaultObjectGenerationMode = \"json\";\n\n\treadonly modelId: TextGenerationModels;\n\treadonly settings: WorkersAIChatSettings;\n\n\tprivate readonly config: WorkersAIChatConfig;\n\n\tconstructor(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings: WorkersAIChatSettings,\n\t\tconfig: WorkersAIChatConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tprivate getArgs({\n\t\tmode,\n\t\tprompt,\n\t\tmaxTokens,\n\t\ttemperature,\n\t\ttopP,\n\t\tfrequencyPenalty,\n\t\tpresencePenalty,\n\t\tseed,\n\t}: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n\t\tconst type = mode.type;\n\n\t\tconst warnings: LanguageModelV1CallWarning[] = [];\n\n\t\tif (frequencyPenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"frequencyPenalty\",\n\t\t\t});\n\t\t}\n\n\t\tif (presencePenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"presencePenalty\",\n\t\t\t});\n\t\t}\n\n\t\tconst 
baseArgs = {\n\t\t\t// model id:\n\t\t\tmodel: this.modelId,\n\n\t\t\t// model specific settings:\n\t\t\tsafe_prompt: this.settings.safePrompt,\n\n\t\t\t// standardized settings:\n\t\t\tmax_tokens: maxTokens,\n\t\t\ttemperature,\n\t\t\ttop_p: topP,\n\t\t\trandom_seed: seed,\n\n\t\t\t// messages:\n\t\t\tmessages: convertToWorkersAIChatMessages(prompt),\n\t\t};\n\n\t\tswitch (type) {\n\t\t\tcase \"regular\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-json\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\tresponse_format: {\n\t\t\t\t\t\t\ttype: \"json_schema\",\n\t\t\t\t\t\t\tjson_schema: mode.schema,\n\t\t\t\t\t\t},\n\t\t\t\t\t\ttools: undefined,\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-tool\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\ttool_choice: \"any\",\n\t\t\t\t\t\ttools: [{ type: \"function\", function: mode.tool }],\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\t// @ts-expect-error - this is unreachable code\n\t\t\t// TODO: fixme\n\t\t\tcase \"object-grammar\": {\n\t\t\t\tthrow new UnsupportedFunctionalityError({\n\t\t\t\t\tfunctionality: \"object-grammar mode\",\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\t\tthrow new Error(`Unsupported type: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync doGenerate(\n\t\toptions: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst output = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmessages: args.messages,\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? this.settings.gateway },\n\t\t);\n\n\t\tif (output instanceof ReadableStream) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\treturn {\n\t\t\ttext:\n\t\t\t\ttypeof output.response === \"object\" && output.response !== null\n\t\t\t\t\t? 
JSON.stringify(output.response) // ai-sdk expects a string here\n\t\t\t\t\t: output.response,\n\t\t\ttoolCalls: output.tool_calls?.map((toolCall) => ({\n\t\t\t\ttoolCallType: \"function\",\n\t\t\t\ttoolCallId: toolCall.name,\n\t\t\t\ttoolName: toolCall.name,\n\t\t\t\targs: JSON.stringify(toolCall.arguments || {}),\n\t\t\t})),\n\t\t\tfinishReason: \"stop\", // TODO: mapWorkersAIFinishReason(response.finish_reason),\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\tusage: mapWorkersAIUsage(output),\n\t\t\twarnings,\n\t\t};\n\t}\n\n\tasync doStream(\n\t\toptions: Parameters<LanguageModelV1[\"doStream\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\t// [1] When the latest message is not a tool response, we use the regular generate function\n\t\t// and simulate it as a streamed response in order to satisfy the AI SDK's interface for\n\t\t// doStream...\n\t\tif (args.tools?.length && lastMessageWasUser(args.messages)) {\n\t\t\tconst response = await this.doGenerate(options);\n\n\t\t\tif (response instanceof ReadableStream) {\n\t\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\tstream: new ReadableStream<LanguageModelV1StreamPart>({\n\t\t\t\t\tasync start(controller) {\n\t\t\t\t\t\tif (response.text) {\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\t\t\ttextDelta: response.text,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (response.toolCalls) {\n\t\t\t\t\t\t\tfor (const toolCall of response.toolCalls) {\n\t\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\t\ttype: \"tool-call\",\n\t\t\t\t\t\t\t\t\t...toolCall,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\ttype: \"finish\",\n\t\t\t\t\t\t\tfinishReason: \"stop\",\n\t\t\t\t\t\t\tusage: response.usage,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcontroller.close();\n\t\t\t\t\t},\n\t\t\t\t}),\n\t\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\t\twarnings,\n\t\t\t};\n\t\t}\n\n\t\t// [2] ...otherwise, we just proceed as normal and stream the response directly from the remote model.\n\t\tconst response = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmessages: args.messages,\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\tstream: true,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? 
this.settings.gateway },\n\t\t);\n\n\t\tif (!(response instanceof ReadableStream)) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\tconst chunkEvent = events(new Response(response));\n\t\tlet usage = { promptTokens: 0, completionTokens: 0 };\n\n\t\treturn {\n\t\t\tstream: new ReadableStream<LanguageModelV1StreamPart>({\n\t\t\t\tasync start(controller) {\n\t\t\t\t\tfor await (const event of chunkEvent) {\n\t\t\t\t\t\tif (!event.data) {\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (event.data === \"[DONE]\") {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tconst chunk = JSON.parse(event.data);\n\t\t\t\t\t\tif (chunk.usage) {\n\t\t\t\t\t\t\tusage = mapWorkersAIUsage(chunk);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tchunk.response?.length &&\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\t\t\ttextDelta: chunk.response,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"finish\",\n\t\t\t\t\t\tfinishReason: \"stop\",\n\t\t\t\t\t\tusage: usage,\n\t\t\t\t\t});\n\t\t\t\t\tcontroller.close();\n\t\t\t\t},\n\t\t\t}),\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\twarnings,\n\t\t};\n\t}\n}\n\nfunction prepareToolsAndToolChoice(\n\tmode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n\t\ttype: \"regular\";\n\t},\n) {\n\t// when the tools array is empty, change it to undefined to prevent errors:\n\tconst tools = mode.tools?.length ? mode.tools : undefined;\n\n\tif (tools == null) {\n\t\treturn { tools: undefined, tool_choice: undefined };\n\t}\n\n\tconst mappedTools = tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\t// @ts-expect-error - description is not a property of tool\n\t\t\tdescription: tool.description,\n\t\t\t// @ts-expect-error - parameters is not a property of tool\n\t\t\tparameters: tool.parameters,\n\t\t},\n\t}));\n\n\tconst toolChoice = mode.toolChoice;\n\n\tif (toolChoice == null) {\n\t\treturn { tools: mappedTools, tool_choice: undefined };\n\t}\n\n\tconst type = toolChoice.type;\n\n\tswitch (type) {\n\t\tcase \"auto\":\n\t\t\treturn { tools: mappedTools, tool_choice: type };\n\t\tcase \"none\":\n\t\t\treturn { tools: mappedTools, tool_choice: type };\n\t\tcase \"required\":\n\t\t\treturn { tools: mappedTools, tool_choice: \"any\" };\n\n\t\t// workersAI does not support tool mode directly,\n\t\t// so we filter the tools and force the tool choice through 'any'\n\t\tcase \"tool\":\n\t\t\treturn {\n\t\t\t\ttools: mappedTools.filter((tool) => tool.function.name === toolChoice.toolName),\n\t\t\t\ttool_choice: \"any\",\n\t\t\t};\n\t\tdefault: {\n\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\tthrow new Error(`Unsupported tool choice type: ${exhaustiveCheck}`);\n\t\t}\n\t}\n}\n\nfunction lastMessageWasUser(messages: WorkersAIChatPrompt) {\n\treturn messages.length > 0 && messages[messages.length - 1].role === \"user\";\n}\n","import type { ImageModelV1, ImageModelV1CallWarning } from \"@ai-sdk/provider\";\nimport type { WorkersAIImageConfig } from \"./workersai-image-config\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels } from \"./workersai-models\";\n\nexport class WorkersAIImageModel implements ImageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\n\tget maxImagesPerCall(): number {\n\t\treturn this.settings.maxImagesPerCall ?? 
1;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\tconstructor(\n\t\treadonly modelId: ImageGenerationModels,\n\t\treadonly settings: WorkersAIImageSettings,\n\t\treadonly config: WorkersAIImageConfig,\n\t) {}\n\n\tasync doGenerate({\n\t\tprompt,\n\t\tn,\n\t\tsize,\n\t\taspectRatio,\n\t\tseed,\n\t\t// headers,\n\t\t// abortSignal,\n\t}: Parameters<ImageModelV1[\"doGenerate\"]>[0]): Promise<\n\t\tAwaited<ReturnType<ImageModelV1[\"doGenerate\"]>>\n\t> {\n\t\tconst { width, height } = getDimensionsFromSizeString(size);\n\n\t\tconst warnings: Array<ImageModelV1CallWarning> = [];\n\n\t\tif (aspectRatio != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"aspectRatio\",\n\t\t\t\tdetails: \"This model does not support aspect ratio. Use `size` instead.\",\n\t\t\t});\n\t\t}\n\n\t\tconst generateImage = async () => {\n\t\t\tconst outputStream: ReadableStream<Uint8Array> = await this.config.binding.run(\n\t\t\t\tthis.modelId,\n\t\t\t\t{\n\t\t\t\t\tprompt,\n\t\t\t\t\tseed,\n\t\t\t\t\twidth,\n\t\t\t\t\theight,\n\t\t\t\t},\n\t\t\t);\n\n\t\t\t// Convert the output stream to a Uint8Array.\n\t\t\treturn streamToUint8Array(outputStream);\n\t\t};\n\n\t\tconst images: Uint8Array[] = await Promise.all(\n\t\t\tArray.from({ length: n }, () => generateImage()),\n\t\t);\n\n\t\t// type AiTextToImageOutput = ReadableStream<Uint8Array>;\n\n\t\treturn {\n\t\t\timages,\n\t\t\twarnings,\n\t\t\tresponse: {\n\t\t\t\ttimestamp: new Date(),\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\theaders: {},\n\t\t\t},\n\t\t};\n\t}\n}\n\nfunction getDimensionsFromSizeString(size: string | undefined) {\n\tconst [width, height] = size?.split(\"x\") ?? [undefined, undefined];\n\n\treturn {\n\t\twidth: parseInteger(width),\n\t\theight: parseInteger(height),\n\t};\n}\n\nfunction parseInteger(value?: string) {\n\tif (value === \"\" || !value) return undefined;\n\tconst number = Number(value);\n\treturn Number.isInteger(number) ? number : undefined;\n}\n\nasync function streamToUint8Array(stream: ReadableStream<Uint8Array>): Promise<Uint8Array> {\n\tconst reader = stream.getReader();\n\tconst chunks: Uint8Array[] = [];\n\tlet totalLength = 0;\n\n\t// Read the stream until it is finished.\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tchunks.push(value);\n\t\ttotalLength += value.length;\n\t}\n\n\t// Allocate a new Uint8Array to hold all the data.\n\tconst result = new Uint8Array(totalLength);\n\tlet offset = 0;\n\tfor (const chunk of chunks) {\n\t\tresult.set(chunk, offset);\n\t\toffset += chunk.length;\n\t}\n\treturn result;\n}\n","import { createRun } from \"./utils\";\nimport { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport { WorkersAIImageModel } from \"./workersai-image-model\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels, TextGenerationModels } from \"./workersai-models\";\n\nexport type WorkersAISettings = (\n\t| {\n\t\t\t/**\n\t\t\t * Provide a Cloudflare AI binding.\n\t\t\t */\n\t\t\tbinding: Ai;\n\n\t\t\t/**\n\t\t\t * Credentials must be absent when a binding is given.\n\t\t\t */\n\t\t\taccountId?: never;\n\t\t\tapiKey?: never;\n\t }\n\t| {\n\t\t\t/**\n\t\t\t * Provide Cloudflare API credentials directly. 
Must be used if a binding is not specified.\n\t\t\t */\n\t\t\taccountId: string;\n\t\t\tapiKey: string;\n\t\t\t/**\n\t\t\t * Both binding must be absent if credentials are used directly.\n\t\t\t */\n\t\t\tbinding?: never;\n\t }\n) & {\n\t/**\n\t * Optionally specify a gateway.\n\t */\n\tgateway?: GatewayOptions;\n};\n\nexport interface WorkersAI {\n\t(modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings?: WorkersAIChatSettings,\n\t): WorkersAIChatLanguageModel;\n\n\t/**\n\t * Creates a model for image generation.\n\t **/\n\timage(modelId: ImageGenerationModels, settings?: WorkersAIImageSettings): WorkersAIImageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createWorkersAI(options: WorkersAISettings): WorkersAI {\n\t// Use a binding if one is directly provided. Otherwise use credentials to create\n\t// a `run` method that calls the Cloudflare REST API.\n\tlet binding: Ai | undefined;\n\n\tif (options.binding) {\n\t\tbinding = options.binding;\n\t} else {\n\t\tconst { accountId, apiKey } = options;\n\t\tbinding = {\n\t\t\trun: createRun({ accountId, apiKey }),\n\t\t} as Ai;\n\t}\n\n\tif (!binding) {\n\t\tthrow new Error(\"Either a binding or credentials must be provided.\");\n\t}\n\n\tconst createChatModel = (modelId: TextGenerationModels, settings: WorkersAIChatSettings = {}) =>\n\t\tnew WorkersAIChatLanguageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.chat\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst createImageModel = (\n\t\tmodelId: ImageGenerationModels,\n\t\tsettings: WorkersAIImageSettings = {},\n\t) =>\n\t\tnew WorkersAIImageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.image\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst provider = (modelId: TextGenerationModels, settings?: WorkersAIChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(modelId, settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\tprovider.image = createImageModel;\n\tprovider.imageModel = createImageModel;\n\n\treturn 
provider;\n}\n"],"mappings":";;;;;;;;;;;;AAuDO,SAAS,UAAU,QAAgC;AACzD,QAAM,EAAE,WAAW,OAAO,IAAI;AAG9B,SAAO,eAAe,IACrB,OACA,QACA,SAC0F;AAC1F,UAAM,MAAM,iDAAiD,SAAS,WAAW,KAAK;AAGtF,UAAM,UAAU;AAAA,MACf,gBAAgB;AAAA,MAChB,eAAe,UAAU,MAAM;AAAA,IAChC;AAEA,UAAM,OAAO,KAAK,UAAU,MAAM;AAGlC,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MACjC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,IACD,CAAC;AAGD,QAAI,SAAS,mBAAmB;AAC/B,aAAO;AAAA,IACR;AAGA,QAAK,OAAiC,WAAW,MAAM;AACtD,UAAI,SAAS,MAAM;AAClB,eAAO,SAAS;AAAA,MACjB;AACA,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC5D;AAGA,UAAM,OAAO,MAAM,SAAS,KAEzB;AACH,WAAO,KAAK;AAAA,EACb;AACD;;;AChGA,IAAM,SAAS;AACf,IAAM,SAAS,OAAO,IAAI,MAAM;AALhC,IAAA;AAWO,IAAM,cAAN,MAAMA,qBAAmB,MAAM;;;;;;;;;EAgBpC,YAAY;IACV,MAAAC;IACA;IACA;EACF,GAIG;AACD,UAAM,OAAO;AAxBf,SAAkB,EAAA,IAAU;AA0B1B,SAAK,OAAOA;AACZ,SAAK,QAAQ;EACf;;;;;;EAOA,OAAO,WAAW,OAAqC;AACrD,WAAOD,aAAW,UAAU,OAAO,MAAM;EAC3C;EAEA,OAAiB,UAAU,OAAgBE,UAAyB;AAClE,UAAM,eAAe,OAAO,IAAIA,QAAM;AACtC,WACE,SAAS,QACT,OAAO,UAAU,YACjB,gBAAgB,SAChB,OAAO,MAAM,YAAY,MAAM,aAC/B,MAAM,YAAY,MAAM;EAE5B;AACF;AAjDoB,KAAA;AADb,IAAM,aAAN;ACTP,IAAM,OAAO;AACb,IAAMA,UAAS,mBAAmB,IAAI;AACtC,IAAMC,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAOoBC,MAAAC;ACLpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAOoBI,MAAAC;AELpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAUoBC,MAAAC;ACRpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAWoBI,MAAAC;ACTpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAWoBI,MAAAC;ACRpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AALhC,IAAAH;AASoBI,MAAAC;ACPpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAOoBI,MAAAC;ACLpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAOoBI,MAAAC;ACLpB,IAAMC,QAAO;AACb,IAAMC,WAAS,mBAAmBD,KAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AAJhC,IAAAH;AAUoBI,OAAAC;ACRpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AAJhC,IAAAH;AAOoBI,OAAAC;ACLpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AAJhC,IAAAH;AAOoBI,OAAAC;ACJpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AALhC,IAAAH;AAQoBI,OAAAC;ACNpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AAJhC,IAAAE;AAMO,IAAM,gCAAN,cAA4C,WAAW;EAK5D,YAAY;IACV;IACA,UAAU,IAAI,aAAa;EAC7B,GAGG;AACD,UAAM,EAAE,MAAAH,QAAM,QAAQ,CAAC;AAXzB,SAAkBG,IAAAA,IAAU;AAY1B,SAAK,gBAAgB;EACvB;EAEA,OAAO,WAAW,OAAwD;AACxE,WAAO,WAAW,UAAU,OAAOF,QAAM;EAC3C;AACF;AAlBoBE,OAAAD;;;AEJb,SAAS,+BAA+B,QAAoD;AAClG,QAAM,WAAgC,CAAC;AAEvC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACvC,YAAQ,MAAM;AAAA,MACb,KAAK,UAAU;AACd,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACD;AAAA,MAEA,KAAK,QAAQ;AACZ,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS,QACP,IAAI,CAAC,SAAS;AACd,oBAAQ,KAAK,MAAM;AAAA,cAClB,KAAK,QAAQ;AACZ,uBAAO,KAAK;AAAA,cACb;AAAA,cACA,KAAK,SAAS;AACb,sBAAM,IAAI,8BAA8B;AAAA,kBACvC,eAAe;AAAA,gBAChB,CAAC;AAAA,cACF;AAAA,YACD;AAAA,UACD,CAAC,EACA,KAAK,EAAE;AAAA,QACV,CAAC;AACD;AAAA,MACD;AAAA,MAEA,KAAK,aAAa;AACjB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC3B,kBAAQ,KAAK,MAAM;AAAA,YAClB,KAAK,QAAQ;AACZ,sBAAQ,KAAK;AACb;AAAA,YACD;AAAA,YACA,KAAK,aAAa;AACjB,qBAAO,KAAK,UAAU;AAAA,gBACrB,MAAM,KAAK;AAAA,gBACX,YAAY,KAAK;AAAA,cAClB,CAAC;AAED,wBAAU,KAAK;AAAA,gBACd,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACT,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACpC;AAAA,cACD,CAAC;AACD;AAAA,YACD;AAAA,YACA,SAAS;AACR,oBAAM,kBAAkB;AAC
xB,oBAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,YACvD;AAAA,UACD;AAAA,QACD;AAEA,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACC,UAAU,SAAS,IAChB,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAAE,QAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC5D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAAA,QAAM,WAAW,KAAK;AAAA,UACnC,EAAE,IACD;AAAA,QACL,CAAC;AAED;AAAA,MACD;AAAA,MACA,KAAK,QAAQ;AACZ,mBAAW,gBAAgB,SAAS;AACnC,mBAAS,KAAK;AAAA,YACb,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC5C,CAAC;AAAA,QACF;AACA;AAAA,MACD;AAAA,MACA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAEA,SAAO;AACR;;;ACxGA;AAgBO,IAAM,iBAAN,cAA6B,gBAAgB;AAAA;AAAA,EAGhD,YAAY,UAAU,EAAE,SAAS,MAAM,GAAG;AACtC,UAAM;AAAA,MACF,WAAW,CAAC,OAAO,eAAe;AAC9B,gBAAQ,mBAAK,gBAAe;AAC5B,eAAO,MAAM;AACT,gBAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,gBAAM,UAAU,QAAQ,UAAU,MAAM,QAAQ,IAAI,IAAI;AACxD,cAAI,YAAY,MAAM,YAAa,MAAM,SAAS,MAC7C,YAAY,MAAO,UAAU,IAAK,UAAU;AAC7C,uBAAW,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC;AAC1C,oBAAQ,MAAM,MAAM,UAAU,CAAC;AAC/B;AAAA,UACJ;AACA,cAAI,YAAY;AACZ;AACJ,gBAAM,WAAW,MAAM,UAAU,CAAC,MAAM,OAAO,UAAU,IAAI;AAC7D,qBAAW,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC;AAC3C,kBAAQ,MAAM,MAAM,UAAU,CAAC;AAAA,QACnC;AACA,2BAAK,cAAe;AAAA,MACxB;AAAA,MACA,OAAO,CAAC,eAAe;AACnB,YAAI,mBAAK,kBAAiB;AACtB;AACJ,cAAM,cAAc,QAAQ,WAAW,mBAAK,cAAa,SAAS,IAAI,IAChE,mBAAK,cAAa,MAAM,GAAG,EAAE,IAC7B,mBAAK;AACX,mBAAW,QAAQ,WAAW;AAAA,MAClC;AAAA,IACJ,CAAC;AA/BL,qCAAe;AAAA,EAgCf;AACJ;AAjCI;;;AChBG,SAAS,OAAO,OAAO;AAC1B,MAAI,UAAU,IAAI,kBAAkB;AACpC,MAAIC,SAAQ,IAAI,eAAe,EAAE,SAAS,KAAK,CAAC;AAChD,SAAO,MAAM,YAAY,OAAO,EAAE,YAAYA,MAAK;AACvD;AACO,SAAS,MAAM,OAAO;AACzB,MAAI,MAAM;AACV,MAAI,QAAQ,IAAI,KAAK,KAAK;AAE1B,MAAI,MAAM,SAAS,MAAM;AACzB,MAAI,KAAK;AACL,WAAO;AAAA,MACH,MAAM,UAAU,GAAG,GAAG;AAAA,MACtB,MAAM,UAAU,MAAM,MAAM,CAAC,EAAE,MAAM;AAAA,IACzC;AAAA,EACJ;AACJ;;;ACgBA,gBAAuB,OAAO,KAAK,QAAQ;AAEvC,MAAI,CAAC,IAAI;AACL;AACJ,MAAI,OAAa,OAAO,IAAI,IAAI;AAChC,MAAI,MAAM,SAAS,KAAK,UAAU;AAClC,MAAI;AACJ,aAAS;AACL,QAAI,UAAU,OAAO,SAAS;AAC1B,aAAO,OAAO,OAAO;AAAA,IACzB;AACA,WAAO,MAAM,OAAO,KAAK;AACzB,QAAI,KAAK;AACL;AACJ,QAAI,CAAC,KAAK,OAAO;AACb,UAAI;AACA,cAAM;AACV,cAAQ;AACR;AAAA,IACJ;AACA,QAAI,CAAC,OAAO,KAAK,IAAU,MAAM,KAAK,KAAK,KAAK,CAAC;AACjD,QAAI,CAAC;AACD;AACJ,QAAI,UAAU,QAAQ;AAClB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,MAAM,KAAK,IAAK,MAAM,KAAK,IAAI,OAAO,QAAS;AAAA,IAClE,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI;AAAA,IACnB,WACS,UAAU,MAAM;AACrB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B;AAAA,EACJ;AACJ;;;ACzEO,SAAS,kBAAkB,QAAsD;AACvF,QAAM,QACL,OAGC,SAAS;AAAA,IACV,eAAe;AAAA,IACf,mBAAmB;AAAA,EACpB;AAEA,SAAO;AAAA,IACN,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,EACzB;AACD;;;ACMO,IAAM,6BAAN,MAA4D;AAAA,EASlE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,+BAA+B,MAAM;AAAA,IAChD;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;
AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,MAAM;AAAA,cACN,aAAa,KAAK;AAAA,YACnB;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAI,8BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,QACC,UAAU,KAAK;AAAA,QACf,YAAY,KAAK;AAAA,QACjB,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA,QAEZ,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,KAAK,SAAS,QAAQ;AAAA,IACzD;AAEA,QAAI,kBAAkB,gBAAgB;AACrC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,MACC,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,OACxD,KAAK,UAAU,OAAO,QAAQ,IAC9B,OAAO;AAAA,MACX,WAAW,OAAO,YAAY,IAAI,CAAC,cAAc;AAAA,QAChD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS;AAAA,QACnB,MAAM,KAAK,UAAU,SAAS,aAAa,CAAC,CAAC;AAAA,MAC9C,EAAE;AAAA,MACF,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAK/C,QAAI,KAAK,OAAO,UAAU,mBAAmB,KAAK,QAAQ,GAAG;AAC5D,YAAMC,YAAW,MAAM,KAAK,WAAW,OAAO;AAE9C,UAAIA,qBAAoB,gBAAgB;AACvC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACxC;AAEA,aAAO;AAAA,QACN,QAAQ,IAAI,eAA0C;AAAA,UACrD,MAAM,MAAM,YAAY;AACvB,gBAAIA,UAAS,MAAM;AAClB,yBAAW,QAAQ;AAAA,gBAClB,MAAM;AAAA,gBACN,WAAWA,UAAS;AAAA,cACrB,CAAC;AAAA,YACF;AACA,gBAAIA,UAAS,WAAW;AACvB,yBAAW,YAAYA,UAAS,WAAW;AAC1C,2BAAW,QAAQ;AAAA,kBAClB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACJ,CAAC;AAAA,cACF;AAAA,YACD;AACA,uBAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAOA,UAAS;AAAA,YACjB,CAAC;AACD,uBAAW,MAAM;AAAA,UAClB;AAAA,QACD,CAAC;AAAA,QACD,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,QACvD;AAAA,MACD;AAAA,IACD;AAGA,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC,UAAU,KAAK;AAAA,QACf,YAAY,KAAK;AAAA,QACjB,QAAQ;AAAA,QACR,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA,QAEZ,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,KAAK,SAAS,QAAQ;AAAA,IACzD;AAEA,QAAI,EAAE,oBAAoB,iBAAiB;AAC1C,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,UAAM,aAAa,OAAO,IAAI,SAAS,QAAQ,CAAC;AAChD,QAAI,QAAQ,EAAE,cAAc,GAAG,kBAAkB,EAAE;AAEnD,WAAO;AAAA,MACN,QAAQ,IAAI,eAA0C;AAAA,QACrD,MAAM,MAAM,YAAY;AACvB,2BAAiB,SAAS,YAAY;AACrC,gBAAI,CAAC,MAAM,MAAM;AAChB;AAAA,YACD;AACA,gBAAI,MAAM,SAAS,UAAU;AAC5B;AAAA,YACD;AACA,kBAAM,QAAQ,KAAK,MAAM,MAAM,IAAI;AACnC,gBAAI,MAAM,OAAO;AAChB,sBAAQ,kBAAkB,KAAK;AAAA,YAChC;AACA,kBAAM,UAAU,UACf,WAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,WAAW,MAAM;AAAA,YAClB,CAAC;AAAA,UACH;AACA,qBAAW,QAAQ;AAAA,YAClB,MAAM;AAAA,YACN,cAAc;AAAA,YACd;AAAA,UACD,CAAC;AACD,qBAAW,MAAM;AAAA,QAClB;AAAA,MACD,CAAC;AAAA,MACD,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,0BACR,MAGC;AAED,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AAClB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACxC,MAAM;AAAA,IACN,UAAU;AAAA,MACT,MAAM,KAAK;AAAA;AAAA,MAEX,aAAa,KAAK;AAAA;AAAA,MAElB,YAAY,KAAK;AAAA,IAClB;AAAA,EACD,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACvB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACrD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAC
hD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA;AAAA;AAAA,IAIjD,KAAK;AACJ,aAAO;AAAA,QACN,OAAO,YAAY,OAAO,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW,QAAQ;AAAA,QAC9E,aAAa;AAAA,MACd;AAAA,IACD,SAAS;AACR,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACnE;AAAA,EACD;AACD;AAEA,SAAS,mBAAmB,UAA+B;AAC1D,SAAO,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,CAAC,EAAE,SAAS;AACtE;;;ACzUO,IAAM,sBAAN,MAAkD;AAAA,EAUxD,YACU,SACA,UACA,QACR;AAHQ;AACA;AACA;AAZV,wBAAS,wBAAuB;AAAA,EAa7B;AAAA,EAXH,IAAI,mBAA2B;AAC9B,WAAO,KAAK,SAAS,oBAAoB;AAAA,EAC1C;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAOA,MAAM,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;AAAA,EAGD,GAEE;AACD,UAAM,EAAE,OAAO,OAAO,IAAI,4BAA4B,IAAI;AAE1D,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACxB,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY;AACjC,YAAM,eAA2C,MAAM,KAAK,OAAO,QAAQ;AAAA,QAC1E,KAAK;AAAA,QACL;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAGA,aAAO,mBAAmB,YAAY;AAAA,IACvC;AAEA,UAAM,SAAuB,MAAM,QAAQ;AAAA,MAC1C,MAAM,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAAC;AAAA,IAChD;AAIA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,MACA,UAAU;AAAA,QACT,WAAW,oBAAI,KAAK;AAAA,QACpB,SAAS,KAAK;AAAA,QACd,SAAS,CAAC;AAAA,MACX;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,4BAA4B,MAA0B;AAC9D,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,QAAW,MAAS;AAEjE,SAAO;AAAA,IACN,OAAO,aAAa,KAAK;AAAA,IACzB,QAAQ,aAAa,MAAM;AAAA,EAC5B;AACD;AAEA,SAAS,aAAa,OAAgB;AACrC,MAAI,UAAU,MAAM,CAAC,MAAO,QAAO;AACnC,QAAM,SAAS,OAAO,KAAK;AAC3B,SAAO,OAAO,UAAU,MAAM,IAAI,SAAS;AAC5C;AAEA,eAAe,mBAAmBC,SAAyD;AAC1F,QAAM,SAASA,QAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AAC9B,MAAI,cAAc;AAGlB,SAAO,MAAM;AACZ,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,WAAO,KAAK,KAAK;AACjB,mBAAe,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC3B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EACjB;AACA,SAAO;AACR;;;ACxDO,SAAS,gBAAgB,SAAuC;AAGtE,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACpB,cAAU,QAAQ;AAAA,EACnB,OAAO;AACN,UAAM,EAAE,WAAW,OAAO,IAAI;AAC9B,cAAU;AAAA,MACT,KAAK,UAAU,EAAE,WAAW,OAAO,CAAC;AAAA,IACrC;AAAA,EACD;AAEA,MAAI,CAAC,SAAS;AACb,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,kBAAkB,CAAC,SAA+B,WAAkC,CAAC,MAC1F,IAAI,2BAA2B,SAAS,UAAU;AAAA,IACjD,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,mBAAmB,CACxB,SACA,WAAmC,CAAC,MAEpC,IAAI,oBAAoB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,WAAW,CAAC,SAA+B,aAAqC;AACrF,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EACzC;AAEA,WAAS,OAAO;AAChB,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,SAAO;AACR;","names":["_AISDKError","name","marker","symbol","_a","_a","symbol","name","marker","_a","symbol","name","marker","symbol","_a","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","symbol","_a","name","split","response","stream"]}
1
+ {"version":3,"sources":["../src/utils.ts","../../../node_modules/@ai-sdk/provider/src/errors/ai-sdk-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/api-call-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/empty-response-body-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/get-error-message.ts","../../../node_modules/@ai-sdk/provider/src/errors/invalid-argument-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/invalid-prompt-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/invalid-response-data-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/json-parse-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/load-api-key-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/load-setting-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/no-content-generated-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/no-such-model-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/too-many-embedding-values-for-call-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/type-validation-error.ts","../../../node_modules/@ai-sdk/provider/src/errors/unsupported-functionality-error.ts","../../../node_modules/@ai-sdk/provider/src/json-value/is-json.ts","../src/convert-to-workersai-chat-messages.ts","../../../node_modules/fetch-event-stream/esm/deps/jsr.io/@std/streams/0.221.0/text_line_stream.js","../../../node_modules/fetch-event-stream/esm/utils.js","../../../node_modules/fetch-event-stream/esm/mod.js","../src/map-workersai-usage.ts","../src/workersai-chat-language-model.ts","../src/workersai-image-model.ts","../src/index.ts"],"sourcesContent":["/**\n * General AI run interface with overloads to handle distinct return types.\n *\n * The behaviour depends on the combination of parameters:\n * 1. `returnRawResponse: true` => returns the raw Response object.\n * 2. `stream: true` => returns a ReadableStream (if available).\n * 3. Otherwise => returns post-processed AI results.\n */\nexport interface AiRun {\n\t// (1) Return raw Response if `options.returnRawResponse` is `true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions: AiOptions & { returnRawResponse: true },\n\t): Promise<Response>;\n\n\t// (2) Return a stream if the input has `stream: true`.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"] & { stream: true },\n\t\toptions?: AiOptions,\n\t): Promise<ReadableStream<Uint8Array>>;\n\n\t// (3) Return post-processed outputs by default.\n\t<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions,\n\t): Promise<AiModels[Name][\"postProcessedOutputs\"]>;\n}\n\nexport type StringLike = string | { toString(): string };\n\n/**\n * Parameters for configuring the Cloudflare-based AI runner.\n */\nexport interface CreateRunConfig {\n\t/** Your Cloudflare account identifier. */\n\taccountId: string;\n\n\t/** Cloudflare API token/key with appropriate permissions. */\n\tapiKey: string;\n}\n\n/**\n * Creates a run method that emulates the Cloudflare Workers AI binding,\n * but uses the Cloudflare REST API under the hood. 
Headers and abort\n * signals are configured at creation time, rather than per-request.\n *\n * @param config An object containing:\n * - `accountId`: Cloudflare account identifier.\n * - `apiKey`: Cloudflare API token/key with suitable permissions.\n * - `headers`: Optional custom headers to merge with defaults.\n * - `signal`: Optional AbortSignal for request cancellation.\n *\n * @returns A function matching the AiRun interface.\n */\nexport function createRun(config: CreateRunConfig): AiRun {\n\tconst { accountId, apiKey } = config;\n\n\t// Return the AiRun-compatible function.\n\treturn async function run<Name extends keyof AiModels>(\n\t\tmodel: Name,\n\t\tinputs: AiModels[Name][\"inputs\"],\n\t\toptions?: AiOptions & Record<string, StringLike>,\n\t): Promise<Response | ReadableStream<Uint8Array> | AiModels[Name][\"postProcessedOutputs\"]> {\n\t\tconst { gateway, prefix, extraHeaders, returnRawResponse, ...passthroughOptions } =\n\t\t\toptions || {};\n\n\t\tconst urlParams = new URLSearchParams();\n\t\tfor (const [key, value] of Object.entries(passthroughOptions)) {\n\t\t\t// throw a useful error if the value is not to-stringable\n\t\t\ttry {\n\t\t\t\tconst valueStr = value.toString();\n\t\t\t\tif (!valueStr) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\turlParams.append(key, valueStr);\n\t\t\t} catch (error) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Value for option '${key}' is not able to be coerced into a string.`,\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tconst url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${urlParams ? `?${urlParams}` : \"\"}`;\n\n\t\t// Merge default and custom headers.\n\t\tconst headers = {\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t\tAuthorization: `Bearer ${apiKey}`,\n\t\t};\n\n\t\tconst body = JSON.stringify(inputs);\n\n\t\t// Execute the POST request. 
The optional AbortSignal is applied here.\n\t\tconst response = await fetch(url, {\n\t\t\tmethod: \"POST\",\n\t\t\theaders,\n\t\t\tbody,\n\t\t});\n\n\t\t// (1) If the user explicitly requests the raw Response, return it as-is.\n\t\tif (returnRawResponse) {\n\t\t\treturn response;\n\t\t}\n\n\t\t// (2) If the AI input requests streaming, return the ReadableStream if available.\n\t\tif ((inputs as AiTextGenerationInput).stream === true) {\n\t\t\tif (response.body) {\n\t\t\t\treturn response.body;\n\t\t\t}\n\t\t\tthrow new Error(\"No readable body available for streaming.\");\n\t\t}\n\n\t\t// (3) In all other cases, parse JSON and return the result field.\n\t\tconst data = await response.json<{\n\t\t\tresult: AiModels[Name][\"postProcessedOutputs\"];\n\t\t}>();\n\t\treturn data.result;\n\t};\n}\n","/**\n * Symbol used for identifying AI SDK Error instances.\n * Enables checking if an error is an instance of AISDKError across package versions.\n */\nconst marker = 'vercel.ai.error';\nconst symbol = Symbol.for(marker);\n\n/**\n * Custom error class for AI SDK related errors.\n * @extends Error\n */\nexport class AISDKError extends Error {\n private readonly [symbol] = true; // used in isInstance\n\n /**\n * The underlying cause of the error, if any.\n */\n readonly cause?: unknown;\n\n /**\n * Creates an AI SDK Error.\n *\n * @param {Object} params - The parameters for creating the error.\n * @param {string} params.name - The name of the error.\n * @param {string} params.message - The error message.\n * @param {unknown} [params.cause] - The underlying cause of the error.\n */\n constructor({\n name,\n message,\n cause,\n }: {\n name: string;\n message: string;\n cause?: unknown;\n }) {\n super(message);\n\n this.name = name;\n this.cause = cause;\n }\n\n /**\n * Checks if the given error is an AI SDK Error.\n * @param {unknown} error - The error to check.\n * @returns {boolean} True if the error is an AI SDK Error, false otherwise.\n */\n static isInstance(error: unknown): error is AISDKError {\n return AISDKError.hasMarker(error, marker);\n }\n\n protected static hasMarker(error: unknown, marker: string): boolean {\n const markerSymbol = Symbol.for(marker);\n return (\n error != null &&\n typeof error === 'object' &&\n markerSymbol in error &&\n typeof error[markerSymbol] === 'boolean' &&\n error[markerSymbol] === true\n );\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_APICallError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class APICallError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly url: string;\n readonly requestBodyValues: unknown;\n readonly statusCode?: number;\n\n readonly responseHeaders?: Record<string, string>;\n readonly responseBody?: string;\n\n readonly isRetryable: boolean;\n readonly data?: unknown;\n\n constructor({\n message,\n url,\n requestBodyValues,\n statusCode,\n responseHeaders,\n responseBody,\n cause,\n isRetryable = statusCode != null &&\n (statusCode === 408 || // request timeout\n statusCode === 409 || // conflict\n statusCode === 429 || // too many requests\n statusCode >= 500), // server error\n data,\n }: {\n message: string;\n url: string;\n requestBodyValues: unknown;\n statusCode?: number;\n responseHeaders?: Record<string, string>;\n responseBody?: string;\n cause?: unknown;\n isRetryable?: boolean;\n data?: unknown;\n }) {\n super({ name, message, cause });\n\n this.url = url;\n this.requestBodyValues = requestBodyValues;\n 
this.statusCode = statusCode;\n this.responseHeaders = responseHeaders;\n this.responseBody = responseBody;\n this.isRetryable = isRetryable;\n this.data = data;\n }\n\n static isInstance(error: unknown): error is APICallError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_EmptyResponseBodyError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class EmptyResponseBodyError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({ message = 'Empty response body' }: { message?: string } = {}) {\n super({ name, message });\n }\n\n static isInstance(error: unknown): error is EmptyResponseBodyError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","export function getErrorMessage(error: unknown | undefined) {\n if (error == null) {\n return 'unknown error';\n }\n\n if (typeof error === 'string') {\n return error;\n }\n\n if (error instanceof Error) {\n return error.message;\n }\n\n return JSON.stringify(error);\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_InvalidArgumentError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\n * A function argument is invalid.\n */\nexport class InvalidArgumentError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly argument: string;\n\n constructor({\n message,\n cause,\n argument,\n }: {\n argument: string;\n message: string;\n cause?: unknown;\n }) {\n super({ name, message, cause });\n\n this.argument = argument;\n }\n\n static isInstance(error: unknown): error is InvalidArgumentError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_InvalidPromptError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\n * A prompt is invalid. 
This error should be thrown by providers when they cannot\n * process a prompt.\n */\nexport class InvalidPromptError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly prompt: unknown;\n\n constructor({\n prompt,\n message,\n cause,\n }: {\n prompt: unknown;\n message: string;\n cause?: unknown;\n }) {\n super({ name, message: `Invalid prompt: ${message}`, cause });\n\n this.prompt = prompt;\n }\n\n static isInstance(error: unknown): error is InvalidPromptError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_InvalidResponseDataError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\n * Server returned a response with invalid data content.\n * This should be thrown by providers when they cannot parse the response from the API.\n */\nexport class InvalidResponseDataError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly data: unknown;\n\n constructor({\n data,\n message = `Invalid response data: ${JSON.stringify(data)}.`,\n }: {\n data: unknown;\n message?: string;\n }) {\n super({ name, message });\n\n this.data = data;\n }\n\n static isInstance(error: unknown): error is InvalidResponseDataError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\nimport { getErrorMessage } from './get-error-message';\n\nconst name = 'AI_JSONParseError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n// TODO v5: rename to ParseError\nexport class JSONParseError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly text: string;\n\n constructor({ text, cause }: { text: string; cause: unknown }) {\n super({\n name,\n message:\n `JSON parsing failed: ` +\n `Text: ${text}.\\n` +\n `Error message: ${getErrorMessage(cause)}`,\n cause,\n });\n\n this.text = text;\n }\n\n static isInstance(error: unknown): error is JSONParseError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_LoadAPIKeyError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class LoadAPIKeyError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({ message }: { message: string }) {\n super({ name, message });\n }\n\n static isInstance(error: unknown): error is LoadAPIKeyError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_LoadSettingError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class LoadSettingError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({ message }: { message: string }) {\n super({ name, message });\n }\n\n static isInstance(error: unknown): error is LoadSettingError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_NoContentGeneratedError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\n/**\nThrown when the AI provider fails to generate any content.\n */\nexport class NoContentGeneratedError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n constructor({\n message = 'No content generated.',\n }: { message?: string } = {}) {\n super({ name, message 
});\n }\n\n static isInstance(error: unknown): error is NoContentGeneratedError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_NoSuchModelError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class NoSuchModelError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly modelId: string;\n readonly modelType: 'languageModel' | 'textEmbeddingModel' | 'imageModel';\n\n constructor({\n errorName = name,\n modelId,\n modelType,\n message = `No such ${modelType}: ${modelId}`,\n }: {\n errorName?: string;\n modelId: string;\n modelType: 'languageModel' | 'textEmbeddingModel' | 'imageModel';\n message?: string;\n }) {\n super({ name: errorName, message });\n\n this.modelId = modelId;\n this.modelType = modelType;\n }\n\n static isInstance(error: unknown): error is NoSuchModelError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_TooManyEmbeddingValuesForCallError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class TooManyEmbeddingValuesForCallError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly provider: string;\n readonly modelId: string;\n readonly maxEmbeddingsPerCall: number;\n readonly values: Array<unknown>;\n\n constructor(options: {\n provider: string;\n modelId: string;\n maxEmbeddingsPerCall: number;\n values: Array<unknown>;\n }) {\n super({\n name,\n message:\n `Too many values for a single embedding call. ` +\n `The ${options.provider} model \"${options.modelId}\" can only embed up to ` +\n `${options.maxEmbeddingsPerCall} values per call, but ${options.values.length} values were provided.`,\n });\n\n this.provider = options.provider;\n this.modelId = options.modelId;\n this.maxEmbeddingsPerCall = options.maxEmbeddingsPerCall;\n this.values = options.values;\n }\n\n static isInstance(\n error: unknown,\n ): error is TooManyEmbeddingValuesForCallError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { AISDKError } from './ai-sdk-error';\nimport { getErrorMessage } from './get-error-message';\n\nconst name = 'AI_TypeValidationError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class TypeValidationError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly value: unknown;\n\n constructor({ value, cause }: { value: unknown; cause: unknown }) {\n super({\n name,\n message:\n `Type validation failed: ` +\n `Value: ${JSON.stringify(value)}.\\n` +\n `Error message: ${getErrorMessage(cause)}`,\n cause,\n });\n\n this.value = value;\n }\n\n static isInstance(error: unknown): error is TypeValidationError {\n return AISDKError.hasMarker(error, marker);\n }\n\n /**\n * Wraps an error into a TypeValidationError.\n * If the cause is already a TypeValidationError with the same value, it returns the cause.\n * Otherwise, it creates a new TypeValidationError.\n *\n * @param {Object} params - The parameters for wrapping the error.\n * @param {unknown} params.value - The value that failed validation.\n * @param {unknown} params.cause - The original error or cause of the validation failure.\n * @returns {TypeValidationError} A TypeValidationError instance.\n */\n static wrap({\n value,\n cause,\n }: {\n value: unknown;\n cause: unknown;\n }): TypeValidationError {\n return 
TypeValidationError.isInstance(cause) && cause.value === value\n ? cause\n : new TypeValidationError({ value, cause });\n }\n}\n","import { AISDKError } from './ai-sdk-error';\n\nconst name = 'AI_UnsupportedFunctionalityError';\nconst marker = `vercel.ai.error.${name}`;\nconst symbol = Symbol.for(marker);\n\nexport class UnsupportedFunctionalityError extends AISDKError {\n private readonly [symbol] = true; // used in isInstance\n\n readonly functionality: string;\n\n constructor({\n functionality,\n message = `'${functionality}' functionality not supported.`,\n }: {\n functionality: string;\n message?: string;\n }) {\n super({ name, message });\n this.functionality = functionality;\n }\n\n static isInstance(error: unknown): error is UnsupportedFunctionalityError {\n return AISDKError.hasMarker(error, marker);\n }\n}\n","import { JSONArray, JSONObject, JSONValue } from './json-value';\n\nexport function isJSONValue(value: unknown): value is JSONValue {\n if (\n value === null ||\n typeof value === 'string' ||\n typeof value === 'number' ||\n typeof value === 'boolean'\n ) {\n return true;\n }\n\n if (Array.isArray(value)) {\n return value.every(isJSONValue);\n }\n\n if (typeof value === 'object') {\n return Object.entries(value).every(\n ([key, val]) => typeof key === 'string' && isJSONValue(val),\n );\n }\n\n return false;\n}\n\nexport function isJSONArray(value: unknown): value is JSONArray {\n return Array.isArray(value) && value.every(isJSONValue);\n}\n\nexport function isJSONObject(value: unknown): value is JSONObject {\n return (\n value != null &&\n typeof value === 'object' &&\n Object.entries(value).every(\n ([key, val]) => typeof key === 'string' && isJSONValue(val),\n )\n );\n}\n","import { type LanguageModelV1Prompt, UnsupportedFunctionalityError } from \"@ai-sdk/provider\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\nexport function convertToWorkersAIChatMessages(prompt: LanguageModelV1Prompt): WorkersAIChatPrompt {\n\tconst messages: WorkersAIChatPrompt = [];\n\n\tfor (const { role, content } of prompt) {\n\t\tswitch (role) {\n\t\t\tcase \"system\": {\n\t\t\t\tmessages.push({ role: \"system\", content });\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"user\": {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: content\n\t\t\t\t\t\t.map((part) => {\n\t\t\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\t\t\treturn part.text;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcase \"image\": {\n\t\t\t\t\t\t\t\t\tthrow new UnsupportedFunctionalityError({\n\t\t\t\t\t\t\t\t\t\tfunctionality: \"image-part\",\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.join(\"\"),\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"assistant\": {\n\t\t\t\tlet text = \"\";\n\t\t\t\tconst toolCalls: Array<{\n\t\t\t\t\tid: string;\n\t\t\t\t\ttype: \"function\";\n\t\t\t\t\tfunction: { name: string; arguments: string };\n\t\t\t\t}> = [];\n\n\t\t\t\tfor (const part of content) {\n\t\t\t\t\tswitch (part.type) {\n\t\t\t\t\t\tcase \"text\": {\n\t\t\t\t\t\t\ttext += part.text;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcase \"tool-call\": {\n\t\t\t\t\t\t\ttext = JSON.stringify({\n\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\tparameters: part.args,\n\t\t\t\t\t\t\t});\n\n\t\t\t\t\t\t\ttoolCalls.push({\n\t\t\t\t\t\t\t\tid: part.toolCallId,\n\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\tfunction: {\n\t\t\t\t\t\t\t\t\tname: part.toolName,\n\t\t\t\t\t\t\t\t\targuments: 
JSON.stringify(part.args),\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdefault: {\n\t\t\t\t\t\t\tconst exhaustiveCheck = part;\n\t\t\t\t\t\t\tthrow new Error(`Unsupported part: ${exhaustiveCheck}`);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\tcontent: text,\n\t\t\t\t\ttool_calls:\n\t\t\t\t\t\ttoolCalls.length > 0\n\t\t\t\t\t\t\t? toolCalls.map(({ function: { name, arguments: args } }) => ({\n\t\t\t\t\t\t\t\t\tid: \"null\",\n\t\t\t\t\t\t\t\t\ttype: \"function\",\n\t\t\t\t\t\t\t\t\tfunction: { name, arguments: args },\n\t\t\t\t\t\t\t\t}))\n\t\t\t\t\t\t\t: undefined,\n\t\t\t\t});\n\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"tool\": {\n\t\t\t\tfor (const toolResponse of content) {\n\t\t\t\t\tmessages.push({\n\t\t\t\t\t\trole: \"tool\",\n\t\t\t\t\t\tname: toolResponse.toolName,\n\t\t\t\t\t\tcontent: JSON.stringify(toolResponse.result),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = role satisfies never;\n\t\t\t\tthrow new Error(`Unsupported role: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn messages;\n}\n","// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.\n// This module is browser compatible.\n/**\n * Transform a stream into a stream where each chunk is divided by a newline,\n * be it `\\n` or `\\r\\n`. `\\r` can be enabled via the `allowCR` option.\n *\n * @example\n * ```ts\n * import { TextLineStream } from \"@std/streams/text-line-stream\";\n *\n * const res = await fetch(\"https://example.com\");\n * const lines = res.body!\n * .pipeThrough(new TextDecoderStream())\n * .pipeThrough(new TextLineStream());\n * ```\n */\nexport class TextLineStream extends TransformStream {\n #currentLine = \"\";\n /** Constructs a new instance. */\n constructor(options = { allowCR: false }) {\n super({\n transform: (chars, controller) => {\n chars = this.#currentLine + chars;\n while (true) {\n const lfIndex = chars.indexOf(\"\\n\");\n const crIndex = options.allowCR ? chars.indexOf(\"\\r\") : -1;\n if (crIndex !== -1 && crIndex !== (chars.length - 1) &&\n (lfIndex === -1 || (lfIndex - 1) > crIndex)) {\n controller.enqueue(chars.slice(0, crIndex));\n chars = chars.slice(crIndex + 1);\n continue;\n }\n if (lfIndex === -1)\n break;\n const endIndex = chars[lfIndex - 1] === \"\\r\" ? lfIndex - 1 : lfIndex;\n controller.enqueue(chars.slice(0, endIndex));\n chars = chars.slice(lfIndex + 1);\n }\n this.#currentLine = chars;\n },\n flush: (controller) => {\n if (this.#currentLine === \"\")\n return;\n const currentLine = options.allowCR && this.#currentLine.endsWith(\"\\r\")\n ? 
this.#currentLine.slice(0, -1)\n : this.#currentLine;\n controller.enqueue(currentLine);\n },\n });\n }\n}\n","import { TextLineStream } from './deps/jsr.io/@std/streams/0.221.0/text_line_stream.js';\nexport function stream(input) {\n let decoder = new TextDecoderStream();\n let split = new TextLineStream({ allowCR: true });\n return input.pipeThrough(decoder).pipeThrough(split);\n}\nexport function split(input) {\n let rgx = /[:]\\s*/;\n let match = rgx.exec(input);\n // \": comment\" -> index=0 -> ignore\n let idx = match && match.index;\n if (idx) {\n return [\n input.substring(0, idx),\n input.substring(idx + match[0].length),\n ];\n }\n}\nexport function fallback(headers, key, value) {\n let tmp = headers.get(key);\n if (!tmp)\n headers.set(key, value);\n}\n","import * as utils from './utils.js';\n/**\n * Convert a `Response` body containing Server Sent Events (SSE) into an Async Iterator that yields {@linkcode ServerSentEventMessage} objects.\n *\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events}\n *\n * @example\n * ```js\n * // Optional\n * let abort = new AbortController;\n *\n * // Manually fetch a Response\n * let res = await fetch('https://...', {\n * method: 'POST',\n * signal: abort.signal,\n * headers: {\n * 'api-key': 'token <value>',\n * 'content-type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true, // <- hypothetical\n * // ...\n * })\n * });\n *\n * if (res.ok) {\n * let stream = events(res, abort.signal);\n * for await (let event of stream) {\n * console.log('<<', event.data);\n * }\n * }\n * ```\n */\nexport async function* events(res, signal) {\n // TODO: throw error?\n if (!res.body)\n return;\n let iter = utils.stream(res.body);\n let line, reader = iter.getReader();\n let event;\n for (;;) {\n if (signal && signal.aborted) {\n return reader.cancel();\n }\n line = await reader.read();\n if (line.done)\n return;\n if (!line.value) {\n if (event)\n yield event;\n event = undefined;\n continue;\n }\n let [field, value] = utils.split(line.value) || [];\n if (!field)\n continue; // comment or invalid\n if (field === 'data') {\n event ||= {};\n event[field] = event[field] ? 
(event[field] + '\\n' + value) : value;\n }\n else if (field === 'event') {\n event ||= {};\n event[field] = value;\n }\n else if (field === 'id') {\n event ||= {};\n event[field] = +value || value;\n }\n else if (field === 'retry') {\n event ||= {};\n event[field] = +value || undefined;\n }\n }\n}\n/**\n * Convenience function that will `fetch` with the given arguments and, if ok, will return the {@linkcode events} async iterator.\n *\n * If the response is not ok (status 200-299), the `Response` is thrown.\n *\n * @example\n * ```js\n * // NOTE: throws `Response` if not 2xx status\n * let events = await stream('https://api.openai.com/...', {\n * method: 'POST',\n * headers: {\n * 'Authorization': 'Bearer <token>',\n * 'Content-Type': 'application/json',\n * },\n * body: JSON.stringify({\n * stream: true,\n * // ...\n * })\n * });\n *\n * for await (let event of events) {\n * console.log('<<', JSON.parse(event.data));\n * }\n * ```\n */\nexport async function stream(input, init) {\n let req = new Request(input, init);\n utils.fallback(req.headers, 'Accept', 'text/event-stream');\n utils.fallback(req.headers, 'Content-Type', 'application/json');\n let r = await fetch(req);\n if (!r.ok)\n throw r;\n return events(r, req.signal);\n}\n","export function mapWorkersAIUsage(output: AiTextGenerationOutput | AiTextToImageOutput) {\n\tconst usage = (\n\t\toutput as {\n\t\t\tusage: { prompt_tokens: number; completion_tokens: number };\n\t\t}\n\t).usage ?? {\n\t\tprompt_tokens: 0,\n\t\tcompletion_tokens: 0,\n\t};\n\n\treturn {\n\t\tpromptTokens: usage.prompt_tokens,\n\t\tcompletionTokens: usage.completion_tokens,\n\t};\n}\n","import {\n\ttype LanguageModelV1,\n\ttype LanguageModelV1CallWarning,\n\ttype LanguageModelV1StreamPart,\n\tUnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\nimport { events } from \"fetch-event-stream\";\nimport { mapWorkersAIUsage } from \"./map-workersai-usage\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\ntype WorkersAIChatConfig = {\n\tprovider: string;\n\tbinding: Ai;\n\tgateway?: GatewayOptions;\n};\n\nexport class WorkersAIChatLanguageModel implements LanguageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\treadonly defaultObjectGenerationMode = \"json\";\n\n\treadonly modelId: TextGenerationModels;\n\treadonly settings: WorkersAIChatSettings;\n\n\tprivate readonly config: WorkersAIChatConfig;\n\n\tconstructor(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings: WorkersAIChatSettings,\n\t\tconfig: WorkersAIChatConfig,\n\t) {\n\t\tthis.modelId = modelId;\n\t\tthis.settings = settings;\n\t\tthis.config = config;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\n\tprivate getArgs({\n\t\tmode,\n\t\tprompt,\n\t\tmaxTokens,\n\t\ttemperature,\n\t\ttopP,\n\t\tfrequencyPenalty,\n\t\tpresencePenalty,\n\t\tseed,\n\t}: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n\t\tconst type = mode.type;\n\n\t\tconst warnings: LanguageModelV1CallWarning[] = [];\n\n\t\tif (frequencyPenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"frequencyPenalty\",\n\t\t\t});\n\t\t}\n\n\t\tif (presencePenalty != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"presencePenalty\",\n\t\t\t});\n\t\t}\n\n\t\tconst 
baseArgs = {\n\t\t\t// model id:\n\t\t\tmodel: this.modelId,\n\n\t\t\t// model specific settings:\n\t\t\tsafe_prompt: this.settings.safePrompt,\n\n\t\t\t// standardized settings:\n\t\t\tmax_tokens: maxTokens,\n\t\t\ttemperature,\n\t\t\ttop_p: topP,\n\t\t\trandom_seed: seed,\n\n\t\t\t// messages:\n\t\t\tmessages: convertToWorkersAIChatMessages(prompt),\n\t\t};\n\n\t\tswitch (type) {\n\t\t\tcase \"regular\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-json\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\tresponse_format: {\n\t\t\t\t\t\t\ttype: \"json_schema\",\n\t\t\t\t\t\t\tjson_schema: mode.schema,\n\t\t\t\t\t\t},\n\t\t\t\t\t\ttools: undefined,\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tcase \"object-tool\": {\n\t\t\t\treturn {\n\t\t\t\t\targs: {\n\t\t\t\t\t\t...baseArgs,\n\t\t\t\t\t\ttool_choice: \"any\",\n\t\t\t\t\t\ttools: [{ type: \"function\", function: mode.tool }],\n\t\t\t\t\t},\n\t\t\t\t\twarnings,\n\t\t\t\t};\n\t\t\t}\n\n\t\t\t// @ts-expect-error - this is unreachable code\n\t\t\t// TODO: fixme\n\t\t\tcase \"object-grammar\": {\n\t\t\t\tthrow new UnsupportedFunctionalityError({\n\t\t\t\t\tfunctionality: \"object-grammar mode\",\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tdefault: {\n\t\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\t\tthrow new Error(`Unsupported type: ${exhaustiveCheck}`);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync doGenerate(\n\t\toptions: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\tconst { gateway, safePrompt, ...passthroughOptions } = this.settings;\n\n\t\tconst output = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmessages: args.messages,\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? gateway, ...passthroughOptions },\n\t\t);\n\n\t\tif (output instanceof ReadableStream) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\treturn {\n\t\t\ttext:\n\t\t\t\ttypeof output.response === \"object\" && output.response !== null\n\t\t\t\t\t? 
JSON.stringify(output.response) // ai-sdk expects a string here\n\t\t\t\t\t: output.response,\n\t\t\ttoolCalls: output.tool_calls?.map((toolCall) => ({\n\t\t\t\ttoolCallType: \"function\",\n\t\t\t\ttoolCallId: toolCall.name,\n\t\t\t\ttoolName: toolCall.name,\n\t\t\t\targs: JSON.stringify(toolCall.arguments || {}),\n\t\t\t})),\n\t\t\tfinishReason: \"stop\", // TODO: mapWorkersAIFinishReason(response.finish_reason),\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\tusage: mapWorkersAIUsage(output),\n\t\t\twarnings,\n\t\t};\n\t}\n\n\tasync doStream(\n\t\toptions: Parameters<LanguageModelV1[\"doStream\"]>[0],\n\t): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n\t\tconst { args, warnings } = this.getArgs(options);\n\n\t\t// [1] When the latest message is not a tool response, we use the regular generate function\n\t\t// and simulate it as a streamed response in order to satisfy the AI SDK's interface for\n\t\t// doStream...\n\t\tif (args.tools?.length && lastMessageWasUser(args.messages)) {\n\t\t\tconst response = await this.doGenerate(options);\n\n\t\t\tif (response instanceof ReadableStream) {\n\t\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\tstream: new ReadableStream<LanguageModelV1StreamPart>({\n\t\t\t\t\tasync start(controller) {\n\t\t\t\t\t\tif (response.text) {\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\t\t\ttextDelta: response.text,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (response.toolCalls) {\n\t\t\t\t\t\t\tfor (const toolCall of response.toolCalls) {\n\t\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\t\ttype: \"tool-call\",\n\t\t\t\t\t\t\t\t\t...toolCall,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\ttype: \"finish\",\n\t\t\t\t\t\t\tfinishReason: \"stop\",\n\t\t\t\t\t\t\tusage: response.usage,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcontroller.close();\n\t\t\t\t\t},\n\t\t\t\t}),\n\t\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\t\twarnings,\n\t\t\t};\n\t\t}\n\n\t\t// [2] ...otherwise, we just proceed as normal and stream the response directly from the remote model.\n\t\tconst { gateway, ...passthroughOptions } = this.settings;\n\n\t\tconst response = await this.config.binding.run(\n\t\t\targs.model,\n\t\t\t{\n\t\t\t\tmessages: args.messages,\n\t\t\t\tmax_tokens: args.max_tokens,\n\t\t\t\tstream: true,\n\t\t\t\ttemperature: args.temperature,\n\t\t\t\ttools: args.tools,\n\t\t\t\ttop_p: args.top_p,\n\t\t\t\t// @ts-expect-error response_format not yet added to types\n\t\t\t\tresponse_format: args.response_format,\n\t\t\t},\n\t\t\t{ gateway: this.config.gateway ?? 
gateway, ...passthroughOptions },\n\t\t);\n\n\t\tif (!(response instanceof ReadableStream)) {\n\t\t\tthrow new Error(\"This shouldn't happen\");\n\t\t}\n\n\t\tconst chunkEvent = events(new Response(response));\n\t\tlet usage = { promptTokens: 0, completionTokens: 0 };\n\n\t\treturn {\n\t\t\tstream: new ReadableStream<LanguageModelV1StreamPart>({\n\t\t\t\tasync start(controller) {\n\t\t\t\t\tfor await (const event of chunkEvent) {\n\t\t\t\t\t\tif (!event.data) {\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (event.data === \"[DONE]\") {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tconst chunk = JSON.parse(event.data);\n\t\t\t\t\t\tif (chunk.usage) {\n\t\t\t\t\t\t\tusage = mapWorkersAIUsage(chunk);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tchunk.response?.length &&\n\t\t\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\t\t\ttype: \"text-delta\",\n\t\t\t\t\t\t\t\ttextDelta: chunk.response,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t\tcontroller.enqueue({\n\t\t\t\t\t\ttype: \"finish\",\n\t\t\t\t\t\tfinishReason: \"stop\",\n\t\t\t\t\t\tusage: usage,\n\t\t\t\t\t});\n\t\t\t\t\tcontroller.close();\n\t\t\t\t},\n\t\t\t}),\n\t\t\trawCall: { rawPrompt: args.messages, rawSettings: args },\n\t\t\twarnings,\n\t\t};\n\t}\n}\n\nfunction prepareToolsAndToolChoice(\n\tmode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n\t\ttype: \"regular\";\n\t},\n) {\n\t// when the tools array is empty, change it to undefined to prevent errors:\n\tconst tools = mode.tools?.length ? mode.tools : undefined;\n\n\tif (tools == null) {\n\t\treturn { tools: undefined, tool_choice: undefined };\n\t}\n\n\tconst mappedTools = tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\t// @ts-expect-error - description is not a property of tool\n\t\t\tdescription: tool.description,\n\t\t\t// @ts-expect-error - parameters is not a property of tool\n\t\t\tparameters: tool.parameters,\n\t\t},\n\t}));\n\n\tconst toolChoice = mode.toolChoice;\n\n\tif (toolChoice == null) {\n\t\treturn { tools: mappedTools, tool_choice: undefined };\n\t}\n\n\tconst type = toolChoice.type;\n\n\tswitch (type) {\n\t\tcase \"auto\":\n\t\t\treturn { tools: mappedTools, tool_choice: type };\n\t\tcase \"none\":\n\t\t\treturn { tools: mappedTools, tool_choice: type };\n\t\tcase \"required\":\n\t\t\treturn { tools: mappedTools, tool_choice: \"any\" };\n\n\t\t// workersAI does not support tool mode directly,\n\t\t// so we filter the tools and force the tool choice through 'any'\n\t\tcase \"tool\":\n\t\t\treturn {\n\t\t\t\ttools: mappedTools.filter((tool) => tool.function.name === toolChoice.toolName),\n\t\t\t\ttool_choice: \"any\",\n\t\t\t};\n\t\tdefault: {\n\t\t\tconst exhaustiveCheck = type satisfies never;\n\t\t\tthrow new Error(`Unsupported tool choice type: ${exhaustiveCheck}`);\n\t\t}\n\t}\n}\n\nfunction lastMessageWasUser(messages: WorkersAIChatPrompt) {\n\treturn messages.length > 0 && messages[messages.length - 1].role === \"user\";\n}\n","import type { ImageModelV1, ImageModelV1CallWarning } from \"@ai-sdk/provider\";\nimport type { WorkersAIImageConfig } from \"./workersai-image-config\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels } from \"./workersai-models\";\n\nexport class WorkersAIImageModel implements ImageModelV1 {\n\treadonly specificationVersion = \"v1\";\n\n\tget maxImagesPerCall(): number {\n\t\treturn this.settings.maxImagesPerCall ?? 
1;\n\t}\n\n\tget provider(): string {\n\t\treturn this.config.provider;\n\t}\n\tconstructor(\n\t\treadonly modelId: ImageGenerationModels,\n\t\treadonly settings: WorkersAIImageSettings,\n\t\treadonly config: WorkersAIImageConfig,\n\t) {}\n\n\tasync doGenerate({\n\t\tprompt,\n\t\tn,\n\t\tsize,\n\t\taspectRatio,\n\t\tseed,\n\t\t// headers,\n\t\t// abortSignal,\n\t}: Parameters<ImageModelV1[\"doGenerate\"]>[0]): Promise<\n\t\tAwaited<ReturnType<ImageModelV1[\"doGenerate\"]>>\n\t> {\n\t\tconst { width, height } = getDimensionsFromSizeString(size);\n\n\t\tconst warnings: Array<ImageModelV1CallWarning> = [];\n\n\t\tif (aspectRatio != null) {\n\t\t\twarnings.push({\n\t\t\t\ttype: \"unsupported-setting\",\n\t\t\t\tsetting: \"aspectRatio\",\n\t\t\t\tdetails: \"This model does not support aspect ratio. Use `size` instead.\",\n\t\t\t});\n\t\t}\n\n\t\tconst generateImage = async () => {\n\t\t\tconst outputStream: ReadableStream<Uint8Array> = await this.config.binding.run(\n\t\t\t\tthis.modelId,\n\t\t\t\t{\n\t\t\t\t\tprompt,\n\t\t\t\t\tseed,\n\t\t\t\t\twidth,\n\t\t\t\t\theight,\n\t\t\t\t},\n\t\t\t);\n\n\t\t\t// Convert the output stream to a Uint8Array.\n\t\t\treturn streamToUint8Array(outputStream);\n\t\t};\n\n\t\tconst images: Uint8Array[] = await Promise.all(\n\t\t\tArray.from({ length: n }, () => generateImage()),\n\t\t);\n\n\t\t// type AiTextToImageOutput = ReadableStream<Uint8Array>;\n\n\t\treturn {\n\t\t\timages,\n\t\t\twarnings,\n\t\t\tresponse: {\n\t\t\t\ttimestamp: new Date(),\n\t\t\t\tmodelId: this.modelId,\n\t\t\t\theaders: {},\n\t\t\t},\n\t\t};\n\t}\n}\n\nfunction getDimensionsFromSizeString(size: string | undefined) {\n\tconst [width, height] = size?.split(\"x\") ?? [undefined, undefined];\n\n\treturn {\n\t\twidth: parseInteger(width),\n\t\theight: parseInteger(height),\n\t};\n}\n\nfunction parseInteger(value?: string) {\n\tif (value === \"\" || !value) return undefined;\n\tconst number = Number(value);\n\treturn Number.isInteger(number) ? number : undefined;\n}\n\nasync function streamToUint8Array(stream: ReadableStream<Uint8Array>): Promise<Uint8Array> {\n\tconst reader = stream.getReader();\n\tconst chunks: Uint8Array[] = [];\n\tlet totalLength = 0;\n\n\t// Read the stream until it is finished.\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tchunks.push(value);\n\t\ttotalLength += value.length;\n\t}\n\n\t// Allocate a new Uint8Array to hold all the data.\n\tconst result = new Uint8Array(totalLength);\n\tlet offset = 0;\n\tfor (const chunk of chunks) {\n\t\tresult.set(chunk, offset);\n\t\toffset += chunk.length;\n\t}\n\treturn result;\n}\n","import { createRun } from \"./utils\";\nimport { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport { WorkersAIImageModel } from \"./workersai-image-model\";\nimport type { WorkersAIImageSettings } from \"./workersai-image-settings\";\nimport type { ImageGenerationModels, TextGenerationModels } from \"./workersai-models\";\n\nexport type WorkersAISettings = (\n\t| {\n\t\t\t/**\n\t\t\t * Provide a Cloudflare AI binding.\n\t\t\t */\n\t\t\tbinding: Ai;\n\n\t\t\t/**\n\t\t\t * Credentials must be absent when a binding is given.\n\t\t\t */\n\t\t\taccountId?: never;\n\t\t\tapiKey?: never;\n\t }\n\t| {\n\t\t\t/**\n\t\t\t * Provide Cloudflare API credentials directly. 
Must be used if a binding is not specified.\n\t\t\t */\n\t\t\taccountId: string;\n\t\t\tapiKey: string;\n\t\t\t/**\n\t\t\t * Both binding must be absent if credentials are used directly.\n\t\t\t */\n\t\t\tbinding?: never;\n\t }\n) & {\n\t/**\n\t * Optionally specify a gateway.\n\t */\n\tgateway?: GatewayOptions;\n};\n\nexport interface WorkersAI {\n\t(modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;\n\t/**\n\t * Creates a model for text generation.\n\t **/\n\tchat(\n\t\tmodelId: TextGenerationModels,\n\t\tsettings?: WorkersAIChatSettings,\n\t): WorkersAIChatLanguageModel;\n\n\t/**\n\t * Creates a model for image generation.\n\t **/\n\timage(modelId: ImageGenerationModels, settings?: WorkersAIImageSettings): WorkersAIImageModel;\n}\n\n/**\n * Create a Workers AI provider instance.\n */\nexport function createWorkersAI(options: WorkersAISettings): WorkersAI {\n\t// Use a binding if one is directly provided. Otherwise use credentials to create\n\t// a `run` method that calls the Cloudflare REST API.\n\tlet binding: Ai | undefined;\n\n\tif (options.binding) {\n\t\tbinding = options.binding;\n\t} else {\n\t\tconst { accountId, apiKey } = options;\n\t\tbinding = {\n\t\t\trun: createRun({ accountId, apiKey }),\n\t\t} as Ai;\n\t}\n\n\tif (!binding) {\n\t\tthrow new Error(\"Either a binding or credentials must be provided.\");\n\t}\n\n\tconst createChatModel = (modelId: TextGenerationModels, settings: WorkersAIChatSettings = {}) =>\n\t\tnew WorkersAIChatLanguageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.chat\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst createImageModel = (\n\t\tmodelId: ImageGenerationModels,\n\t\tsettings: WorkersAIImageSettings = {},\n\t) =>\n\t\tnew WorkersAIImageModel(modelId, settings, {\n\t\t\tprovider: \"workersai.image\",\n\t\t\tbinding,\n\t\t\tgateway: options.gateway,\n\t\t});\n\n\tconst provider = (modelId: TextGenerationModels, settings?: WorkersAIChatSettings) => {\n\t\tif (new.target) {\n\t\t\tthrow new Error(\"The WorkersAI model function cannot be called with the new keyword.\");\n\t\t}\n\t\treturn createChatModel(modelId, settings);\n\t};\n\n\tprovider.chat = createChatModel;\n\tprovider.image = createImageModel;\n\tprovider.imageModel = createImageModel;\n\n\treturn 
provider;\n}\n"],"mappings":";;;;;;;;;;;;AAyDO,SAAS,UAAU,QAAgC;AACzD,QAAM,EAAE,WAAW,OAAO,IAAI;AAG9B,SAAO,eAAe,IACrB,OACA,QACA,SAC0F;AAC1F,UAAM,EAAE,SAAS,QAAQ,cAAc,mBAAmB,GAAG,mBAAmB,IAC/E,WAAW,CAAC;AAEb,UAAM,YAAY,IAAI,gBAAgB;AACtC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,kBAAkB,GAAG;AAE9D,UAAI;AACH,cAAM,WAAW,MAAM,SAAS;AAChC,YAAI,CAAC,UAAU;AACd;AAAA,QACD;AACA,kBAAU,OAAO,KAAK,QAAQ;AAAA,MAC/B,SAAS,OAAO;AACf,cAAM,IAAI;AAAA,UACT,qBAAqB,GAAG;AAAA,QACzB;AAAA,MACD;AAAA,IACD;AAEA,UAAM,MAAM,iDAAiD,SAAS,WAAW,KAAK,GAAG,YAAY,IAAI,SAAS,KAAK,EAAE;AAGzH,UAAM,UAAU;AAAA,MACf,gBAAgB;AAAA,MAChB,eAAe,UAAU,MAAM;AAAA,IAChC;AAEA,UAAM,OAAO,KAAK,UAAU,MAAM;AAGlC,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MACjC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,IACD,CAAC;AAGD,QAAI,mBAAmB;AACtB,aAAO;AAAA,IACR;AAGA,QAAK,OAAiC,WAAW,MAAM;AACtD,UAAI,SAAS,MAAM;AAClB,eAAO,SAAS;AAAA,MACjB;AACA,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC5D;AAGA,UAAM,OAAO,MAAM,SAAS,KAEzB;AACH,WAAO,KAAK;AAAA,EACb;AACD;;;ACrHA,IAAM,SAAS;AACf,IAAM,SAAS,OAAO,IAAI,MAAM;AALhC,IAAA;AAWO,IAAM,cAAN,MAAMA,qBAAmB,MAAM;;;;;;;;;EAgBpC,YAAY;IACV,MAAAC;IACA;IACA;EACF,GAIG;AACD,UAAM,OAAO;AAxBf,SAAkB,EAAA,IAAU;AA0B1B,SAAK,OAAOA;AACZ,SAAK,QAAQ;EACf;;;;;;EAOA,OAAO,WAAW,OAAqC;AACrD,WAAOD,aAAW,UAAU,OAAO,MAAM;EAC3C;EAEA,OAAiB,UAAU,OAAgBE,UAAyB;AAClE,UAAM,eAAe,OAAO,IAAIA,QAAM;AACtC,WACE,SAAS,QACT,OAAO,UAAU,YACjB,gBAAgB,SAChB,OAAO,MAAM,YAAY,MAAM,aAC/B,MAAM,YAAY,MAAM;EAE5B;AACF;AAjDoB,KAAA;AADb,IAAM,aAAN;ACTP,IAAM,OAAO;AACb,IAAMA,UAAS,mBAAmB,IAAI;AACtC,IAAMC,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAOoBC,MAAAC;ACLpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAOoBI,MAAAC;AELpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAME,UAAS,OAAO,IAAID,OAAM;AAJhC,IAAAE;AAUoBC,MAAAC;ACRpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAWoBI,MAAAC;ACTpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAWoBI,MAAAC;ACRpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AALhC,IAAAH;AASoBI,MAAAC;ACPpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAOoBI,MAAAC;ACLpB,IAAMC,QAAO;AACb,IAAMC,UAAS,mBAAmBD,KAAI;AACtC,IAAMD,UAAS,OAAO,IAAIE,OAAM;AAJhC,IAAAH;AAOoBI,MAAAC;ACLpB,IAAMC,QAAO;AACb,IAAMC,WAAS,mBAAmBD,KAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AAJhC,IAAAH;AAUoBI,OAAAC;ACRpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AAJhC,IAAAH;AAOoBI,OAAAC;ACLpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AAJhC,IAAAH;AAOoBI,OAAAC;ACJpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAMD,WAAS,OAAO,IAAIE,QAAM;AALhC,IAAAH;AAQoBI,OAAAC;ACNpB,IAAMC,SAAO;AACb,IAAMC,WAAS,mBAAmBD,MAAI;AACtC,IAAME,WAAS,OAAO,IAAID,QAAM;AAJhC,IAAAE;AAMO,IAAM,gCAAN,cAA4C,WAAW;EAK5D,YAAY;IACV;IACA,UAAU,IAAI,aAAa;EAC7B,GAGG;AACD,UAAM,EAAE,MAAAH,QAAM,QAAQ,CAAC;AAXzB,SAAkBG,IAAAA,IAAU;AAY1B,SAAK,gBAAgB;EACvB;EAEA,OAAO,WAAW,OAAwD;AACxE,WAAO,WAAW,UAAU,OAAOF,QAAM;EAC3C;AACF;AAlBoBE,OAAAD;;;AEJb,SAAS,+BAA+B,QAAoD;AAClG,QAAM,WAAgC,CAAC;AAEvC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACvC,YAAQ,MAAM;AAAA,MACb,KAAK,UAAU;AACd,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACD;AAAA,MAEA,KAAK,QAAQ;AACZ,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS,QACP,IAAI,CAAC,SAAS;AACd,oBAAQ,KAAK,MAAM;AAAA,cAClB,KAAK,QAAQ;AACZ,uBAAO,KAAK;AAAA,cACb;AAAA,cACA,KAAK,SAAS;AACb,sBAAM,IAAI,8BAA8B;AAAA,kBACvC,eAAe;AAAA,gBAChB,CAAC;AAAA,cACF;AAAA,YACD;AAAA,UACD,CAAC,EACA,KAAK,EAAE;AAAA,QACV,CAAC;AACD;AAAA,MACD;AAAA,MAEA,KAAK,aAAa;AACjB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC3B,kBAAQ,KAAK,MAAM;
AAAA,YAClB,KAAK,QAAQ;AACZ,sBAAQ,KAAK;AACb;AAAA,YACD;AAAA,YACA,KAAK,aAAa;AACjB,qBAAO,KAAK,UAAU;AAAA,gBACrB,MAAM,KAAK;AAAA,gBACX,YAAY,KAAK;AAAA,cAClB,CAAC;AAED,wBAAU,KAAK;AAAA,gBACd,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACT,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACpC;AAAA,cACD,CAAC;AACD;AAAA,YACD;AAAA,YACA,SAAS;AACR,oBAAM,kBAAkB;AACxB,oBAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,YACvD;AAAA,UACD;AAAA,QACD;AAEA,iBAAS,KAAK;AAAA,UACb,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACC,UAAU,SAAS,IAChB,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAAE,QAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC5D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAAA,QAAM,WAAW,KAAK;AAAA,UACnC,EAAE,IACD;AAAA,QACL,CAAC;AAED;AAAA,MACD;AAAA,MACA,KAAK,QAAQ;AACZ,mBAAW,gBAAgB,SAAS;AACnC,mBAAS,KAAK;AAAA,YACb,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC5C,CAAC;AAAA,QACF;AACA;AAAA,MACD;AAAA,MACA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAEA,SAAO;AACR;;;ACxGA;AAgBO,IAAM,iBAAN,cAA6B,gBAAgB;AAAA;AAAA,EAGhD,YAAY,UAAU,EAAE,SAAS,MAAM,GAAG;AACtC,UAAM;AAAA,MACF,WAAW,CAAC,OAAO,eAAe;AAC9B,gBAAQ,mBAAK,gBAAe;AAC5B,eAAO,MAAM;AACT,gBAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,gBAAM,UAAU,QAAQ,UAAU,MAAM,QAAQ,IAAI,IAAI;AACxD,cAAI,YAAY,MAAM,YAAa,MAAM,SAAS,MAC7C,YAAY,MAAO,UAAU,IAAK,UAAU;AAC7C,uBAAW,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC;AAC1C,oBAAQ,MAAM,MAAM,UAAU,CAAC;AAC/B;AAAA,UACJ;AACA,cAAI,YAAY;AACZ;AACJ,gBAAM,WAAW,MAAM,UAAU,CAAC,MAAM,OAAO,UAAU,IAAI;AAC7D,qBAAW,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC;AAC3C,kBAAQ,MAAM,MAAM,UAAU,CAAC;AAAA,QACnC;AACA,2BAAK,cAAe;AAAA,MACxB;AAAA,MACA,OAAO,CAAC,eAAe;AACnB,YAAI,mBAAK,kBAAiB;AACtB;AACJ,cAAM,cAAc,QAAQ,WAAW,mBAAK,cAAa,SAAS,IAAI,IAChE,mBAAK,cAAa,MAAM,GAAG,EAAE,IAC7B,mBAAK;AACX,mBAAW,QAAQ,WAAW;AAAA,MAClC;AAAA,IACJ,CAAC;AA/BL,qCAAe;AAAA,EAgCf;AACJ;AAjCI;;;AChBG,SAAS,OAAO,OAAO;AAC1B,MAAI,UAAU,IAAI,kBAAkB;AACpC,MAAIC,SAAQ,IAAI,eAAe,EAAE,SAAS,KAAK,CAAC;AAChD,SAAO,MAAM,YAAY,OAAO,EAAE,YAAYA,MAAK;AACvD;AACO,SAAS,MAAM,OAAO;AACzB,MAAI,MAAM;AACV,MAAI,QAAQ,IAAI,KAAK,KAAK;AAE1B,MAAI,MAAM,SAAS,MAAM;AACzB,MAAI,KAAK;AACL,WAAO;AAAA,MACH,MAAM,UAAU,GAAG,GAAG;AAAA,MACtB,MAAM,UAAU,MAAM,MAAM,CAAC,EAAE,MAAM;AAAA,IACzC;AAAA,EACJ;AACJ;;;ACgBA,gBAAuB,OAAO,KAAK,QAAQ;AAEvC,MAAI,CAAC,IAAI;AACL;AACJ,MAAI,OAAa,OAAO,IAAI,IAAI;AAChC,MAAI,MAAM,SAAS,KAAK,UAAU;AAClC,MAAI;AACJ,aAAS;AACL,QAAI,UAAU,OAAO,SAAS;AAC1B,aAAO,OAAO,OAAO;AAAA,IACzB;AACA,WAAO,MAAM,OAAO,KAAK;AACzB,QAAI,KAAK;AACL;AACJ,QAAI,CAAC,KAAK,OAAO;AACb,UAAI;AACA,cAAM;AACV,cAAQ;AACR;AAAA,IACJ;AACA,QAAI,CAAC,OAAO,KAAK,IAAU,MAAM,KAAK,KAAK,KAAK,CAAC;AACjD,QAAI,CAAC;AACD;AACJ,QAAI,UAAU,QAAQ;AAClB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,MAAM,KAAK,IAAK,MAAM,KAAK,IAAI,OAAO,QAAS;AAAA,IAClE,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI;AAAA,IACnB,WACS,UAAU,MAAM;AACrB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B,WACS,UAAU,SAAS;AACxB,wBAAU,CAAC;AACX,YAAM,KAAK,IAAI,CAAC,SAAS;AAAA,IAC7B;AAAA,EACJ;AACJ;;;ACzEO,SAAS,kBAAkB,QAAsD;AACvF,QAAM,QACL,OAGC,SAAS;AAAA,IACV,eAAe;AAAA,IACf,mBAAmB;AAAA,EACpB;AAEA,SAAO;AAAA,IACN,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,EACzB;AACD;;;ACMO,IAAM,6BAAN,MAA4D;AAAA,EASlE,YACC,SACA,UACA,QACC;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOhB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAEQ,QAAQ;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAiD;AAChD,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC7B,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,QAAI,mBAAmB,MAAM;AAC5B,eAAS,KAAK;A
AAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,WAAW;AAAA;AAAA,MAEhB,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,+BAA+B,MAAM;AAAA,IAChD;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK,WAAW;AACf,eAAO;AAAA,UACN,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,iBAAiB;AAAA,cAChB,MAAM;AAAA,cACN,aAAa,KAAK;AAAA,YACnB;AAAA,YACA,OAAO;AAAA,UACR;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,MAEA,KAAK,eAAe;AACnB,eAAO;AAAA,UACN,MAAM;AAAA,YACL,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UAClD;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACtB,cAAM,IAAI,8BAA8B;AAAA,UACvC,eAAe;AAAA,QAChB,CAAC;AAAA,MACF;AAAA,MAEA,SAAS;AACR,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,WACL,SAC8D;AAC9D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,SAAS,YAAY,GAAG,mBAAmB,IAAI,KAAK;AAE5D,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,QACC,UAAU,KAAK;AAAA,QACf,YAAY,KAAK;AAAA,QACjB,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA,QAEZ,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,kBAAkB,gBAAgB;AACrC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,WAAO;AAAA,MACN,MACC,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,OACxD,KAAK,UAAU,OAAO,QAAQ,IAC9B,OAAO;AAAA,MACX,WAAW,OAAO,YAAY,IAAI,CAAC,cAAc;AAAA,QAChD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS;AAAA,QACnB,MAAM,KAAK,UAAU,SAAS,aAAa,CAAC,CAAC;AAAA,MAC9C,EAAE;AAAA,MACF,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,OAAO,kBAAkB,MAAM;AAAA,MAC/B;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SACL,SAC4D;AAC5D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAK/C,QAAI,KAAK,OAAO,UAAU,mBAAmB,KAAK,QAAQ,GAAG;AAC5D,YAAMC,YAAW,MAAM,KAAK,WAAW,OAAO;AAE9C,UAAIA,qBAAoB,gBAAgB;AACvC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACxC;AAEA,aAAO;AAAA,QACN,QAAQ,IAAI,eAA0C;AAAA,UACrD,MAAM,MAAM,YAAY;AACvB,gBAAIA,UAAS,MAAM;AAClB,yBAAW,QAAQ;AAAA,gBAClB,MAAM;AAAA,gBACN,WAAWA,UAAS;AAAA,cACrB,CAAC;AAAA,YACF;AACA,gBAAIA,UAAS,WAAW;AACvB,yBAAW,YAAYA,UAAS,WAAW;AAC1C,2BAAW,QAAQ;AAAA,kBAClB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACJ,CAAC;AAAA,cACF;AAAA,YACD;AACA,uBAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAOA,UAAS;AAAA,YACjB,CAAC;AACD,uBAAW,MAAM;AAAA,UAClB;AAAA,QACD,CAAC;AAAA,QACD,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,QACvD;AAAA,MACD;AAAA,IACD;AAGA,UAAM,EAAE,SAAS,GAAG,mBAAmB,IAAI,KAAK;AAEhD,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MAC1C,KAAK;AAAA,MACL;AAAA,QACC,UAAU,KAAK;AAAA,QACf,YAAY,KAAK;AAAA,QACjB,QAAQ;AAAA,QACR,aAAa,KAAK;AAAA,QAClB,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA;AAAA,QAEZ,iBAAiB,KAAK;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,KAAK,OAAO,WAAW,SAAS,GAAG,mBAAmB;AAAA,IAClE;AAEA,QAAI,EAAE,oBAAoB,iBAAiB;AAC1C,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACxC;AAEA,UAAM,aAAa,OAAO,IAAI,SAAS,QAAQ,CAAC;AAChD,QAAI,QAAQ,EAAE,cAAc,GAAG,kBAAkB,EAAE;AAEnD,WAAO;AAAA,MACN,QAAQ,IAAI,eAA0C;AAAA,QACrD,MAAM,MAAM,YAAY;AACvB,2BAAiB,SAAS,YAAY;AACrC,gBAAI,CAAC,MAAM,MAAM;AAChB;AAAA,YACD;AACA,gBAAI,MAAM,SAAS,UAAU;AAC5B;AAAA,YACD;AACA,kBAAM,QAAQ,KAAK,MAAM,MAAM,IAAI;AACnC,gBAAI,MAAM,OAAO;AAChB,sBAAQ,kBAAkB,KAAK;AAAA,YAChC;AACA,kBAAM,UAAU,UACf,WAAW,QAAQ;AAAA,cAClB,MAAM;AAAA,cACN,WAAW,MAAM;AAAA,YAClB,CAAC;AAAA,UACH;AACA,qBAAW,QAAQ;AAAA,YAClB,MAAM;AAAA,YACN,cAAc;AAAA,YACd;AAAA,UACD,CAAC;AACD,qBAAW,MAAM;AAAA,QAClB;AAAA,MACD,CAAC;AAAA,MACD,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,0BACR,MAGC;AAED,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,S
AAS,MAAM;AAClB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACxC,MAAM;AAAA,IACN,UAAU;AAAA,MACT,MAAM,KAAK;AAAA;AAAA,MAEX,aAAa,KAAK;AAAA;AAAA,MAElB,YAAY,KAAK;AAAA,IAClB;AAAA,EACD,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACvB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACrD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IAChD,KAAK;AACJ,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA;AAAA;AAAA,IAIjD,KAAK;AACJ,aAAO;AAAA,QACN,OAAO,YAAY,OAAO,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW,QAAQ;AAAA,QAC9E,aAAa;AAAA,MACd;AAAA,IACD,SAAS;AACR,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACnE;AAAA,EACD;AACD;AAEA,SAAS,mBAAmB,UAA+B;AAC1D,SAAO,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,CAAC,EAAE,SAAS;AACtE;;;AC7UO,IAAM,sBAAN,MAAkD;AAAA,EAUxD,YACU,SACA,UACA,QACR;AAHQ;AACA;AACA;AAZV,wBAAS,wBAAuB;AAAA,EAa7B;AAAA,EAXH,IAAI,mBAA2B;AAC9B,WAAO,KAAK,SAAS,oBAAoB;AAAA,EAC1C;AAAA,EAEA,IAAI,WAAmB;AACtB,WAAO,KAAK,OAAO;AAAA,EACpB;AAAA,EAOA,MAAM,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;AAAA,EAGD,GAEE;AACD,UAAM,EAAE,OAAO,OAAO,IAAI,4BAA4B,IAAI;AAE1D,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACxB,eAAS,KAAK;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACV,CAAC;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY;AACjC,YAAM,eAA2C,MAAM,KAAK,OAAO,QAAQ;AAAA,QAC1E,KAAK;AAAA,QACL;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAGA,aAAO,mBAAmB,YAAY;AAAA,IACvC;AAEA,UAAM,SAAuB,MAAM,QAAQ;AAAA,MAC1C,MAAM,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAAC;AAAA,IAChD;AAIA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,MACA,UAAU;AAAA,QACT,WAAW,oBAAI,KAAK;AAAA,QACpB,SAAS,KAAK;AAAA,QACd,SAAS,CAAC;AAAA,MACX;AAAA,IACD;AAAA,EACD;AACD;AAEA,SAAS,4BAA4B,MAA0B;AAC9D,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,QAAW,MAAS;AAEjE,SAAO;AAAA,IACN,OAAO,aAAa,KAAK;AAAA,IACzB,QAAQ,aAAa,MAAM;AAAA,EAC5B;AACD;AAEA,SAAS,aAAa,OAAgB;AACrC,MAAI,UAAU,MAAM,CAAC,MAAO,QAAO;AACnC,QAAM,SAAS,OAAO,KAAK;AAC3B,SAAO,OAAO,UAAU,MAAM,IAAI,SAAS;AAC5C;AAEA,eAAe,mBAAmBC,SAAyD;AAC1F,QAAM,SAASA,QAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AAC9B,MAAI,cAAc;AAGlB,SAAO,MAAM;AACZ,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,WAAO,KAAK,KAAK;AACjB,mBAAe,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC3B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EACjB;AACA,SAAO;AACR;;;ACxDO,SAAS,gBAAgB,SAAuC;AAGtE,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACpB,cAAU,QAAQ;AAAA,EACnB,OAAO;AACN,UAAM,EAAE,WAAW,OAAO,IAAI;AAC9B,cAAU;AAAA,MACT,KAAK,UAAU,EAAE,WAAW,OAAO,CAAC;AAAA,IACrC;AAAA,EACD;AAEA,MAAI,CAAC,SAAS;AACb,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,kBAAkB,CAAC,SAA+B,WAAkC,CAAC,MAC1F,IAAI,2BAA2B,SAAS,UAAU;AAAA,IACjD,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,mBAAmB,CACxB,SACA,WAAmC,CAAC,MAEpC,IAAI,oBAAoB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS,QAAQ;AAAA,EAClB,CAAC;AAEF,QAAM,WAAW,CAAC,SAA+B,aAAqC;AACrF,QAAI,YAAY;AACf,YAAM,IAAI,MAAM,qEAAqE;AAAA,IACtF;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EACzC;AAEA,WAAS,OAAO;AAChB,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,SAAO;AACR;","names":["_AISDKError","name","marker","symbol","_a","_a","symbol","name","marker","_a","symbol","name","marker","symbol","_a","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","_a","symbol","name","marker","symbol","_a","name","split","response","stream"]}
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "workers-ai-provider",
3
3
  "description": "Workers AI Provider for the vercel AI SDK",
4
4
  "type": "module",
5
- "version": "0.2.2",
5
+ "version": "0.3.0",
6
6
  "main": "dist/index.js",
7
7
  "types": "dist/index.d.ts",
8
8
  "repository": {
package/src/utils.ts CHANGED
@@ -29,6 +29,8 @@ export interface AiRun {
29
29
  ): Promise<AiModels[Name]["postProcessedOutputs"]>;
30
30
  }
31
31
 
32
+ export type StringLike = string | { toString(): string };
33
+
32
34
  /**
33
35
  * Parameters for configuring the Cloudflare-based AI runner.
34
36
  */
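
The new StringLike type accepts plain strings as well as any value whose toString() yields a string, which is what lets passthrough options carry numbers, booleans, or custom objects. A minimal illustration (not from the package; the variable names are invented):

// Illustration only: values that satisfy the StringLike type added above.
type StringLike = string | { toString(): string };

const asString: StringLike = "42";                     // plain string
const asObject: StringLike = { toString: () => "42" }; // object with a toString()
const asNumber: StringLike = 42;                       // numbers also have toString(): string

// All three coerce to the same query-parameter value, "42".
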
@@ -60,9 +62,28 @@ export function createRun(config: CreateRunConfig): AiRun {
60
62
  return async function run<Name extends keyof AiModels>(
61
63
  model: Name,
62
64
  inputs: AiModels[Name]["inputs"],
63
- options?: AiOptions,
65
+ options?: AiOptions & Record<string, StringLike>,
64
66
  ): Promise<Response | ReadableStream<Uint8Array> | AiModels[Name]["postProcessedOutputs"]> {
65
- const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}`;
67
+ const { gateway, prefix, extraHeaders, returnRawResponse, ...passthroughOptions } =
68
+ options || {};
69
+
70
+ const urlParams = new URLSearchParams();
71
+ for (const [key, value] of Object.entries(passthroughOptions)) {
72
+ // throw a useful error if the value is not to-stringable
73
+ try {
74
+ const valueStr = value.toString();
75
+ if (!valueStr) {
76
+ continue;
77
+ }
78
+ urlParams.append(key, valueStr);
79
+ } catch (error) {
80
+ throw new Error(
81
+ `Value for option '${key}' is not able to be coerced into a string.`,
82
+ );
83
+ }
84
+ }
85
+
86
+ const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${urlParams ? `?${urlParams}` : ""}`;
66
87
 
67
88
  // Merge default and custom headers.
68
89
  const headers = {
@@ -80,7 +101,7 @@ export function createRun(config: CreateRunConfig): AiRun {
80
101
  });
81
102
 
82
103
  // (1) If the user explicitly requests the raw Response, return it as-is.
83
- if (options?.returnRawResponse) {
104
+ if (returnRawResponse) {
84
105
  return response;
85
106
  }
86
107
 
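
Taken together, the createRun changes mean that any run option other than gateway, prefix, extraHeaders, and returnRawResponse is appended to the REST URL as a query parameter; empty values are skipped, and values that cannot be stringified raise the descriptive error above. A condensed sketch of the resulting URL construction follows; the buildRunUrl helper and the someOption key are hypothetical, used only to illustrate the behaviour:

// Hypothetical helper mirroring the URL construction added to createRun above.
type StringLike = string | { toString(): string };

function buildRunUrl(
  accountId: string,
  model: string,
  passthroughOptions: Record<string, StringLike> = {},
): string {
  const urlParams = new URLSearchParams();
  for (const [key, value] of Object.entries(passthroughOptions)) {
    const valueStr = value.toString();
    if (!valueStr) continue; // empty values are skipped, as in the diff
    urlParams.append(key, valueStr);
  }
  const query = urlParams.toString();
  return `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/${model}${query ? `?${query}` : ""}`;
}

// buildRunUrl("my-account", "@cf/meta/llama-3.1-8b-instruct", { someOption: "true" })
// => ".../accounts/my-account/ai/run/@cf/meta/llama-3.1-8b-instruct?someOption=true"
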
@@ -139,6 +139,8 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
139
139
  ): Promise<Awaited<ReturnType<LanguageModelV1["doGenerate"]>>> {
140
140
  const { args, warnings } = this.getArgs(options);
141
141
 
142
+ const { gateway, safePrompt, ...passthroughOptions } = this.settings;
143
+
142
144
  const output = await this.config.binding.run(
143
145
  args.model,
144
146
  {
@@ -150,7 +152,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
150
152
  // @ts-expect-error response_format not yet added to types
151
153
  response_format: args.response_format,
152
154
  },
153
- { gateway: this.config.gateway ?? this.settings.gateway },
155
+ { gateway: this.config.gateway ?? gateway, ...passthroughOptions },
154
156
  );
155
157
 
156
158
  if (output instanceof ReadableStream) {
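
Within doGenerate, the settings object is now split: gateway and safePrompt are consumed by the provider itself, while every remaining key is spread into the options handed to binding.run, with a gateway configured on the provider taking precedence over one set in the model settings. A simplified sketch of that assembly; the GatewayOptions shape and the buildRunCallOptions helper are assumptions for illustration only:

// Simplified sketch of the run-options assembly in doGenerate (shapes assumed).
type GatewayOptions = { id: string };

type ChatSettings = {
  gateway?: GatewayOptions;
  safePrompt?: boolean;
} & Record<string, unknown>;

function buildRunCallOptions(
  configGateway: GatewayOptions | undefined,
  settings: ChatSettings,
) {
  // gateway and safePrompt stay with the provider; the rest passes straight through.
  const { gateway, safePrompt, ...passthroughOptions } = settings;
  return { gateway: configGateway ?? gateway, ...passthroughOptions };
}

// buildRunCallOptions(undefined, { gateway: { id: "my-gateway" }, customOption: "x" })
// => { gateway: { id: "my-gateway" }, customOption: "x" }
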
@@ -221,6 +223,8 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
221
223
  }
222
224
 
223
225
  // [2] ...otherwise, we just proceed as normal and stream the response directly from the remote model.
226
+ const { gateway, ...passthroughOptions } = this.settings;
227
+
224
228
  const response = await this.config.binding.run(
225
229
  args.model,
226
230
  {
@@ -233,7 +237,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
233
237
  // @ts-expect-error response_format not yet added to types
234
238
  response_format: args.response_format,
235
239
  },
236
- { gateway: this.config.gateway ?? this.settings.gateway },
240
+ { gateway: this.config.gateway ?? gateway, ...passthroughOptions },
237
241
  );
238
242
 
239
243
  if (!(response instanceof ReadableStream)) {
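
The streaming path applies the same forwarding, so passthrough settings reach binding.run from doStream as well. A hypothetical end-to-end sketch inside a Worker; the createWorkersAI entry point, the model id, and someVendorOption are assumptions rather than part of this diff:

// Hypothetical Worker showing a passthrough setting on the streaming path.
// Assumes the package's createWorkersAI factory and the Vercel AI SDK's streamText;
// the `Ai` binding type comes from @cloudflare/workers-types.
import { streamText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    const result = streamText({
      model: workersai("@cf/meta/llama-3.1-8b-instruct", {
        // Hypothetical extra setting; forwarded untouched to binding.run.
        someVendorOption: "value",
      }),
      prompt: "Write a haiku about query strings.",
    });

    return result.toTextStreamResponse();
  },
};
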
@@ -1,4 +1,6 @@
1
- export interface WorkersAIChatSettings {
1
+ import type { StringLike } from "./utils";
2
+
3
+ export type WorkersAIChatSettings = {
2
4
  /**
3
5
  * Whether to inject a safety prompt before all conversations.
4
6
  * Defaults to `false`.
@@ -10,4 +12,9 @@ export interface WorkersAIChatSettings {
10
12
  * @deprecated
11
13
  */
12
14
  gateway?: GatewayOptions;
13
- }
15
+ } & {
16
+ /**
17
+ * Passthrough settings that are provided directly to the run function.
18
+ */
19
+ [key: string]: StringLike;
20
+ };
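
With the settings type widened into an intersection carrying a StringLike index signature, arbitrary extra keys now type-check on the chat settings and are forwarded as passthrough options at run time. A brief sketch; the createWorkersAI usage and the extra option names are assumptions for illustration:

// Hypothetical illustration of the widened settings type: any key beyond the
// documented ones type-checks as long as its value is StringLike.
// The `Ai` binding type comes from @cloudflare/workers-types.
import { createWorkersAI } from "workers-ai-provider";

declare const env: { AI: Ai }; // Workers AI binding provided by the runtime

const workersai = createWorkersAI({ binding: env.AI });

const model = workersai("@cf/meta/llama-3.1-8b-instruct", {
  safePrompt: true,                    // documented setting, handled by the provider
  someFlag: "true",                    // hypothetical passthrough setting (string)
  someBudget: { toString: () => "3" }, // hypothetical passthrough (StringLike object)
});
// `model` can then be passed to generateText / streamText from the "ai" package.
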