@browser-ai/core 1.0.0

@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/convert-to-built-in-ai-messages.ts","../src/tool-calling/format-tool-results.ts","../src/tool-calling/build-json-system-prompt.ts","../src/tool-calling/parse-json-function-calls.ts","../src/utils/warnings.ts","../src/utils/prompt-utils.ts","../src/utils/tool-utils.ts","../src/models/session-manager.ts","../src/streaming/tool-call-detector.ts","../src/built-in-ai-language-model.ts","../src/built-in-ai-embedding-model.ts","../src/built-in-ai-provider.ts"],"sourcesContent":["import {\r\n LanguageModelV2Prompt,\r\n LanguageModelV2ToolCallPart,\r\n LanguageModelV2ToolResultPart,\r\n LanguageModelV2ToolResultOutput,\r\n UnsupportedFunctionalityError,\r\n} from \"@ai-sdk/provider\";\r\nimport { formatToolResults } from \"./tool-calling/format-tool-results\";\r\nimport type { ToolResult } from \"./tool-calling/types\";\r\nexport interface ConvertedMessages {\r\n systemMessage?: string;\r\n messages: LanguageModelMessage[];\r\n}\r\n\r\n/**\r\n * Convert base64 string to Uint8Array for built-in AI compatibility\r\n * Built-in AI supports BufferSource (including Uint8Array) for image/audio data\r\n */\r\nfunction convertBase64ToUint8Array(base64: string): Uint8Array {\r\n try {\r\n const binaryString = atob(base64);\r\n const bytes = new Uint8Array(binaryString.length);\r\n for (let i = 0; i < binaryString.length; i++) {\r\n bytes[i] = binaryString.charCodeAt(i);\r\n }\r\n return bytes;\r\n } catch (error) {\r\n throw new Error(`Failed to convert base64 to Uint8Array: ${error}`);\r\n }\r\n}\r\n\r\n/**\r\n * Convert file data to the appropriate format for built-in AI\r\n * Built-in AI supports: Blob, BufferSource (Uint8Array), URLs\r\n */\r\nfunction convertFileData(\r\n data: URL | Uint8Array | string,\r\n mediaType: string,\r\n): Uint8Array | string {\r\n // Handle different data types from Vercel AI SDK\r\n if (data instanceof URL) {\r\n // URLs - keep as string (if supported by provider)\r\n return data.toString();\r\n }\r\n\r\n if (data instanceof Uint8Array) {\r\n // Already in correct format\r\n return data;\r\n }\r\n\r\n if (typeof data === \"string\") {\r\n // Base64 string from AI SDK - convert to Uint8Array\r\n return convertBase64ToUint8Array(data);\r\n }\r\n\r\n // Exhaustive check - this should never happen with the union type\r\n const exhaustiveCheck: never = data;\r\n throw new Error(`Unexpected data type for ${mediaType}: ${exhaustiveCheck}`);\r\n}\r\n\r\nfunction normalizeToolArguments(input: unknown): unknown {\r\n if (input === undefined) {\r\n return {};\r\n }\r\n\r\n if (typeof input === \"string\") {\r\n try {\r\n return JSON.parse(input);\r\n } catch {\r\n return input;\r\n }\r\n }\r\n\r\n return input ?? 
{};\r\n}\r\n\r\nfunction formatToolCallsJson(parts: LanguageModelV2ToolCallPart[]): string {\r\n if (!parts.length) {\r\n return \"\";\r\n }\r\n\r\n const payloads = parts.map((call) => {\r\n const payload: Record<string, unknown> = {\r\n name: call.toolName,\r\n arguments: normalizeToolArguments(call.input),\r\n };\r\n\r\n if (call.toolCallId) {\r\n payload.id = call.toolCallId;\r\n }\r\n\r\n return JSON.stringify(payload);\r\n });\r\n\r\n return `\\`\\`\\`tool_call\r\n${payloads.join(\"\\n\")}\r\n\\`\\`\\``;\r\n}\r\n\r\nfunction convertToolResultOutput(output: LanguageModelV2ToolResultOutput): {\r\n value: unknown;\r\n isError: boolean;\r\n} {\r\n switch (output.type) {\r\n case \"text\":\r\n return { value: output.value, isError: false };\r\n case \"json\":\r\n return { value: output.value, isError: false };\r\n case \"error-text\":\r\n return { value: output.value, isError: true };\r\n case \"error-json\":\r\n return { value: output.value, isError: true };\r\n case \"content\":\r\n return { value: output.value, isError: false };\r\n default: {\r\n const exhaustiveCheck: never = output;\r\n return { value: exhaustiveCheck, isError: false };\r\n }\r\n }\r\n}\r\n\r\nfunction toToolResult(part: LanguageModelV2ToolResultPart): ToolResult {\r\n const { value, isError } = convertToolResultOutput(part.output);\r\n return {\r\n toolCallId: part.toolCallId,\r\n toolName: part.toolName,\r\n result: value,\r\n isError,\r\n };\r\n}\r\n\r\n/**\r\n * Convert Vercel AI SDK prompt format to built-in AI Prompt API format\r\n * Returns system message (for initialPrompts) and regular messages (for prompt method)\r\n */\r\nexport function convertToBuiltInAIMessages(\r\n prompt: LanguageModelV2Prompt,\r\n): ConvertedMessages {\r\n const normalizedPrompt = prompt.slice();\r\n\r\n let systemMessage: string | undefined;\r\n const messages: LanguageModelMessage[] = [];\r\n\r\n for (const message of normalizedPrompt) {\r\n switch (message.role) {\r\n case \"system\": {\r\n // There's only ever one system message from AI SDK\r\n systemMessage = message.content;\r\n break;\r\n }\r\n\r\n case \"user\": {\r\n messages.push({\r\n role: \"user\",\r\n content: message.content.map((part) => {\r\n switch (part.type) {\r\n case \"text\": {\r\n return {\r\n type: \"text\",\r\n value: part.text,\r\n } as LanguageModelMessageContent;\r\n }\r\n\r\n case \"file\": {\r\n const { mediaType, data } = part;\r\n\r\n if (mediaType?.startsWith(\"image/\")) {\r\n const convertedData = convertFileData(data, mediaType);\r\n\r\n return {\r\n type: \"image\",\r\n value: convertedData,\r\n } as LanguageModelMessageContent;\r\n } else if (mediaType?.startsWith(\"audio/\")) {\r\n const convertedData = convertFileData(data, mediaType);\r\n\r\n return {\r\n type: \"audio\",\r\n value: convertedData,\r\n } as LanguageModelMessageContent;\r\n } else {\r\n throw new UnsupportedFunctionalityError({\r\n functionality: `file type: ${mediaType}`,\r\n });\r\n }\r\n }\r\n\r\n default: {\r\n const exhaustiveCheck: never = part;\r\n throw new UnsupportedFunctionalityError({\r\n functionality: `content type: ${(exhaustiveCheck as { type?: string }).type ?? 
\"unknown\"}`,\r\n });\r\n }\r\n }\r\n }),\r\n } as LanguageModelMessage);\r\n break;\r\n }\r\n\r\n case \"assistant\": {\r\n let text = \"\";\r\n const toolCallParts: LanguageModelV2ToolCallPart[] = [];\r\n\r\n for (const part of message.content) {\r\n switch (part.type) {\r\n case \"text\": {\r\n text += part.text;\r\n break;\r\n }\r\n case \"reasoning\": {\r\n text += part.text;\r\n break;\r\n }\r\n case \"tool-call\": {\r\n toolCallParts.push(part);\r\n break;\r\n }\r\n case \"file\": {\r\n throw new UnsupportedFunctionalityError({\r\n functionality: \"assistant file attachments\",\r\n });\r\n }\r\n case \"tool-result\": {\r\n throw new UnsupportedFunctionalityError({\r\n functionality:\r\n \"tool-result parts in assistant messages (should be in tool messages)\",\r\n });\r\n }\r\n default: {\r\n const exhaustiveCheck: never = part;\r\n throw new UnsupportedFunctionalityError({\r\n functionality: `assistant part type: ${(exhaustiveCheck as { type?: string }).type ?? \"unknown\"}`,\r\n });\r\n }\r\n }\r\n }\r\n\r\n const toolCallJson = formatToolCallsJson(toolCallParts);\r\n const contentSegments: string[] = [];\r\n\r\n if (text.trim().length > 0) {\r\n contentSegments.push(text);\r\n } else if (text.length > 0) {\r\n // preserve purely whitespace responses so we don't lose formatting\r\n contentSegments.push(text);\r\n }\r\n\r\n if (toolCallJson) {\r\n contentSegments.push(toolCallJson);\r\n }\r\n\r\n const content =\r\n contentSegments.length > 0 ? contentSegments.join(\"\\n\") : \"\";\r\n\r\n messages.push({\r\n role: \"assistant\",\r\n content,\r\n } as LanguageModelMessage);\r\n break;\r\n }\r\n\r\n case \"tool\": {\r\n const toolParts = message.content as LanguageModelV2ToolResultPart[];\r\n const results: ToolResult[] = toolParts.map(toToolResult);\r\n const toolResultsJson = formatToolResults(results);\r\n\r\n messages.push({\r\n role: \"user\",\r\n content: toolResultsJson,\r\n } as LanguageModelMessage);\r\n break;\r\n }\r\n\r\n default: {\r\n const exhaustiveCheck: never = message;\r\n throw new Error(\r\n `Unsupported role: ${(exhaustiveCheck as { role?: string }).role ?? \"unknown\"}`,\r\n );\r\n }\r\n }\r\n }\r\n\r\n return { systemMessage, messages };\r\n}\r\n","import type { ToolResult } from \"./types\";\n\n/**\n * Builds a JSON-serializable payload for a single tool result.\n * Includes tool name, result data, error flag, and optional call ID.\n *\n * @param result - The tool execution result to format\n * @returns Object containing formatted result data ready for JSON serialization\n */\nfunction buildResultPayload(result: ToolResult): Record<string, unknown> {\n const payload: Record<string, unknown> = {\n name: result.toolName,\n result: result.result ?? null,\n error: Boolean(result.isError),\n };\n\n if (result.toolCallId) {\n payload.id = result.toolCallId;\n }\n\n return payload;\n}\n\n/**\n * Formats tool execution results as JSON for continuation in the conversation.\n *\n * Each result is serialized as a single JSON object. 
Multiple results (for parallel\n * execution scenarios) are emitted on separate lines within a ```tool_result code fence.\n *\n * @param results - Array of tool execution results to format\n * @returns Formatted string with results in tool_result code fence, or empty string if no results\n * @example\n * ```typescript\n * formatToolResults([\n * { toolCallId: \"call_123\", toolName: \"search\", result: { data: \"...\" } }\n * ])\n * // Returns: ```tool_result\\n{\"id\":\"call_123\",\"name\":\"search\",\"result\":{...},\"error\":false}\\n```\n * ```\n */\nexport function formatToolResults(results: ToolResult[]): string {\n if (!results || results.length === 0) {\n return \"\";\n }\n\n const payloads = results.map((result) =>\n JSON.stringify(buildResultPayload(result)),\n );\n\n return `\\`\\`\\`tool_result\n${payloads.join(\"\\n\")}\n\\`\\`\\``;\n}\n\n/**\n * Formats a single tool result.\n * Convenience wrapper around formatToolResults for single result scenarios.\n *\n * @param result - The tool execution result to format\n * @returns Formatted string with result in tool_result code fence\n */\nexport function formatSingleToolResult(result: ToolResult): string {\n return formatToolResults([result]);\n}\n","import type {\r\n JSONSchema7,\r\n LanguageModelV2FunctionTool,\r\n} from \"@ai-sdk/provider\";\r\nimport type { ToolDefinition } from \"./types\";\r\n\r\n/**\r\n * Builds an enhanced system prompt for JSON-based tool calling.\r\n * The model receives JSON schemas and is expected to return JSON tool calls.\r\n *\r\n * @param originalSystemPrompt - The original system prompt (if any)\r\n * @param tools - Array of available tool definitions\r\n * @param options - Configuration options for tool calling behavior (unused, kept for backwards compatibility)\r\n * @returns Enhanced system prompt with JSON tool calling instructions\r\n */\r\nexport function buildJsonToolSystemPrompt(\r\n originalSystemPrompt: string | undefined,\r\n tools: Array<ToolDefinition | LanguageModelV2FunctionTool>,\r\n options?: { allowParallelToolCalls?: boolean },\r\n): string {\r\n if (!tools || tools.length === 0) {\r\n return originalSystemPrompt || \"\";\r\n }\r\n\r\n const parallelInstruction =\r\n \"Only request one tool call at a time. Wait for tool results before asking for another tool.\";\r\n\r\n const toolSchemas = tools.map((tool) => {\r\n const schema = getParameters(tool);\r\n return {\r\n name: tool.name,\r\n description: tool.description ?? \"No description provided.\",\r\n parameters: schema || { type: \"object\", properties: {} },\r\n };\r\n });\r\n\r\n const toolsJson = JSON.stringify(toolSchemas, null, 2);\r\n\r\n const instructionBody = `You are a helpful AI assistant with access to tools.\r\n\r\n# Available Tools\r\n${toolsJson}\r\n\r\n# Tool Calling Instructions\r\n${parallelInstruction}\r\n\r\nTo call a tool, output JSON in this exact format inside a \\`\\`\\`tool_call code fence:\r\n\r\n\\`\\`\\`tool_call\r\n{\"name\": \"tool_name\", \"arguments\": {\"param1\": \"value1\", \"param2\": \"value2\"}}\r\n\\`\\`\\`\r\n\r\nTool responses will be provided in \\`\\`\\`tool_result fences. 
Each line contains JSON like:\r\n\\`\\`\\`tool_result\r\n{\"id\": \"call_123\", \"name\": \"tool_name\", \"result\": {...}, \"error\": false}\r\n\\`\\`\\`\r\nUse the \\`result\\` payload (and treat \\`error\\` as a boolean flag) when continuing the conversation.\r\n\r\nImportant:\r\n- Use exact tool and parameter names from the schema above\r\n- Arguments must be a valid JSON object matching the tool's parameters\r\n- You can include brief reasoning before or after the tool call\r\n- If no tool is needed, respond directly without tool_call fences`;\r\n\r\n if (originalSystemPrompt?.trim()) {\r\n return `${originalSystemPrompt.trim()}\\n\\n${instructionBody}`;\r\n }\r\n\r\n return instructionBody;\r\n}\r\n\r\n/**\r\n * Extracts the parameters/input schema from a tool definition.\r\n * Handles both ToolDefinition (parameters field) and LanguageModelV2FunctionTool (inputSchema field).\r\n *\r\n * @param tool - The tool definition to extract parameters from\r\n * @returns The JSON Schema for the tool's parameters, or undefined if not present\r\n */\r\nfunction getParameters(\r\n tool: ToolDefinition | LanguageModelV2FunctionTool,\r\n): JSONSchema7 | undefined {\r\n if (\"parameters\" in tool) {\r\n return tool.parameters;\r\n }\r\n\r\n return tool.inputSchema as JSONSchema7 | undefined;\r\n}\r\n","import type { ParsedResponse, ParsedToolCall } from \"./types\";\n\n/**\n * Regular expression to match JSON tool call code fences.\n * Matches blocks like ```tool_call or ```tool-call with content inside.\n */\nconst JSON_TOOL_CALL_FENCE_REGEX = /```tool[_-]?call\\s*([\\s\\S]*?)```/gi;\n\n/**\n * Generates a unique identifier for a tool call.\n * Uses timestamp and random string to ensure uniqueness.\n *\n * @returns A unique tool call ID in the format \"call_{timestamp}_{random}\"\n */\nfunction generateToolCallId(): string {\n return `call_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`;\n}\n\n/**\n * Parses JSON-formatted tool calls from model response.\n * Supports multiple formats:\n * 1. Single object: {\"name\": \"tool\", \"arguments\": {...}}\n * 2. Array: [{\"name\": \"tool1\", ...}, {\"name\": \"tool2\", ...}]\n * 3. Newline-separated objects:\n * {\"name\": \"tool1\", \"arguments\": {...}}\n * {\"name\": \"tool2\", \"arguments\": {...}}\n *\n * @param response - The model's response text to parse\n * @returns Object containing parsed tool calls and remaining text content\n */\nexport function parseJsonFunctionCalls(response: string): ParsedResponse {\n const matches = Array.from(response.matchAll(JSON_TOOL_CALL_FENCE_REGEX));\n JSON_TOOL_CALL_FENCE_REGEX.lastIndex = 0;\n\n if (matches.length === 0) {\n return { toolCalls: [], textContent: response };\n }\n\n const toolCalls: ParsedToolCall[] = [];\n let textContent = response;\n\n for (const match of matches) {\n const [fullFence, innerContent] = match;\n textContent = textContent.replace(fullFence, \"\");\n\n try {\n const trimmed = innerContent.trim();\n\n // Try parsing as a single JSON value first (object or array)\n try {\n const parsed = JSON.parse(trimmed);\n const callsArray = Array.isArray(parsed) ? 
parsed : [parsed];\n\n for (const call of callsArray) {\n if (!call.name) continue;\n\n toolCalls.push({\n type: \"tool-call\",\n toolCallId: call.id || generateToolCallId(),\n toolName: call.name,\n args: call.arguments || {},\n });\n }\n } catch {\n // If single JSON parsing fails, try parsing as newline-separated JSON objects\n const lines = trimmed.split(\"\\n\").filter((line) => line.trim());\n\n for (const line of lines) {\n try {\n const call = JSON.parse(line.trim());\n if (!call.name) continue;\n\n toolCalls.push({\n type: \"tool-call\",\n toolCallId: call.id || generateToolCallId(),\n toolName: call.name,\n args: call.arguments || {},\n });\n } catch {\n // Skip invalid JSON lines\n continue;\n }\n }\n }\n } catch (error) {\n console.warn(\"Failed to parse JSON tool call:\", error);\n continue;\n }\n }\n\n textContent = textContent.replace(/\\n{2,}/g, \"\\n\");\n\n return { toolCalls, textContent: textContent.trim() };\n}\n\n/**\n * Checks if a response contains JSON-formatted tool calls.\n *\n * @param response - The model's response text to check\n * @returns true if the response contains tool call fences, false otherwise\n */\nexport function hasJsonFunctionCalls(response: string): boolean {\n const hasMatch = JSON_TOOL_CALL_FENCE_REGEX.test(response);\n JSON_TOOL_CALL_FENCE_REGEX.lastIndex = 0;\n return hasMatch;\n}\n\n/**\n * Extracts the first JSON tool call code fence block from a response.\n *\n * @param response - The model's response text to extract from\n * @returns The first tool call fence block (including delimiters), or null if none found\n */\nexport function extractJsonFunctionCallsBlock(response: string): string | null {\n const match = JSON_TOOL_CALL_FENCE_REGEX.exec(response);\n JSON_TOOL_CALL_FENCE_REGEX.lastIndex = 0;\n return match ? 
match[0] : null;\n}\n","/**\r\n * Warning generation utilities for unsupported settings and tools\r\n */\r\n\r\nimport type {\r\n LanguageModelV2CallWarning,\r\n LanguageModelV2ProviderDefinedTool,\r\n} from \"@ai-sdk/provider\";\r\n\r\n/**\r\n * Creates a warning for an unsupported setting\r\n *\r\n * @param setting - Name of the setting that is not supported\r\n * @param details - Additional details about why it's not supported\r\n * @returns A call warning object\r\n *\r\n * @example\r\n * ```typescript\r\n * const warning = createUnsupportedSettingWarning(\r\n * \"maxOutputTokens\",\r\n * \"maxOutputTokens is not supported by Prompt API\"\r\n * );\r\n * ```\r\n */\r\nexport function createUnsupportedSettingWarning(\r\n setting: string,\r\n details: string,\r\n): LanguageModelV2CallWarning {\r\n return {\r\n type: \"unsupported-setting\",\r\n setting,\r\n details,\r\n };\r\n}\r\n\r\n/**\r\n * Creates a warning for an unsupported tool type\r\n *\r\n * @param tool - The provider-defined tool that is not supported\r\n * @param details - Additional details about why it's not supported\r\n * @returns A call warning object\r\n *\r\n * @example\r\n * ```typescript\r\n * const warning = createUnsupportedToolWarning(\r\n * providerTool,\r\n * \"Only function tools are supported by the Prompt API polyfill\"\r\n * );\r\n * ```\r\n */\r\nexport function createUnsupportedToolWarning(\r\n tool: LanguageModelV2ProviderDefinedTool,\r\n details: string,\r\n): LanguageModelV2CallWarning {\r\n return {\r\n type: \"unsupported-tool\",\r\n tool,\r\n details,\r\n };\r\n}\r\n\r\n/**\r\n * Gathers all warnings for unsupported call options\r\n *\r\n * @param options - The call options to check\r\n * @returns Array of warnings for any unsupported settings\r\n *\r\n * @example\r\n * ```typescript\r\n * const warnings = gatherUnsupportedSettingWarnings({\r\n * maxOutputTokens: 100,\r\n * topP: 0.9,\r\n * temperature: 0.7,\r\n * });\r\n * // Returns warnings for maxOutputTokens and topP\r\n * ```\r\n */\r\nexport function gatherUnsupportedSettingWarnings(options: {\r\n maxOutputTokens?: number;\r\n stopSequences?: string[];\r\n topP?: number;\r\n presencePenalty?: number;\r\n frequencyPenalty?: number;\r\n seed?: number;\r\n toolChoice?: unknown;\r\n}): LanguageModelV2CallWarning[] {\r\n const warnings: LanguageModelV2CallWarning[] = [];\r\n\r\n if (options.maxOutputTokens != null) {\r\n warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"maxOutputTokens\",\r\n \"maxOutputTokens is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n if (options.stopSequences != null) {\r\n warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"stopSequences\",\r\n \"stopSequences is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n if (options.topP != null) {\r\n warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"topP\",\r\n \"topP is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n if (options.presencePenalty != null) {\r\n warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"presencePenalty\",\r\n \"presencePenalty is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n if (options.frequencyPenalty != null) {\r\n warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"frequencyPenalty\",\r\n \"frequencyPenalty is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n if (options.seed != null) {\r\n warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"seed\",\r\n \"seed is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n if (options.toolChoice != null) {\r\n 
warnings.push(\r\n createUnsupportedSettingWarning(\r\n \"toolChoice\",\r\n \"toolChoice is not supported by Prompt API\",\r\n ),\r\n );\r\n }\r\n\r\n return warnings;\r\n}\r\n","/**\r\n * Utilities for prompt processing and transformation\r\n */\r\n\r\nimport type { LanguageModelV2Prompt } from \"@ai-sdk/provider\";\r\n\r\n/**\r\n * Detect if the prompt contains multimodal content (images, audio)\r\n *\r\n * @param prompt - The prompt to check\r\n * @returns true if the prompt contains any file content\r\n */\r\nexport function hasMultimodalContent(prompt: LanguageModelV2Prompt): boolean {\r\n for (const message of prompt) {\r\n if (message.role === \"user\") {\r\n for (const part of message.content) {\r\n if (part.type === \"file\") {\r\n return true;\r\n }\r\n }\r\n }\r\n }\r\n return false;\r\n}\r\n\r\n/**\r\n * Get expected inputs based on prompt content.\r\n * Analyzes the prompt to determine what types of inputs (text, image, audio) are used.\r\n * This information is used to configure the Prompt API session with the correct input capabilities.\r\n *\r\n * @param prompt - The prompt to analyze\r\n * @returns Array of expected input types for session creation (only includes image/audio, text is assumed)\r\n * @example\r\n * ```typescript\r\n * const inputs = getExpectedInputs(prompt);\r\n * // Returns: [{ type: \"image\" }] if prompt contains images\r\n * // Returns: [] if prompt only contains text\r\n * ```\r\n */\r\nexport function getExpectedInputs(\r\n prompt: LanguageModelV2Prompt,\r\n): Array<{ type: \"text\" | \"image\" | \"audio\" }> {\r\n const inputs = new Set<\"text\" | \"image\" | \"audio\">();\r\n // Don't add text by default - it's assumed by the Prompt API\r\n\r\n for (const message of prompt) {\r\n if (message.role === \"user\") {\r\n for (const part of message.content) {\r\n if (part.type === \"file\") {\r\n if (part.mediaType?.startsWith(\"image/\")) {\r\n inputs.add(\"image\");\r\n } else if (part.mediaType?.startsWith(\"audio/\")) {\r\n inputs.add(\"audio\");\r\n }\r\n }\r\n }\r\n }\r\n }\r\n\r\n return Array.from(inputs).map((type) => ({ type }));\r\n}\r\n\r\n/**\r\n * Prepends a system prompt to the first user message in the conversation.\r\n *\r\n * This is necessary because the Prompt API doesn't support separate system messages,\r\n * so we inject the system prompt into the first user message instead.\r\n * Creates a shallow copy of messages to avoid mutating the original array.\r\n *\r\n * @param messages - The messages array to modify (not mutated, a copy is returned)\r\n * @param systemPrompt - The system prompt to prepend\r\n * @returns New messages array with system prompt prepended to first user message\r\n * @example\r\n * ```typescript\r\n * const messages = [{ role: \"user\", content: \"Hello\" }];\r\n * const updated = prependSystemPromptToMessages(messages, \"You are a helpful assistant.\");\r\n * // Returns: [{ role: \"user\", content: \"You are a helpful assistant.\\n\\nHello\" }]\r\n * ```\r\n */\r\nexport function prependSystemPromptToMessages(\r\n messages: LanguageModelMessage[],\r\n systemPrompt: string,\r\n): LanguageModelMessage[] {\r\n if (!systemPrompt.trim()) {\r\n return messages;\r\n }\r\n\r\n const prompts = messages.map((message) => ({ ...message }));\r\n const firstUserIndex = prompts.findIndex(\r\n (message) => message.role === \"user\",\r\n );\r\n\r\n if (firstUserIndex !== -1) {\r\n const firstUserMessage = prompts[firstUserIndex];\r\n\r\n if (Array.isArray(firstUserMessage.content)) {\r\n const content = 
firstUserMessage.content.slice();\r\n content.unshift({\r\n type: \"text\",\r\n value: `${systemPrompt}\\n\\n`,\r\n });\r\n prompts[firstUserIndex] = {\r\n ...firstUserMessage,\r\n content,\r\n } as LanguageModelMessage;\r\n } else if (typeof firstUserMessage.content === \"string\") {\r\n prompts[firstUserIndex] = {\r\n ...firstUserMessage,\r\n content: `${systemPrompt}\\n\\n${firstUserMessage.content}`,\r\n } as LanguageModelMessage;\r\n }\r\n } else {\r\n prompts.unshift({\r\n role: \"user\",\r\n content: systemPrompt,\r\n });\r\n }\r\n\r\n return prompts;\r\n}\r\n","/**\r\n * Utilities for working with AI SDK tools\r\n */\r\n\r\nimport type {\r\n LanguageModelV2FunctionTool,\r\n LanguageModelV2ProviderDefinedTool,\r\n} from \"@ai-sdk/provider\";\r\n\r\n/**\r\n * Type guard to check if a tool is a function tool\r\n *\r\n * @param tool - The tool to check\r\n * @returns true if the tool is a LanguageModelV2FunctionTool\r\n */\r\nexport function isFunctionTool(\r\n tool: LanguageModelV2FunctionTool | LanguageModelV2ProviderDefinedTool,\r\n): tool is LanguageModelV2FunctionTool {\r\n return tool.type === \"function\";\r\n}\r\n","/**\r\n * SessionManager handles the lifecycle of browser AI sessions\r\n * Manages session creation, caching, availability checks, and progress monitoring\r\n */\r\n\r\nimport { LoadSettingError } from \"@ai-sdk/provider\";\r\n\r\n/**\r\n * Progress callback for model download events\r\n * @param progress - Download progress from 0 to 1\r\n */\r\nexport type ProgressCallback = (progress: number) => void;\r\n\r\n/**\r\n * Custom provider options that extend the standard API\r\n */\r\ninterface CustomProviderOptions {}\r\n\r\n/**\r\n * Options for creating a new session\r\n */\r\nexport interface SessionCreateOptions extends LanguageModelCreateOptions {\r\n systemMessage?: string;\r\n expectedInputs?: Array<{ type: \"text\" | \"image\" | \"audio\" }>;\r\n onDownloadProgress?: ProgressCallback;\r\n}\r\n\r\n/**\r\n * Manages browser AI session lifecycle\r\n *\r\n * Responsibilities:\r\n * - Create and cache AI sessions\r\n * - Check model availability\r\n * - Monitor download progress\r\n * - Handle session options and configuration\r\n *\r\n * @example\r\n * ```typescript\r\n * const manager = new SessionManager(config);\r\n *\r\n * // Check availability first\r\n * const status = await manager.checkAvailability();\r\n *\r\n * // Create session with progress tracking\r\n * const session = await manager.getSession({\r\n * temperature: 0.7,\r\n * onDownloadProgress: (p) => console.log(`${p * 100}%`)\r\n * });\r\n * ```\r\n */\r\nexport class SessionManager {\r\n private session: LanguageModel | null = null;\r\n private baseOptions: LanguageModelCreateOptions;\r\n\r\n /**\r\n * Creates a new SessionManager\r\n *\r\n * @param baseOptions - Base configuration options for all sessions\r\n */\r\n constructor(\r\n baseOptions: LanguageModelCreateOptions & Partial<CustomProviderOptions>,\r\n ) {\r\n // Filter out our custom options that aren't part of LanguageModelCreateOptions\r\n this.baseOptions = baseOptions;\r\n }\r\n\r\n /**\r\n * Gets or creates a session with the specified options\r\n *\r\n * If a session already exists, it will be reused unless force create is needed.\r\n *\r\n * @param options - Optional session creation options\r\n * @returns Promise resolving to a LanguageModel session\r\n * @throws {LoadSettingError} When Prompt API is not available or model is unavailable\r\n *\r\n * @example\r\n * ```typescript\r\n * const session = await 
manager.getSession({\r\n * systemMessage: \"You are a helpful assistant\",\r\n * expectedInputs: [{ type: \"image\" }],\r\n * temperature: 0.8\r\n * });\r\n * ```\r\n */\r\n async getSession(options?: SessionCreateOptions): Promise<LanguageModel> {\r\n // Check if LanguageModel API is available\r\n if (typeof LanguageModel === \"undefined\") {\r\n throw new LoadSettingError({\r\n message:\r\n \"Prompt API is not available. This library requires Chrome or Edge browser with built-in AI capabilities.\",\r\n });\r\n }\r\n\r\n // Return existing session if available\r\n if (this.session) {\r\n return this.session;\r\n }\r\n\r\n // Check availability before attempting to create\r\n const availability = await LanguageModel.availability();\r\n if (availability === \"unavailable\") {\r\n throw new LoadSettingError({\r\n message: \"Built-in model not available in this browser\",\r\n });\r\n }\r\n\r\n // Prepare session options\r\n const sessionOptions = this.prepareSessionOptions(options);\r\n\r\n // Create the session\r\n this.session = await LanguageModel.create(sessionOptions);\r\n\r\n return this.session;\r\n }\r\n\r\n /**\r\n * Creates a session with download progress monitoring\r\n *\r\n * This is a convenience method for users who want explicit progress tracking.\r\n *\r\n * @param onDownloadProgress - Optional callback receiving progress (0-1) during download\r\n * @returns Promise resolving to a LanguageModel session\r\n * @throws {LoadSettingError} When Prompt API is not available or model is unavailable\r\n *\r\n * @example\r\n * ```typescript\r\n * const session = await manager.createSessionWithProgress(\r\n * (progress) => {\r\n * console.log(`Download: ${Math.round(progress * 100)}%`);\r\n * }\r\n * );\r\n * ```\r\n */\r\n async createSessionWithProgress(\r\n onDownloadProgress?: ProgressCallback,\r\n ): Promise<LanguageModel> {\r\n return this.getSession({ onDownloadProgress });\r\n }\r\n\r\n /**\r\n * Checks the availability status of the built-in AI model\r\n *\r\n * @returns Promise resolving to availability status\r\n * - \"unavailable\": Model is not supported\r\n * - \"downloadable\": Model needs to be downloaded\r\n * - \"downloading\": Model is currently downloading\r\n * - \"available\": Model is ready to use\r\n *\r\n * @example\r\n * ```typescript\r\n * const status = await manager.checkAvailability();\r\n * if (status === \"downloadable\") {\r\n * console.log(\"Model needs to be downloaded first\");\r\n * }\r\n * ```\r\n */\r\n async checkAvailability(): Promise<Availability> {\r\n if (typeof LanguageModel === \"undefined\") {\r\n return \"unavailable\";\r\n }\r\n return LanguageModel.availability();\r\n }\r\n\r\n /**\r\n * Gets the current session if it exists\r\n *\r\n * @returns The current session or null if none exists\r\n */\r\n getCurrentSession(): LanguageModel | null {\r\n return this.session;\r\n }\r\n\r\n /**\r\n * Destroys the current session\r\n *\r\n * Use this when you want to force creation of a new session\r\n * with different options on the next getSession call.\r\n */\r\n destroySession(): void {\r\n if (this.session && typeof this.session.destroy === \"function\") {\r\n this.session.destroy();\r\n }\r\n this.session = null;\r\n }\r\n\r\n /**\r\n * Prepares merged session options from base config and request options\r\n *\r\n * @param options - Optional request-specific options\r\n * @returns Merged and sanitized options ready for LanguageModel.create()\r\n * @private\r\n */\r\n private prepareSessionOptions(\r\n options?: SessionCreateOptions,\r\n ): 
LanguageModelCreateOptions {\r\n // Start with base options\r\n const mergedOptions: LanguageModelCreateOptions &\r\n Partial<CustomProviderOptions> = { ...this.baseOptions };\r\n\r\n // Merge in request-specific options if provided\r\n if (options) {\r\n const {\r\n systemMessage,\r\n expectedInputs,\r\n onDownloadProgress,\r\n ...createOptions\r\n } = options;\r\n\r\n // Merge standard create options\r\n Object.assign(mergedOptions, createOptions);\r\n\r\n // Handle system message\r\n if (systemMessage) {\r\n mergedOptions.initialPrompts = [\r\n { role: \"system\", content: systemMessage },\r\n ];\r\n }\r\n\r\n // Handle expected inputs (for multimodal)\r\n if (expectedInputs && expectedInputs.length > 0) {\r\n mergedOptions.expectedInputs = expectedInputs;\r\n }\r\n\r\n // Handle download progress monitoring\r\n if (onDownloadProgress) {\r\n mergedOptions.monitor = (m: CreateMonitor) => {\r\n m.addEventListener(\"downloadprogress\", (e: ProgressEvent) => {\r\n onDownloadProgress(e.loaded); // e.loaded is between 0 and 1\r\n });\r\n };\r\n }\r\n }\r\n\r\n // Remove any custom options that aren't part of the standard API\r\n this.sanitizeOptions(mergedOptions);\r\n\r\n return mergedOptions;\r\n }\r\n\r\n /**\r\n * Removes custom options that aren't part of LanguageModel.create API\r\n *\r\n * @param options - Options object to sanitize in-place\r\n * @private\r\n */\r\n private sanitizeOptions(\r\n options: LanguageModelCreateOptions & Partial<CustomProviderOptions>,\r\n ): void {\r\n // Remove our custom options that the Prompt API doesn't understand\r\n }\r\n}\r\n","/**\n * ToolCallFenceDetector - Detects and extracts tool call fences from streaming text\n *\n * This module handles the complex task of detecting tool call fences in a stream\n * where fences might be split across multiple chunks. 
It uses overlap detection\n * to avoid emitting text that might be the beginning of a fence.\n */\n\n/**\n * Result of fence detection operation\n */\nexport interface FenceDetectionResult {\n /** The complete fence if found, null otherwise */\n fence: string | null;\n /** Any text before the fence that can be safely emitted */\n prefixText: string;\n /** Text after the fence (if fence was found) */\n remainingText: string;\n /** Length of potential partial fence at buffer end */\n overlapLength: number;\n}\n\n/**\n * Result of streaming fence content detection\n */\nexport interface StreamingFenceResult {\n /** Whether we're currently inside a fence */\n inFence: boolean;\n /** Content that can be safely emitted (either as text or tool-input-delta) */\n safeContent: string;\n /** The complete fence if it just closed, null otherwise */\n completeFence: string | null;\n /** Text after a completed fence */\n textAfterFence: string;\n}\n\n/**\n * Detects tool call fences in streaming text with support for partial matches\n *\n * @example\n * ```typescript\n * const detector = new ToolCallFenceDetector();\n *\n * // Add chunks as they arrive\n * detector.addChunk(\"Here's the answer: \");\n * detector.addChunk(\"```tool_call\\n<tool_call>\");\n * detector.addChunk(\"<name>search</name></tool_call>\\n```\");\n *\n * // Detect fence\n * const result = detector.detectFence();\n * if (result.fence) {\n * console.log(\"Found tool call!\");\n * }\n * ```\n */\nexport class ToolCallFenceDetector {\n private readonly FENCE_STARTS = [\"```tool_call\"];\n private readonly FENCE_END = \"```\";\n private buffer = \"\";\n\n // Streaming state\n private inFence = false;\n private fenceStartBuffer = \"\"; // Accumulated fence content\n\n /**\n * Adds a chunk of text to the internal buffer\n *\n * @param chunk - Text chunk from the stream\n */\n addChunk(chunk: string): void {\n this.buffer += chunk;\n }\n\n /**\n * Gets the current buffer content\n */\n getBuffer(): string {\n return this.buffer;\n }\n\n /**\n * Clears the internal buffer\n */\n clearBuffer(): void {\n this.buffer = \"\";\n }\n\n /**\n * Detects if there's a complete fence in the buffer\n *\n * This method:\n * 1. Searches for fence start markers\n * 2. If found, looks for closing fence\n * 3. Computes overlap for partial fences\n * 4. Returns safe text that can be emitted\n *\n * @returns Detection result with fence info and safe text\n */\n detectFence(): FenceDetectionResult {\n const { index: startIdx, prefix: matchedPrefix } = this.findFenceStart(\n this.buffer,\n );\n\n // No fence start found\n if (startIdx === -1) {\n // Compute how much of the buffer end might be a partial fence start\n const overlap = this.computeOverlapLength(this.buffer, this.FENCE_STARTS);\n const safeTextLength = this.buffer.length - overlap;\n\n const prefixText =\n safeTextLength > 0 ? this.buffer.slice(0, safeTextLength) : \"\";\n const remaining = overlap > 0 ? this.buffer.slice(-overlap) : \"\";\n\n // Update buffer to keep only the overlap\n this.buffer = remaining;\n\n return {\n fence: null,\n prefixText,\n remainingText: \"\",\n overlapLength: overlap,\n };\n }\n\n // Found fence start - extract prefix text before it\n const prefixText = this.buffer.slice(0, startIdx);\n this.buffer = this.buffer.slice(startIdx);\n\n // Look for closing fence\n const prefixLength = matchedPrefix?.length ?? 
0;\n const closingIdx = this.buffer.indexOf(this.FENCE_END, prefixLength);\n\n // Fence not complete yet\n if (closingIdx === -1) {\n // Keep the buffer as-is, waiting for more data\n return {\n fence: null,\n prefixText,\n remainingText: \"\",\n overlapLength: 0,\n };\n }\n\n // Complete fence found!\n const endPos = closingIdx + this.FENCE_END.length;\n const fence = this.buffer.slice(0, endPos);\n const remainingText = this.buffer.slice(endPos);\n\n // Clear the buffer since we extracted everything\n this.buffer = \"\";\n\n return {\n fence,\n prefixText,\n remainingText,\n overlapLength: 0,\n };\n }\n\n /**\n * Finds the first occurrence of any fence start marker\n *\n * @param text - Text to search in\n * @returns Index of first fence start and which prefix matched\n * @private\n */\n private findFenceStart(text: string): {\n index: number;\n prefix: string | null;\n } {\n let bestIndex = -1;\n let matchedPrefix: string | null = null;\n\n for (const prefix of this.FENCE_STARTS) {\n const idx = text.indexOf(prefix);\n if (idx !== -1 && (bestIndex === -1 || idx < bestIndex)) {\n bestIndex = idx;\n matchedPrefix = prefix;\n }\n }\n\n return { index: bestIndex, prefix: matchedPrefix };\n }\n\n /**\n * Computes the maximum overlap between the end of text and the start of any prefix\n *\n * This is crucial for streaming: if the buffer ends with \"``\", we can't emit it\n * because the next chunk might be \"`tool_call\", completing a fence marker.\n *\n * @param text - Text to check for overlap\n * @param prefixes - List of prefixes to check against\n * @returns Length of the maximum overlap found\n *\n * @example\n * ```typescript\n * computeOverlapLength(\"hello ``\", [\"```tool_call\"])\n * // Returns: 2 (because \"``\" matches start of \"```tool_call\")\n *\n * computeOverlapLength(\"hello `\", [\"```tool_call\"])\n * // Returns: 1\n *\n * computeOverlapLength(\"hello world\", [\"```tool_call\"])\n * // Returns: 0 (no overlap)\n * ```\n *\n * @private\n */\n private computeOverlapLength(text: string, prefixes: string[]): number {\n let overlap = 0;\n\n for (const prefix of prefixes) {\n const maxLength = Math.min(text.length, prefix.length - 1);\n\n for (let size = maxLength; size > 0; size -= 1) {\n // Check if the last 'size' characters of text match the first 'size' characters of prefix\n if (prefix.startsWith(text.slice(-size))) {\n overlap = Math.max(overlap, size);\n break;\n }\n }\n }\n\n return overlap;\n }\n\n /**\n * Checks if the buffer currently contains any text\n */\n hasContent(): boolean {\n return this.buffer.length > 0;\n }\n\n /**\n * Gets the buffer size\n */\n getBufferSize(): number {\n return this.buffer.length;\n }\n\n /**\n * Detect and stream fence content in real-time for true incremental streaming\n *\n * This method is designed for streaming tool calls as they arrive:\n * 1. Detects when a fence starts and transitions to \"inFence\" state\n * 2. While inFence, emits safe content that won't conflict with fence end marker\n * 3. 
When fence ends, returns the complete fence for parsing\n *\n * @returns Streaming result with current state and safe content to emit\n */\n detectStreamingFence(): StreamingFenceResult {\n if (!this.inFence) {\n // Look for fence start\n const { index: startIdx, prefix: matchedPrefix } = this.findFenceStart(\n this.buffer,\n );\n\n if (startIdx === -1) {\n // No fence start found - emit safe text\n const overlap = this.computeOverlapLength(\n this.buffer,\n this.FENCE_STARTS,\n );\n const safeTextLength = this.buffer.length - overlap;\n const safeContent =\n safeTextLength > 0 ? this.buffer.slice(0, safeTextLength) : \"\";\n this.buffer = this.buffer.slice(safeTextLength);\n\n return {\n inFence: false,\n safeContent,\n completeFence: null,\n textAfterFence: \"\",\n };\n }\n\n // Found fence start!\n const prefixText = this.buffer.slice(0, startIdx);\n const fenceStartLength = matchedPrefix?.length ?? 0;\n\n // Move buffer past the fence start marker\n this.buffer = this.buffer.slice(startIdx + fenceStartLength);\n\n // Skip newline after fence start if present\n if (this.buffer.startsWith(\"\\n\")) {\n this.buffer = this.buffer.slice(1);\n }\n\n this.inFence = true;\n this.fenceStartBuffer = \"\";\n\n return {\n inFence: true,\n safeContent: prefixText, // Emit any text before the fence\n completeFence: null,\n textAfterFence: \"\",\n };\n }\n\n // We're inside a fence - look for fence end\n const closingIdx = this.buffer.indexOf(this.FENCE_END);\n\n if (closingIdx === -1) {\n // No fence end yet - emit safe content (leaving potential fence end marker)\n const overlap = this.computeOverlapLength(this.buffer, [this.FENCE_END]);\n const safeContentLength = this.buffer.length - overlap;\n\n if (safeContentLength > 0) {\n const safeContent = this.buffer.slice(0, safeContentLength);\n this.fenceStartBuffer += safeContent;\n this.buffer = this.buffer.slice(safeContentLength);\n\n return {\n inFence: true,\n safeContent,\n completeFence: null,\n textAfterFence: \"\",\n };\n }\n\n // Nothing safe to emit yet\n return {\n inFence: true,\n safeContent: \"\",\n completeFence: null,\n textAfterFence: \"\",\n };\n }\n\n // Found fence end!\n const fenceContent = this.buffer.slice(0, closingIdx);\n this.fenceStartBuffer += fenceContent;\n\n // Reconstruct complete fence\n const completeFence = `${this.FENCE_STARTS[0]}\\n${this.fenceStartBuffer}\\n${this.FENCE_END}`;\n\n // Get text after fence\n const textAfterFence = this.buffer.slice(\n closingIdx + this.FENCE_END.length,\n );\n\n // Reset state\n this.inFence = false;\n this.fenceStartBuffer = \"\";\n this.buffer = textAfterFence;\n\n return {\n inFence: false,\n safeContent: fenceContent, // Emit the last bit of fence content\n completeFence,\n textAfterFence,\n };\n }\n\n /**\n * Check if currently inside a fence\n */\n isInFence(): boolean {\n return this.inFence;\n }\n\n /**\n * Reset streaming state\n */\n resetStreamingState(): void {\n this.inFence = false;\n this.fenceStartBuffer = \"\";\n }\n}\n","import {\r\n LanguageModelV2,\r\n LanguageModelV2CallOptions,\r\n LanguageModelV2CallWarning,\r\n LanguageModelV2Content,\r\n LanguageModelV2FinishReason,\r\n LanguageModelV2ProviderDefinedTool,\r\n LanguageModelV2StreamPart,\r\n LanguageModelV2ToolCall,\r\n LoadSettingError,\r\n JSONValue,\r\n} from \"@ai-sdk/provider\";\r\nimport { convertToBuiltInAIMessages } from \"./convert-to-built-in-ai-messages\";\r\nimport {\r\n buildJsonToolSystemPrompt,\r\n parseJsonFunctionCalls,\r\n} from \"./tool-calling\";\r\nimport type { ParsedToolCall } from 
\"./tool-calling\";\r\nimport {\r\n gatherUnsupportedSettingWarnings,\r\n createUnsupportedToolWarning,\r\n} from \"./utils/warnings\";\r\nimport {\r\n hasMultimodalContent,\r\n getExpectedInputs,\r\n prependSystemPromptToMessages,\r\n} from \"./utils/prompt-utils\";\r\nimport { isFunctionTool } from \"./utils/tool-utils\";\r\nimport { SessionManager } from \"./models/session-manager\";\r\nimport { ToolCallFenceDetector } from \"./streaming/tool-call-detector\";\r\n\r\nexport type BuiltInAIChatModelId = \"text\";\r\n\r\nexport interface BuiltInAIChatSettings extends LanguageModelCreateOptions {\r\n /**\r\n * Expected input types for the session, for multimodal inputs.\r\n */\r\n expectedInputs?: Array<{\r\n type: \"text\" | \"image\" | \"audio\";\r\n languages?: string[];\r\n }>;\r\n}\r\n\r\n/**\r\n * Check if the browser supports the built-in AI API\r\n * @returns true if the browser supports the built-in AI API, false otherwise\r\n */\r\nexport function doesBrowserSupportBuiltInAI(): boolean {\r\n return typeof LanguageModel !== \"undefined\";\r\n}\r\n\r\n/**\r\n * Check if the Prompt API is available\r\n * @deprecated Use `doesBrowserSupportBuiltInAI()` instead for clearer naming\r\n * @returns true if the browser supports the built-in AI API, false otherwise\r\n */\r\nexport function isBuiltInAIModelAvailable(): boolean {\r\n return typeof LanguageModel !== \"undefined\";\r\n}\r\n\r\ntype BuiltInAIConfig = {\r\n provider: string;\r\n modelId: BuiltInAIChatModelId;\r\n options: BuiltInAIChatSettings;\r\n};\r\n\r\n/**\r\n * Extract tool name from partial fence content for early emission\r\n * This allows us to emit tool-input-start as soon as we know the tool name\r\n * Expects JSON format: {\"name\":\"toolName\"\r\n */\r\nfunction extractToolName(content: string): string | null {\r\n // For JSON mode: {\"name\":\"toolName\"\r\n const jsonMatch = content.match(/\\{\\s*\"name\"\\s*:\\s*\"([^\"]+)\"/);\r\n if (jsonMatch) {\r\n return jsonMatch[1];\r\n }\r\n return null;\r\n}\r\n\r\n/**\r\n * Extract the argument section from a streaming tool call fence.\r\n * Returns the substring after `\"arguments\":` (best-effort for partial JSON).\r\n */\r\nfunction extractArgumentsContent(content: string): string {\r\n const match = content.match(/\"arguments\"\\s*:\\s*/);\r\n if (!match || match.index === undefined) {\r\n return \"\";\r\n }\r\n\r\n const startIndex = match.index + match[0].length;\r\n let result = \"\";\r\n let depth = 0;\r\n let inString = false;\r\n let escaped = false;\r\n let started = false;\r\n\r\n for (let i = startIndex; i < content.length; i++) {\r\n const char = content[i];\r\n result += char;\r\n\r\n if (!started) {\r\n if (!/\\s/.test(char)) {\r\n started = true;\r\n if (char === \"{\" || char === \"[\") {\r\n depth = 1;\r\n }\r\n }\r\n continue;\r\n }\r\n\r\n if (escaped) {\r\n escaped = false;\r\n continue;\r\n }\r\n\r\n if (char === \"\\\\\") {\r\n escaped = true;\r\n continue;\r\n }\r\n\r\n if (char === '\"') {\r\n inString = !inString;\r\n continue;\r\n }\r\n\r\n if (!inString) {\r\n if (char === \"{\" || char === \"[\") {\r\n depth += 1;\r\n } else if (char === \"}\" || char === \"]\") {\r\n if (depth > 0) {\r\n depth -= 1;\r\n if (depth === 0) {\r\n break;\r\n }\r\n }\r\n }\r\n }\r\n }\r\n\r\n return result;\r\n}\r\n\r\nexport class BuiltInAIChatLanguageModel implements LanguageModelV2 {\r\n readonly specificationVersion = \"v2\";\r\n readonly modelId: BuiltInAIChatModelId;\r\n readonly provider = \"browser-ai\";\r\n\r\n private readonly config: 
BuiltInAIConfig;\r\n private readonly sessionManager: SessionManager;\r\n\r\n constructor(\r\n modelId: BuiltInAIChatModelId,\r\n options: BuiltInAIChatSettings = {},\r\n ) {\r\n this.modelId = modelId;\r\n this.config = {\r\n provider: this.provider,\r\n modelId,\r\n options,\r\n };\r\n this.sessionManager = new SessionManager(options);\r\n }\r\n\r\n readonly supportedUrls: Record<string, RegExp[]> = {\r\n \"image/*\": [/^https?:\\/\\/.+$/],\r\n \"audio/*\": [/^https?:\\/\\/.+$/],\r\n };\r\n\r\n /**\r\n * Gets a session with the specified options\r\n * Delegates to SessionManager for all session lifecycle management\r\n * @private\r\n */\r\n private async getSession(\r\n options?: LanguageModelCreateOptions,\r\n expectedInputs?: Array<{ type: \"text\" | \"image\" | \"audio\" }>,\r\n systemMessage?: string,\r\n onDownloadProgress?: (progress: number) => void,\r\n ): Promise<LanguageModel> {\r\n return this.sessionManager.getSession({\r\n ...options,\r\n expectedInputs,\r\n systemMessage,\r\n onDownloadProgress,\r\n });\r\n }\r\n\r\n private getArgs(callOptions: Parameters<LanguageModelV2[\"doGenerate\"]>[0]) {\r\n const {\r\n prompt,\r\n maxOutputTokens,\r\n temperature,\r\n topP,\r\n topK,\r\n frequencyPenalty,\r\n presencePenalty,\r\n stopSequences,\r\n responseFormat,\r\n seed,\r\n tools,\r\n toolChoice,\r\n providerOptions,\r\n } = callOptions;\r\n const warnings: LanguageModelV2CallWarning[] = [];\r\n\r\n // Gather warnings for unsupported settings\r\n warnings.push(\r\n ...gatherUnsupportedSettingWarnings({\r\n maxOutputTokens,\r\n stopSequences,\r\n topP,\r\n presencePenalty,\r\n frequencyPenalty,\r\n seed,\r\n toolChoice,\r\n }),\r\n );\r\n\r\n // Filter and warn about unsupported tools\r\n const functionTools = (tools ?? []).filter(isFunctionTool);\r\n\r\n const unsupportedTools = (tools ?? []).filter(\r\n (tool): tool is LanguageModelV2ProviderDefinedTool =>\r\n !isFunctionTool(tool),\r\n );\r\n\r\n for (const tool of unsupportedTools) {\r\n warnings.push(\r\n createUnsupportedToolWarning(\r\n tool,\r\n \"Only function tools are supported by the Prompt API polyfill\",\r\n ),\r\n );\r\n }\r\n\r\n // Check if this is a multimodal prompt\r\n const hasMultiModalInput = hasMultimodalContent(prompt);\r\n\r\n // Convert messages to the DOM API format\r\n const { systemMessage, messages } = convertToBuiltInAIMessages(prompt);\r\n\r\n // Handle response format for Prompt API\r\n const promptOptions: LanguageModelPromptOptions &\r\n LanguageModelCreateCoreOptions = {};\r\n if (responseFormat?.type === \"json\") {\r\n promptOptions.responseConstraint = responseFormat.schema as Record<\r\n string,\r\n JSONValue\r\n >;\r\n }\r\n\r\n // Map supported settings\r\n if (temperature !== undefined) {\r\n promptOptions.temperature = temperature;\r\n }\r\n\r\n if (topK !== undefined) {\r\n promptOptions.topK = topK;\r\n }\r\n\r\n return {\r\n systemMessage,\r\n messages,\r\n warnings,\r\n promptOptions,\r\n hasMultiModalInput,\r\n expectedInputs: hasMultiModalInput\r\n ? 
getExpectedInputs(prompt)\r\n : undefined,\r\n functionTools,\r\n };\r\n }\r\n\r\n /**\r\n * Generates a complete text response using the browser's built-in Prompt API\r\n * @param options\r\n * @returns Promise resolving to the generated content with finish reason, usage stats, and any warnings\r\n * @throws {LoadSettingError} When the Prompt API is not available or model needs to be downloaded\r\n * @throws {UnsupportedFunctionalityError} When unsupported features like file input are used\r\n */\r\n public async doGenerate(options: LanguageModelV2CallOptions) {\r\n const converted = this.getArgs(options);\r\n const {\r\n systemMessage,\r\n messages,\r\n warnings,\r\n promptOptions,\r\n expectedInputs,\r\n functionTools,\r\n } = converted;\r\n\r\n const session = await this.getSession(undefined, expectedInputs, undefined);\r\n\r\n // Build system prompt with JSON tool calling\r\n const systemPrompt = await buildJsonToolSystemPrompt(\r\n systemMessage,\r\n functionTools,\r\n {\r\n allowParallelToolCalls: false,\r\n },\r\n );\r\n\r\n const promptMessages = prependSystemPromptToMessages(\r\n messages,\r\n systemPrompt,\r\n );\r\n const rawResponse = await session.prompt(promptMessages, promptOptions);\r\n\r\n // Parse JSON tool calls from response\r\n const { toolCalls, textContent } = parseJsonFunctionCalls(rawResponse);\r\n\r\n if (toolCalls.length > 0) {\r\n const toolCallsToEmit = toolCalls.slice(0, 1);\r\n\r\n const parts: LanguageModelV2Content[] = [];\r\n\r\n if (textContent) {\r\n parts.push({\r\n type: \"text\",\r\n text: textContent,\r\n });\r\n }\r\n\r\n for (const call of toolCallsToEmit) {\r\n parts.push({\r\n type: \"tool-call\",\r\n toolCallId: call.toolCallId,\r\n toolName: call.toolName,\r\n input: JSON.stringify(call.args ?? {}),\r\n } satisfies LanguageModelV2ToolCall);\r\n }\r\n\r\n return {\r\n content: parts,\r\n finishReason: \"tool-calls\" as LanguageModelV2FinishReason,\r\n usage: {\r\n inputTokens: undefined,\r\n outputTokens: undefined,\r\n totalTokens: undefined,\r\n },\r\n request: { body: { messages: promptMessages, options: promptOptions } },\r\n warnings,\r\n };\r\n }\r\n\r\n const content: LanguageModelV2Content[] = [\r\n {\r\n type: \"text\",\r\n text: textContent || rawResponse,\r\n },\r\n ];\r\n\r\n return {\r\n content,\r\n finishReason: \"stop\" as LanguageModelV2FinishReason,\r\n usage: {\r\n inputTokens: undefined,\r\n outputTokens: undefined,\r\n totalTokens: undefined,\r\n },\r\n request: { body: { messages: promptMessages, options: promptOptions } },\r\n warnings,\r\n };\r\n }\r\n\r\n /**\r\n * Check the availability of the built-in AI model\r\n * @returns Promise resolving to \"unavailable\", \"available\", or \"available-after-download\"\r\n */\r\n public async availability(): Promise<Availability> {\r\n return this.sessionManager.checkAvailability();\r\n }\r\n\r\n /**\r\n * Creates a session with download progress monitoring.\r\n *\r\n * @example\r\n * ```typescript\r\n * const session = await model.createSessionWithProgress(\r\n * (progress) => {\r\n * console.log(`Download progress: ${Math.round(progress * 100)}%`);\r\n * }\r\n * );\r\n * ```\r\n *\r\n * @param onDownloadProgress Optional callback receiving progress values 0-1 during model download\r\n * @returns Promise resolving to a configured LanguageModel session\r\n * @throws {LoadSettingError} When the Prompt API is not available or model is unavailable\r\n */\r\n public async createSessionWithProgress(\r\n onDownloadProgress?: (progress: number) => void,\r\n ): Promise<LanguageModel> 
{\r\n return this.sessionManager.createSessionWithProgress(onDownloadProgress);\r\n }\r\n\r\n /**\r\n * Generates a streaming text response using the browser's built-in Prompt API\r\n * @param options\r\n * @returns Promise resolving to a readable stream of text chunks and request metadata\r\n * @throws {LoadSettingError} When the Prompt API is not available or model needs to be downloaded\r\n * @throws {UnsupportedFunctionalityError} When unsupported features like file input are used\r\n */\r\n public async doStream(options: LanguageModelV2CallOptions) {\r\n const converted = this.getArgs(options);\r\n const {\r\n systemMessage,\r\n messages,\r\n warnings,\r\n promptOptions,\r\n expectedInputs,\r\n functionTools,\r\n } = converted;\r\n\r\n const session = await this.getSession(undefined, expectedInputs, undefined);\r\n\r\n // Build system prompt with JSON tool calling\r\n const systemPrompt = await buildJsonToolSystemPrompt(\r\n systemMessage,\r\n functionTools,\r\n {\r\n allowParallelToolCalls: false,\r\n },\r\n );\r\n const promptMessages = prependSystemPromptToMessages(\r\n messages,\r\n systemPrompt,\r\n );\r\n\r\n // Pass abort signal to the native streaming method\r\n const streamOptions = {\r\n ...promptOptions,\r\n signal: options.abortSignal,\r\n };\r\n const conversationHistory = [...promptMessages];\r\n const textId = \"text-0\";\r\n\r\n const stream = new ReadableStream<LanguageModelV2StreamPart>({\r\n start: async (controller) => {\r\n controller.enqueue({\r\n type: \"stream-start\",\r\n warnings,\r\n });\r\n\r\n let textStarted = false;\r\n let finished = false;\r\n let aborted = false;\r\n let currentReader: ReadableStreamDefaultReader<string> | null = null;\r\n\r\n const ensureTextStart = () => {\r\n if (!textStarted) {\r\n controller.enqueue({\r\n type: \"text-start\",\r\n id: textId,\r\n });\r\n textStarted = true;\r\n }\r\n };\r\n\r\n const emitTextDelta = (delta: string) => {\r\n if (!delta) return;\r\n ensureTextStart();\r\n controller.enqueue({\r\n type: \"text-delta\",\r\n id: textId,\r\n delta,\r\n });\r\n };\r\n\r\n const emitTextEndIfNeeded = () => {\r\n if (!textStarted) return;\r\n controller.enqueue({\r\n type: \"text-end\",\r\n id: textId,\r\n });\r\n textStarted = false;\r\n };\r\n\r\n const finishStream = (finishReason: LanguageModelV2FinishReason) => {\r\n if (finished) return;\r\n finished = true;\r\n emitTextEndIfNeeded();\r\n controller.enqueue({\r\n type: \"finish\",\r\n finishReason,\r\n usage: {\r\n inputTokens: session.inputUsage,\r\n outputTokens: undefined,\r\n totalTokens: undefined,\r\n },\r\n });\r\n controller.close();\r\n };\r\n\r\n const abortHandler = () => {\r\n if (aborted) {\r\n return;\r\n }\r\n aborted = true;\r\n if (currentReader) {\r\n currentReader.cancel().catch(() => undefined);\r\n }\r\n finishStream(\"stop\");\r\n };\r\n\r\n if (options.abortSignal) {\r\n options.abortSignal.addEventListener(\"abort\", abortHandler);\r\n }\r\n\r\n const maxIterations = 10;\r\n let iteration = 0;\r\n\r\n try {\r\n // Use ToolCallFenceDetector for real-time streaming\r\n const fenceDetector = new ToolCallFenceDetector();\r\n\r\n while (iteration < maxIterations && !aborted && !finished) {\r\n iteration += 1;\r\n\r\n const promptStream = session.promptStreaming(\r\n conversationHistory,\r\n streamOptions,\r\n );\r\n currentReader = promptStream.getReader();\r\n\r\n let toolCalls: ParsedToolCall[] = [];\r\n let toolBlockDetected = false;\r\n let trailingTextAfterBlock = \"\";\r\n\r\n // Streaming tool call state\r\n let currentToolCallId: string | 
null = null;\r\n let toolInputStartEmitted = false;\r\n let accumulatedFenceContent = \"\";\r\n let streamedArgumentsLength = 0;\r\n let insideFence = false;\r\n\r\n while (!aborted) {\r\n const { done, value } = await currentReader.read();\r\n if (done) {\r\n break;\r\n }\r\n\r\n // Add chunk to detector\r\n fenceDetector.addChunk(value);\r\n\r\n // Process buffer using streaming detection\r\n while (fenceDetector.hasContent()) {\r\n const wasInsideFence = insideFence;\r\n const result = fenceDetector.detectStreamingFence();\r\n insideFence = result.inFence;\r\n\r\n let madeProgress = false;\r\n\r\n if (!wasInsideFence && result.inFence) {\r\n if (result.safeContent) {\r\n emitTextDelta(result.safeContent);\r\n madeProgress = true;\r\n }\r\n\r\n currentToolCallId = `call_${Date.now()}_${Math.random()\r\n .toString(36)\r\n .slice(2, 9)}`;\r\n toolInputStartEmitted = false;\r\n accumulatedFenceContent = \"\";\r\n streamedArgumentsLength = 0;\r\n insideFence = true;\r\n\r\n continue;\r\n }\r\n\r\n if (result.completeFence) {\r\n madeProgress = true;\r\n if (result.safeContent) {\r\n accumulatedFenceContent += result.safeContent;\r\n }\r\n\r\n if (toolInputStartEmitted && currentToolCallId) {\r\n const argsContent = extractArgumentsContent(\r\n accumulatedFenceContent,\r\n );\r\n if (argsContent.length > streamedArgumentsLength) {\r\n const delta = argsContent.slice(streamedArgumentsLength);\r\n streamedArgumentsLength = argsContent.length;\r\n if (delta.length > 0) {\r\n controller.enqueue({\r\n type: \"tool-input-delta\",\r\n id: currentToolCallId,\r\n delta,\r\n });\r\n }\r\n }\r\n }\r\n\r\n const parsed = parseJsonFunctionCalls(result.completeFence);\r\n const parsedToolCalls = parsed.toolCalls;\r\n const selectedToolCalls = parsedToolCalls.slice(0, 1);\r\n\r\n if (selectedToolCalls.length === 0) {\r\n toolCalls = [];\r\n toolBlockDetected = false;\r\n emitTextDelta(result.completeFence);\r\n if (result.textAfterFence) {\r\n emitTextDelta(result.textAfterFence);\r\n }\r\n\r\n currentToolCallId = null;\r\n toolInputStartEmitted = false;\r\n accumulatedFenceContent = \"\";\r\n streamedArgumentsLength = 0;\r\n insideFence = false;\r\n continue;\r\n }\r\n\r\n if (selectedToolCalls.length > 0 && currentToolCallId) {\r\n selectedToolCalls[0].toolCallId = currentToolCallId;\r\n }\r\n\r\n toolCalls = selectedToolCalls;\r\n toolBlockDetected = toolCalls.length > 0;\r\n\r\n for (const [index, call] of toolCalls.entries()) {\r\n const toolCallId =\r\n index === 0 && currentToolCallId\r\n ? currentToolCallId\r\n : call.toolCallId;\r\n const toolName = call.toolName;\r\n const argsJson = JSON.stringify(call.args ?? 
{});\r\n\r\n if (toolCallId === currentToolCallId) {\r\n if (!toolInputStartEmitted) {\r\n controller.enqueue({\r\n type: \"tool-input-start\",\r\n id: toolCallId,\r\n toolName,\r\n });\r\n toolInputStartEmitted = true;\r\n }\r\n\r\n const argsContent = extractArgumentsContent(\r\n accumulatedFenceContent,\r\n );\r\n if (argsContent.length > streamedArgumentsLength) {\r\n const delta = argsContent.slice(\r\n streamedArgumentsLength,\r\n );\r\n streamedArgumentsLength = argsContent.length;\r\n if (delta.length > 0) {\r\n controller.enqueue({\r\n type: \"tool-input-delta\",\r\n id: toolCallId,\r\n delta,\r\n });\r\n }\r\n }\r\n } else {\r\n controller.enqueue({\r\n type: \"tool-input-start\",\r\n id: toolCallId,\r\n toolName,\r\n });\r\n if (argsJson.length > 0) {\r\n controller.enqueue({\r\n type: \"tool-input-delta\",\r\n id: toolCallId,\r\n delta: argsJson,\r\n });\r\n }\r\n }\r\n\r\n controller.enqueue({\r\n type: \"tool-input-end\",\r\n id: toolCallId,\r\n });\r\n controller.enqueue({\r\n type: \"tool-call\",\r\n toolCallId,\r\n toolName,\r\n input: argsJson,\r\n providerExecuted: false,\r\n });\r\n }\r\n\r\n trailingTextAfterBlock += result.textAfterFence;\r\n madeProgress = true;\r\n\r\n if (toolBlockDetected && currentReader) {\r\n await currentReader.cancel().catch(() => undefined);\r\n break;\r\n }\r\n\r\n currentToolCallId = null;\r\n toolInputStartEmitted = false;\r\n accumulatedFenceContent = \"\";\r\n streamedArgumentsLength = 0;\r\n insideFence = false;\r\n continue;\r\n }\r\n\r\n if (insideFence) {\r\n if (result.safeContent) {\r\n accumulatedFenceContent += result.safeContent;\r\n madeProgress = true;\r\n\r\n const toolName = extractToolName(accumulatedFenceContent);\r\n if (\r\n toolName &&\r\n !toolInputStartEmitted &&\r\n currentToolCallId\r\n ) {\r\n controller.enqueue({\r\n type: \"tool-input-start\",\r\n id: currentToolCallId,\r\n toolName,\r\n });\r\n toolInputStartEmitted = true;\r\n }\r\n\r\n if (toolInputStartEmitted && currentToolCallId) {\r\n const argsContent = extractArgumentsContent(\r\n accumulatedFenceContent,\r\n );\r\n if (argsContent.length > streamedArgumentsLength) {\r\n const delta = argsContent.slice(\r\n streamedArgumentsLength,\r\n );\r\n streamedArgumentsLength = argsContent.length;\r\n if (delta.length > 0) {\r\n controller.enqueue({\r\n type: \"tool-input-delta\",\r\n id: currentToolCallId,\r\n delta,\r\n });\r\n }\r\n }\r\n }\r\n }\r\n\r\n continue;\r\n }\r\n\r\n if (!insideFence && result.safeContent) {\r\n emitTextDelta(result.safeContent);\r\n madeProgress = true;\r\n }\r\n\r\n if (!madeProgress) {\r\n break;\r\n }\r\n }\r\n\r\n if (toolBlockDetected) {\r\n break;\r\n }\r\n }\r\n currentReader = null;\r\n\r\n if (aborted) {\r\n return;\r\n }\r\n\r\n // Emit any remaining buffer content if no tool was detected\r\n if (!toolBlockDetected && fenceDetector.hasContent()) {\r\n emitTextDelta(fenceDetector.getBuffer());\r\n fenceDetector.clearBuffer();\r\n }\r\n\r\n if (!toolBlockDetected || toolCalls.length === 0) {\r\n finishStream(\"stop\");\r\n return;\r\n }\r\n\r\n if (trailingTextAfterBlock) {\r\n emitTextDelta(trailingTextAfterBlock);\r\n }\r\n\r\n finishStream(\"tool-calls\");\r\n return;\r\n }\r\n\r\n if (!finished && !aborted) {\r\n finishStream(\"other\");\r\n }\r\n } catch (error) {\r\n controller.enqueue({ type: \"error\", error });\r\n controller.close();\r\n } finally {\r\n if (options.abortSignal) {\r\n options.abortSignal.removeEventListener(\"abort\", abortHandler);\r\n }\r\n }\r\n },\r\n });\r\n\r\n return {\r\n stream,\r\n 
request: { body: { messages: promptMessages, options: promptOptions } },\r\n };\r\n }\r\n}\r\n","import { EmbeddingModelV2, EmbeddingModelV2Embedding } from \"@ai-sdk/provider\";\r\nimport { TextEmbedder } from \"@mediapipe/tasks-text\";\r\n\r\nexport interface BuiltInAIEmbeddingModelSettings {\r\n /**\r\n * An optional base path to specify the directory the Wasm files should be loaded from.\r\n * @default 'https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.js'\r\n */\r\n wasmLoaderPath?: string;\r\n /**\r\n * It's about 6mb before gzip.\r\n * @default 'https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.wasm'\r\n */\r\n wasmBinaryPath?: string;\r\n /**\r\n * The model path to the model asset file.\r\n * It's about 6.1mb before gzip.\r\n * @default 'https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/universal_sentence_encoder.tflite'\r\n */\r\n modelAssetPath?: string;\r\n /**\r\n * Whether to normalize the returned feature vector with L2 norm. Use this\r\n * option only if the model does not already contain a native L2_NORMALIZATION\r\n * TF Lite Op. In most cases, this is already the case and L2 norm is thus\r\n * achieved through TF Lite inference.\r\n * @default false\r\n */\r\n l2Normalize?: boolean;\r\n /**\r\n * Whether the returned embedding should be quantized to bytes via scalar\r\n * quantization. Embeddings are implicitly assumed to be unit-norm and\r\n * therefore any dimension is guaranteed to have a value in [-1.0, 1.0]. Use\r\n * the l2_normalize option if this is not the case.\r\n * @default false\r\n */\r\n quantize?: boolean;\r\n /**\r\n * Overrides the default backend to use for the provided model.\r\n */\r\n delegate?: \"CPU\" | \"GPU\";\r\n}\r\n\r\n// See more:\r\n// - https://github.com/google-ai-edge/mediapipe\r\n// - https://ai.google.dev/edge/mediapipe/solutions/text/text_embedder/web_js\r\nexport class BuiltInAIEmbeddingModel implements EmbeddingModelV2<string> {\r\n readonly specificationVersion = \"v2\";\r\n readonly provider = \"google-mediapipe\";\r\n readonly modelId: string = \"embedding\";\r\n readonly supportsParallelCalls = true;\r\n readonly maxEmbeddingsPerCall = undefined;\r\n\r\n private settings: BuiltInAIEmbeddingModelSettings = {\r\n wasmLoaderPath:\r\n \"https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.js\",\r\n wasmBinaryPath:\r\n \"https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.wasm\",\r\n modelAssetPath:\r\n \"https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/universal_sentence_encoder.tflite\",\r\n l2Normalize: false,\r\n quantize: false,\r\n };\r\n private modelAssetBuffer!: Promise<ReadableStreamDefaultReader>;\r\n private textEmbedder!: Promise<TextEmbedder>;\r\n\r\n public constructor(settings: BuiltInAIEmbeddingModelSettings = {}) {\r\n this.settings = { ...this.settings, ...settings };\r\n this.modelAssetBuffer = fetch(this.settings.modelAssetPath!).then(\r\n (response) => response.body!.getReader(),\r\n )!;\r\n this.textEmbedder = this.getTextEmbedder();\r\n }\r\n\r\n protected getTextEmbedder = async (): Promise<TextEmbedder> => {\r\n return TextEmbedder.createFromOptions(\r\n {\r\n wasmBinaryPath: this.settings.wasmBinaryPath!,\r\n wasmLoaderPath: this.settings.wasmLoaderPath!,\r\n },\r\n {\r\n baseOptions: {\r\n modelAssetBuffer: await this.modelAssetBuffer,\r\n delegate: this.settings.delegate,\r\n },\r\n l2Normalize: this.settings.l2Normalize,\r\n quantize: this.settings.quantize,\r\n },\r\n );\r\n };\r\n\r\n public doEmbed = async (options: 
{\r\n values: string[];\r\n abortSignal?: AbortSignal;\r\n }): Promise<{\r\n embeddings: Array<EmbeddingModelV2Embedding>;\r\n rawResponse?: Record<PropertyKey, any>;\r\n }> => {\r\n // Note: abortSignal is not supported by MediaPipe TextEmbedder\r\n if (options.abortSignal?.aborted) {\r\n throw new Error(\"Operation was aborted\");\r\n }\r\n\r\n const embedder = await this.textEmbedder;\r\n const embeddings = options.values.map((text) => {\r\n const embedderResult = embedder.embed(text);\r\n const [embedding] = embedderResult.embeddings;\r\n return embedding?.floatEmbedding ?? [];\r\n });\r\n\r\n return {\r\n embeddings,\r\n rawResponse: {\r\n model: \"universal_sentence_encoder\",\r\n provider: \"google-mediapipe\",\r\n processed_texts: options.values.length,\r\n },\r\n };\r\n };\r\n}\r\n","import {\r\n EmbeddingModelV2,\r\n NoSuchModelError,\r\n ProviderV2,\r\n} from \"@ai-sdk/provider\";\r\nimport {\r\n BuiltInAIChatLanguageModel,\r\n BuiltInAIChatModelId,\r\n BuiltInAIChatSettings,\r\n} from \"./built-in-ai-language-model\";\r\nimport {\r\n BuiltInAIEmbeddingModel,\r\n BuiltInAIEmbeddingModelSettings,\r\n} from \"./built-in-ai-embedding-model\";\r\n\r\nexport interface BuiltInAIProvider extends ProviderV2 {\r\n (\r\n modelId?: BuiltInAIChatModelId,\r\n settings?: BuiltInAIChatSettings,\r\n ): BuiltInAIChatLanguageModel;\r\n\r\n /**\r\n * Creates a model for text generation.\r\n */\r\n languageModel(\r\n modelId: BuiltInAIChatModelId,\r\n settings?: BuiltInAIChatSettings,\r\n ): BuiltInAIChatLanguageModel;\r\n\r\n /**\r\n * Creates a model for text generation.\r\n */\r\n chat(\r\n modelId: BuiltInAIChatModelId,\r\n settings?: BuiltInAIChatSettings,\r\n ): BuiltInAIChatLanguageModel;\r\n\r\n textEmbedding(\r\n modelId: \"embedding\",\r\n settings?: BuiltInAIEmbeddingModelSettings,\r\n ): EmbeddingModelV2<string>;\r\n\r\n textEmbeddingModel: (\r\n modelId: \"embedding\",\r\n settings?: BuiltInAIEmbeddingModelSettings,\r\n ) => EmbeddingModelV2<string>;\r\n\r\n // Not implemented\r\n imageModel(modelId: string): never;\r\n speechModel(modelId: string): never;\r\n transcriptionModel(modelId: string): never;\r\n}\r\n\r\nexport interface BuiltInAIProviderSettings {\r\n // Currently empty - provider settings are minimal for BuiltInAI\r\n // Future provider-level settings can be added here\r\n}\r\n\r\n/**\r\n * Create a BuiltInAI provider instance.\r\n */\r\nexport function createBuiltInAI(\r\n options: BuiltInAIProviderSettings = {},\r\n): BuiltInAIProvider {\r\n const createChatModel = (\r\n modelId: BuiltInAIChatModelId,\r\n settings?: BuiltInAIChatSettings,\r\n ) => {\r\n return new BuiltInAIChatLanguageModel(modelId, settings);\r\n };\r\n\r\n const createEmbeddingModel = (\r\n modelId: \"embedding\",\r\n settings?: BuiltInAIEmbeddingModelSettings,\r\n ) => {\r\n return new BuiltInAIEmbeddingModel(settings);\r\n };\r\n\r\n const provider = function (\r\n modelId: BuiltInAIChatModelId = \"text\",\r\n settings?: BuiltInAIChatSettings,\r\n ) {\r\n if (new.target) {\r\n throw new Error(\r\n \"The BuiltInAI model function cannot be called with the new keyword.\",\r\n );\r\n }\r\n\r\n return createChatModel(modelId, settings);\r\n };\r\n\r\n provider.languageModel = createChatModel;\r\n provider.chat = createChatModel;\r\n provider.textEmbedding = createEmbeddingModel;\r\n provider.textEmbeddingModel = createEmbeddingModel;\r\n\r\n provider.imageModel = (modelId: string) => {\r\n throw new NoSuchModelError({ modelId, modelType: \"imageModel\" });\r\n };\r\n\r\n provider.speechModel = (modelId: 
string) => {\r\n throw new NoSuchModelError({ modelId, modelType: \"speechModel\" });\r\n };\r\n\r\n provider.transcriptionModel = (modelId: string) => {\r\n throw new NoSuchModelError({ modelId, modelType: \"transcriptionModel\" });\r\n };\r\n\r\n return provider;\r\n}\r\n\r\n/**\r\n * Default BuiltInAI provider instance.\r\n */\r\nexport const builtInAI = createBuiltInAI();\r\n"],"mappings":";AAAA;AAAA,EAKE;AAAA,OACK;;;ACGP,SAAS,mBAAmB,QAA6C;AACvE,QAAM,UAAmC;AAAA,IACvC,MAAM,OAAO;AAAA,IACb,QAAQ,OAAO,UAAU;AAAA,IACzB,OAAO,QAAQ,OAAO,OAAO;AAAA,EAC/B;AAEA,MAAI,OAAO,YAAY;AACrB,YAAQ,KAAK,OAAO;AAAA,EACtB;AAEA,SAAO;AACT;AAkBO,SAAS,kBAAkB,SAA+B;AAC/D,MAAI,CAAC,WAAW,QAAQ,WAAW,GAAG;AACpC,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,QAAQ;AAAA,IAAI,CAAC,WAC5B,KAAK,UAAU,mBAAmB,MAAM,CAAC;AAAA,EAC3C;AAEA,SAAO;AAAA,EACP,SAAS,KAAK,IAAI,CAAC;AAAA;AAErB;;;ADjCA,SAAS,0BAA0B,QAA4B;AAC7D,MAAI;AACF,UAAM,eAAe,KAAK,MAAM;AAChC,UAAM,QAAQ,IAAI,WAAW,aAAa,MAAM;AAChD,aAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,YAAM,CAAC,IAAI,aAAa,WAAW,CAAC;AAAA,IACtC;AACA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,2CAA2C,KAAK,EAAE;AAAA,EACpE;AACF;AAMA,SAAS,gBACP,MACA,WACqB;AAErB,MAAI,gBAAgB,KAAK;AAEvB,WAAO,KAAK,SAAS;AAAA,EACvB;AAEA,MAAI,gBAAgB,YAAY;AAE9B,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,SAAS,UAAU;AAE5B,WAAO,0BAA0B,IAAI;AAAA,EACvC;AAGA,QAAM,kBAAyB;AAC/B,QAAM,IAAI,MAAM,4BAA4B,SAAS,KAAK,eAAe,EAAE;AAC7E;AAEA,SAAS,uBAAuB,OAAyB;AACvD,MAAI,UAAU,QAAW;AACvB,WAAO,CAAC;AAAA,EACV;AAEA,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AACF,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO,SAAS,CAAC;AACnB;AAEA,SAAS,oBAAoB,OAA8C;AACzE,MAAI,CAAC,MAAM,QAAQ;AACjB,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,MAAM,IAAI,CAAC,SAAS;AACnC,UAAM,UAAmC;AAAA,MACvC,MAAM,KAAK;AAAA,MACX,WAAW,uBAAuB,KAAK,KAAK;AAAA,IAC9C;AAEA,QAAI,KAAK,YAAY;AACnB,cAAQ,KAAK,KAAK;AAAA,IACpB;AAEA,WAAO,KAAK,UAAU,OAAO;AAAA,EAC/B,CAAC;AAED,SAAO;AAAA,EACP,SAAS,KAAK,IAAI,CAAC;AAAA;AAErB;AAEA,SAAS,wBAAwB,QAG/B;AACA,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,MAAM;AAAA,IAC/C,KAAK;AACH,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,MAAM;AAAA,IAC/C,KAAK;AACH,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,KAAK;AAAA,IAC9C,KAAK;AACH,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,KAAK;AAAA,IAC9C,KAAK;AACH,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,MAAM;AAAA,IAC/C,SAAS;AACP,YAAM,kBAAyB;AAC/B,aAAO,EAAE,OAAO,iBAAiB,SAAS,MAAM;AAAA,IAClD;AAAA,EACF;AACF;AAEA,SAAS,aAAa,MAAiD;AACrE,QAAM,EAAE,OAAO,QAAQ,IAAI,wBAAwB,KAAK,MAAM;AAC9D,SAAO;AAAA,IACL,YAAY,KAAK;AAAA,IACjB,UAAU,KAAK;AAAA,IACf,QAAQ;AAAA,IACR;AAAA,EACF;AACF;AAMO,SAAS,2BACd,QACmB;AACnB,QAAM,mBAAmB,OAAO,MAAM;AAEtC,MAAI;AACJ,QAAM,WAAmC,CAAC;AAE1C,aAAW,WAAW,kBAAkB;AACtC,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK,UAAU;AAEb,wBAAgB,QAAQ;AACxB;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,QAAQ,IAAI,CAAC,SAAS;AACrC,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,OAAO,KAAK;AAAA,gBACd;AAAA,cACF;AAAA,cAEA,KAAK,QAAQ;AACX,sBAAM,EAAE,WAAW,KAAK,IAAI;AAE5B,oBAAI,WAAW,WAAW,QAAQ,GAAG;AACnC,wBAAM,gBAAgB,gBAAgB,MAAM,SAAS;AAErD,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,OAAO;AAAA,kBACT;AAAA,gBACF,WAAW,WAAW,WAAW,QAAQ,GAAG;AAC1C,wBAAM,gBAAgB,gBAAgB,MAAM,SAAS;AAErD,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,OAAO;AAAA,kBACT;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe,cAAc,SAAS;AAAA,kBACxC,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,cAEA,SAAS;AACP,sBAAM,kBAAyB;AAC/B,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe,iBAAkB,gBAAsC,QAAQ,SAAS;AAAA,gBAC1F,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAyB;AACzB;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,gBAA+C,CAAC;AAEtD,mBAAW,QAAQ,QAAQ,SAAS;AAClC,kBAAQ,KAAK,MAAM;AAAA,YACj
B,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,4BAAc,KAAK,IAAI;AACvB;AAAA,YACF;AAAA,YACA,KAAK,QAAQ;AACX,oBAAM,IAAI,8BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,YACA,KAAK,eAAe;AAClB,oBAAM,IAAI,8BAA8B;AAAA,gBACtC,eACE;AAAA,cACJ,CAAC;AAAA,YACH;AAAA,YACA,SAAS;AACP,oBAAM,kBAAyB;AAC/B,oBAAM,IAAI,8BAA8B;AAAA,gBACtC,eAAe,wBAAyB,gBAAsC,QAAQ,SAAS;AAAA,cACjG,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,cAAM,eAAe,oBAAoB,aAAa;AACtD,cAAM,kBAA4B,CAAC;AAEnC,YAAI,KAAK,KAAK,EAAE,SAAS,GAAG;AAC1B,0BAAgB,KAAK,IAAI;AAAA,QAC3B,WAAW,KAAK,SAAS,GAAG;AAE1B,0BAAgB,KAAK,IAAI;AAAA,QAC3B;AAEA,YAAI,cAAc;AAChB,0BAAgB,KAAK,YAAY;AAAA,QACnC;AAEA,cAAM,UACJ,gBAAgB,SAAS,IAAI,gBAAgB,KAAK,IAAI,IAAI;AAE5D,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN;AAAA,QACF,CAAyB;AACzB;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,YAAY,QAAQ;AAC1B,cAAM,UAAwB,UAAU,IAAI,YAAY;AACxD,cAAM,kBAAkB,kBAAkB,OAAO;AAEjD,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAyB;AACzB;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,kBAAyB;AAC/B,cAAM,IAAI;AAAA,UACR,qBAAsB,gBAAsC,QAAQ,SAAS;AAAA,QAC/E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,eAAe,SAAS;AACnC;;;AE5QO,SAAS,0BACd,sBACA,OACA,SACQ;AACR,MAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,WAAO,wBAAwB;AAAA,EACjC;AAEA,QAAM,sBACJ;AAEF,QAAM,cAAc,MAAM,IAAI,CAAC,SAAS;AACtC,UAAM,SAAS,cAAc,IAAI;AACjC,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,aAAa,KAAK,eAAe;AAAA,MACjC,YAAY,UAAU,EAAE,MAAM,UAAU,YAAY,CAAC,EAAE;AAAA,IACzD;AAAA,EACF,CAAC;AAED,QAAM,YAAY,KAAK,UAAU,aAAa,MAAM,CAAC;AAErD,QAAM,kBAAkB;AAAA;AAAA;AAAA,EAGxB,SAAS;AAAA;AAAA;AAAA,EAGT,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBnB,MAAI,sBAAsB,KAAK,GAAG;AAChC,WAAO,GAAG,qBAAqB,KAAK,CAAC;AAAA;AAAA,EAAO,eAAe;AAAA,EAC7D;AAEA,SAAO;AACT;AASA,SAAS,cACP,MACyB;AACzB,MAAI,gBAAgB,MAAM;AACxB,WAAO,KAAK;AAAA,EACd;AAEA,SAAO,KAAK;AACd;;;AChFA,IAAM,6BAA6B;AAQnC,SAAS,qBAA6B;AACpC,SAAO,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,GAAG,CAAC,CAAC;AACrE;AAcO,SAAS,uBAAuB,UAAkC;AACvE,QAAM,UAAU,MAAM,KAAK,SAAS,SAAS,0BAA0B,CAAC;AACxE,6BAA2B,YAAY;AAEvC,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,EAAE,WAAW,CAAC,GAAG,aAAa,SAAS;AAAA,EAChD;AAEA,QAAM,YAA8B,CAAC;AACrC,MAAI,cAAc;AAElB,aAAW,SAAS,SAAS;AAC3B,UAAM,CAAC,WAAW,YAAY,IAAI;AAClC,kBAAc,YAAY,QAAQ,WAAW,EAAE;AAE/C,QAAI;AACF,YAAM,UAAU,aAAa,KAAK;AAGlC,UAAI;AACF,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,cAAM,aAAa,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAE3D,mBAAW,QAAQ,YAAY;AAC7B,cAAI,CAAC,KAAK,KAAM;AAEhB,oBAAU,KAAK;AAAA,YACb,MAAM;AAAA,YACN,YAAY,KAAK,MAAM,mBAAmB;AAAA,YAC1C,UAAU,KAAK;AAAA,YACf,MAAM,KAAK,aAAa,CAAC;AAAA,UAC3B,CAAC;AAAA,QACH;AAAA,MACF,QAAQ;AAEN,cAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC;AAE9D,mBAAW,QAAQ,OAAO;AACxB,cAAI;AACF,kBAAM,OAAO,KAAK,MAAM,KAAK,KAAK,CAAC;AACnC,gBAAI,CAAC,KAAK,KAAM;AAEhB,sBAAU,KAAK;AAAA,cACb,MAAM;AAAA,cACN,YAAY,KAAK,MAAM,mBAAmB;AAAA,cAC1C,UAAU,KAAK;AAAA,cACf,MAAM,KAAK,aAAa,CAAC;AAAA,YAC3B,CAAC;AAAA,UACH,QAAQ;AAEN;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,KAAK,mCAAmC,KAAK;AACrD;AAAA,IACF;AAAA,EACF;AAEA,gBAAc,YAAY,QAAQ,WAAW,IAAI;AAEjD,SAAO,EAAE,WAAW,aAAa,YAAY,KAAK,EAAE;AACtD;;;ACrEO,SAAS,gCACd,SACA,SAC4B;AAC5B,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AACF;AAiBO,SAAS,6BACd,MACA,SAC4B;AAC5B,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AACF;AAkBO,SAAS,iCAAiC,SAQhB;AAC/B,QAAM,WAAyC,CAAC;AAEhD,MAAI,QAAQ,mBAAmB,MAAM;AACnC,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,iBAAiB,MAAM;AACjC,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,QAAQ,M
AAM;AACxB,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,mBAAmB,MAAM;AACnC,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,oBAAoB,MAAM;AACpC,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,QAAQ,MAAM;AACxB,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,cAAc,MAAM;AAC9B,aAAS;AAAA,MACP;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC5IO,SAAS,qBAAqB,QAAwC;AAC3E,aAAW,WAAW,QAAQ;AAC5B,QAAI,QAAQ,SAAS,QAAQ;AAC3B,iBAAW,QAAQ,QAAQ,SAAS;AAClC,YAAI,KAAK,SAAS,QAAQ;AACxB,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAgBO,SAAS,kBACd,QAC6C;AAC7C,QAAM,SAAS,oBAAI,IAAgC;AAGnD,aAAW,WAAW,QAAQ;AAC5B,QAAI,QAAQ,SAAS,QAAQ;AAC3B,iBAAW,QAAQ,QAAQ,SAAS;AAClC,YAAI,KAAK,SAAS,QAAQ;AACxB,cAAI,KAAK,WAAW,WAAW,QAAQ,GAAG;AACxC,mBAAO,IAAI,OAAO;AAAA,UACpB,WAAW,KAAK,WAAW,WAAW,QAAQ,GAAG;AAC/C,mBAAO,IAAI,OAAO;AAAA,UACpB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,KAAK,EAAE;AACpD;AAmBO,SAAS,8BACd,UACA,cACwB;AACxB,MAAI,CAAC,aAAa,KAAK,GAAG;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,SAAS,IAAI,CAAC,aAAa,EAAE,GAAG,QAAQ,EAAE;AAC1D,QAAM,iBAAiB,QAAQ;AAAA,IAC7B,CAAC,YAAY,QAAQ,SAAS;AAAA,EAChC;AAEA,MAAI,mBAAmB,IAAI;AACzB,UAAM,mBAAmB,QAAQ,cAAc;AAE/C,QAAI,MAAM,QAAQ,iBAAiB,OAAO,GAAG;AAC3C,YAAM,UAAU,iBAAiB,QAAQ,MAAM;AAC/C,cAAQ,QAAQ;AAAA,QACd,MAAM;AAAA,QACN,OAAO,GAAG,YAAY;AAAA;AAAA;AAAA,MACxB,CAAC;AACD,cAAQ,cAAc,IAAI;AAAA,QACxB,GAAG;AAAA,QACH;AAAA,MACF;AAAA,IACF,WAAW,OAAO,iBAAiB,YAAY,UAAU;AACvD,cAAQ,cAAc,IAAI;AAAA,QACxB,GAAG;AAAA,QACH,SAAS,GAAG,YAAY;AAAA;AAAA,EAAO,iBAAiB,OAAO;AAAA,MACzD;AAAA,IACF;AAAA,EACF,OAAO;AACL,YAAQ,QAAQ;AAAA,MACd,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,SAAO;AACT;;;ACxGO,SAAS,eACd,MACqC;AACrC,SAAO,KAAK,SAAS;AACvB;;;ACdA,SAAS,wBAAwB;AA6C1B,IAAM,iBAAN,MAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAS1B,YACE,aACA;AAVF,SAAQ,UAAgC;AAYtC,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,MAAM,WAAW,SAAwD;AAEvE,QAAI,OAAO,kBAAkB,aAAa;AACxC,YAAM,IAAI,iBAAiB;AAAA,QACzB,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAGA,QAAI,KAAK,SAAS;AAChB,aAAO,KAAK;AAAA,IACd;AAGA,UAAM,eAAe,MAAM,cAAc,aAAa;AACtD,QAAI,iBAAiB,eAAe;AAClC,YAAM,IAAI,iBAAiB;AAAA,QACzB,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,iBAAiB,KAAK,sBAAsB,OAAO;AAGzD,SAAK,UAAU,MAAM,cAAc,OAAO,cAAc;AAExD,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,MAAM,0BACJ,oBACwB;AACxB,WAAO,KAAK,WAAW,EAAE,mBAAmB,CAAC;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MAAM,oBAA2C;AAC/C,QAAI,OAAO,kBAAkB,aAAa;AACxC,aAAO;AAAA,IACT;AACA,WAAO,cAAc,aAAa;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,oBAA0C;AACxC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAuB;AACrB,QAAI,KAAK,WAAW,OAAO,KAAK,QAAQ,YAAY,YAAY;AAC9D,WAAK,QAAQ,QAAQ;AAAA,IACvB;AACA,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,sBACN,SAC4B;AAE5B,UAAM,gBAC6B,EAAE,GAAG,KAAK,YAAY;AAGzD,QAAI,SAAS;AACX,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,QACA,GAAG;AAAA,MACL,IAAI;AAGJ,aAAO,OAAO,eAAe,aAAa;AAG1C,UAAI,eAAe;AACjB,sBAAc,iBAAiB;AAAA,UAC7B,EAAE,MAAM,UAAU,SAAS,cAAc;AAAA,QAC3C;AAAA,MACF;AAGA,UAAI,kBAAkB,eAAe,SAAS,GAAG;AAC/C,sBAAc,iBAAiB;AAAA,MACjC;AAGA,UAAI,oBAAoB;AACtB,sBAAc,UAAU,CAAC,MAAqB;AAC5C,YAAE,iBAAiB,oBAAoB,CAAC,MAAqB;AAC3D,+BAAmB,EAAE,MAAM;AAAA,UAC7B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,SAAK,gBAAgB,aAAa;AAElC,WAAO;AAAA,EACT;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,gBACN,SACM;AAAA,EAER;AACF;;;ACnMO,IAAM,wBAAN,MAA4B;AAAA,EAA5B;AACL,SAAiB,eAAe,CAAC,cAAc;AAC/C,SAAiB,YAAY;AAC7B,SAAQ,SAAS;AAGjB;AAAA,SAAQ,UAAU;AAClB,SAAQ,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO3B,SAAS,OAAqB;AAC5B,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,YAAoB;AAClB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,cAAoB;AAClB,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,cAAoC;AAClC,UAAM,EAAE,OAAO,UAAU,QAAQ,cAAc,IAAI,KAAK;AAAA,MACtD,KAAK;AAAA,IACP;AAGA,QAAI,aAAa,IAAI;AAEnB,YAAM,UAAU,KAAK,qBAAqB,KAAK,QAAQ,KAAK,YAAY;AACxE,YAAM,iBAAiB,KAAK,OAAO,SAAS;AAE5C,YAAMA,cACJ,iBAAiB,IAAI,KAAK,OAAO,MAAM,GAAG,cAAc,IAAI;AAC9D,YAAM,YAAY,UAAU,IAAI,KAAK,OAAO,MAAM,CAAC,OAAO,IAAI;AAG9D,WAAK,SAAS;AAEd,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAAA;AAAA,QACA,eAAe;AAAA,QACf,eAAe;AAAA,MACjB;AAAA,IACF;AAGA,UAAM,aAAa,KAAK,OAAO,MAAM,GAAG,QAAQ;AAChD,SAAK,SAAS,KAAK,OAAO,MAAM,QAAQ;AAGxC,UAAM,eAAe,eAAe,UAAU;AAC9C,UAAM,aAAa,KAAK,OAAO,QAAQ,KAAK,WAAW,YAAY;AAGnE,QAAI,eAAe,IAAI;AAErB,aAAO;AAAA,QACL,OAAO;AAAA,QACP;AAAA,QACA,eAAe;AAAA,QACf,eAAe;AAAA,MACjB;AAAA,IACF;AAGA,UAAM,SAAS,aAAa,KAAK,UAAU;AAC3C,UAAM,QAAQ,KAAK,OAAO,MAAM,GAAG,MAAM;AACzC,UAAM,gBAAgB,KAAK,OAAO,MAAM,MAAM;AAG9C,SAAK,SAAS;AAEd,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,eAAe;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,eAAe,MAGrB;AACA,QAAI,YAAY;AAChB,QAAI,gBAA+B;AAEnC,eAAW,UAAU,KAAK,cAAc;AACtC,YAAM,MAAM,KAAK,QAAQ,MAAM;AAC/B,UAAI,QAAQ,OAAO,cAAc,MAAM,MAAM,YAAY;AACvD,oBAAY;AACZ,wBAAgB;AAAA,MAClB;AAAA,IACF;AAEA,WAAO,EAAE,OAAO,WAAW,QAAQ,cAAc;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0BQ,qBAAqB,MAAc,UAA4B;AACrE,QAAI,UAAU;AAEd,eAAW,UAAU,UAAU;AAC7B,YAAM,YAAY,KAAK,IAAI,KAAK,QAAQ,OAAO,SAAS,CAAC;AAEzD,eAAS,OAAO,WAAW,OAAO,GAAG,QAAQ,GAAG;AAE9C,YAAI,OAAO,WAAW,KAAK,MAAM,CAAC,IAAI,CAAC,GAAG;AACxC,oBAAU,KAAK,IAAI,SAAS,IAAI;AAChC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK,OAAO,SAAS;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,uBAA6C;AAC3C,QAAI,CAAC,KAAK,SAAS;AAEjB,YAAM,EAAE,OAAO,UAAU,QAAQ,cAAc,IAAI,KAAK;AAAA,QACtD,KAAK;AAAA,MACP;AAEA,UAAI,aAAa,IAAI;AAEnB,cAAM,UAAU,KAAK;AAAA,UACnB,KAAK;AAAA,UACL,KAAK;AAAA,QACP;AACA,cAAM,iBAAiB,KAAK,OAAO,SAAS;AAC5C,cAAM,cACJ,iBAAiB,IAAI,KAAK,OAAO,MAAM,GAAG,cAAc,IAAI;AAC9D,aAAK,SAAS,KAAK,OAAO,MAAM,cAAc;AAE9C,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,eAAe;AAAA,UACf,gBAAgB;AAAA,QAClB;AAAA,MACF;AAGA,YAAM,aAAa,KAAK,OAAO,MAAM,GAAG,QAAQ;AAChD,YAAM,mBAAmB,eAAe,UAAU;AAGlD,WAAK,SAAS,KAAK,OAAO,MAAM,WAAW,gBAAgB;AAG3D,UAAI,KAAK,OAAO,WAAW,IAAI,GAAG;AAChC,aAAK,SAAS,KAAK,OAAO,MAAM,CAAC;AAAA,MACnC;AAEA,WAAK,UAAU;AACf,WAAK,mBAAmB;AAExB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,aAAa;AAAA;AAAA,QACb,eAAe;AAAA,QACf,gBAAgB;AAAA,MAClB;AAAA,IACF;AAGA,UAAM,aAAa,KAAK,OAAO,QAAQ,KAAK,SAAS;AAErD,QAAI,eAAe,IAAI;AAErB,YAAM,UAAU,KAAK,qBAAqB,KAAK,QAAQ,CAAC,KAAK,SAAS,CAAC;AACvE,YAAM,oBAAoB,KAAK,OAAO,SAAS;AAE/C,UAAI,oBAAoB,GAAG;AACzB,cAAM,cAAc,KAAK,OAAO,MAAM,GAAG,iBAAiB;AAC1D,aAAK,oBAAoB;AACzB,aAAK,SAAS,KAAK,OAAO,MAAM,iBAAiB;AAEjD,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,eAAe;AAAA,UACf,gBAAgB;AAAA,QAClB;AAAA,MACF;AAGA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,aAAa;AAAA,QACb,eAAe;AAAA,QACf,gBAAgB;AAAA,MAClB;AAAA,IACF;AAGA,UAAM,eAAe,KAAK,OAAO,MAAM,GAAG,UAAU;AACpD,SAAK,oBAAoB;AAGzB,UAAM,gBAAgB,GAAG,KAAK,aAAa,CAAC,CAAC;AAAA,EAAK,KAAK,gBAAgB;AAAA,EAAK,KAAK,SAAS;AAG1F,UAAM,iBAAiB,KAAK,OAAO;AA
AA,MACjC,aAAa,KAAK,UAAU;AAAA,IAC9B;AAGA,SAAK,UAAU;AACf,SAAK,mBAAmB;AACxB,SAAK,SAAS;AAEd,WAAO;AAAA,MACL,SAAS;AAAA,MACT,aAAa;AAAA;AAAA,MACb;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,YAAqB;AACnB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,sBAA4B;AAC1B,SAAK,UAAU;AACf,SAAK,mBAAmB;AAAA,EAC1B;AACF;;;ACjUO,SAAS,8BAAuC;AACrD,SAAO,OAAO,kBAAkB;AAClC;AAOO,SAAS,4BAAqC;AACnD,SAAO,OAAO,kBAAkB;AAClC;AAaA,SAAS,gBAAgB,SAAgC;AAEvD,QAAM,YAAY,QAAQ,MAAM,6BAA6B;AAC7D,MAAI,WAAW;AACb,WAAO,UAAU,CAAC;AAAA,EACpB;AACA,SAAO;AACT;AAMA,SAAS,wBAAwB,SAAyB;AACxD,QAAM,QAAQ,QAAQ,MAAM,oBAAoB;AAChD,MAAI,CAAC,SAAS,MAAM,UAAU,QAAW;AACvC,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,MAAM,QAAQ,MAAM,CAAC,EAAE;AAC1C,MAAI,SAAS;AACb,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,UAAU;AAEd,WAAS,IAAI,YAAY,IAAI,QAAQ,QAAQ,KAAK;AAChD,UAAM,OAAO,QAAQ,CAAC;AACtB,cAAU;AAEV,QAAI,CAAC,SAAS;AACZ,UAAI,CAAC,KAAK,KAAK,IAAI,GAAG;AACpB,kBAAU;AACV,YAAI,SAAS,OAAO,SAAS,KAAK;AAChC,kBAAQ;AAAA,QACV;AAAA,MACF;AACA;AAAA,IACF;AAEA,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AAEA,QAAI,SAAS,MAAM;AACjB,gBAAU;AACV;AAAA,IACF;AAEA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AAEA,QAAI,CAAC,UAAU;AACb,UAAI,SAAS,OAAO,SAAS,KAAK;AAChC,iBAAS;AAAA,MACX,WAAW,SAAS,OAAO,SAAS,KAAK;AACvC,YAAI,QAAQ,GAAG;AACb,mBAAS;AACT,cAAI,UAAU,GAAG;AACf;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,6BAAN,MAA4D;AAAA,EAQjE,YACE,SACA,UAAiC,CAAC,GAClC;AAVF,SAAS,uBAAuB;AAEhC,SAAS,WAAW;AAkBpB,SAAS,gBAA0C;AAAA,MACjD,WAAW,CAAC,iBAAiB;AAAA,MAC7B,WAAW,CAAC,iBAAiB;AAAA,IAC/B;AAZE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,MACZ,UAAU,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACF;AACA,SAAK,iBAAiB,IAAI,eAAe,OAAO;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAc,WACZ,SACA,gBACA,eACA,oBACwB;AACxB,WAAO,KAAK,eAAe,WAAW;AAAA,MACpC,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEQ,QAAQ,aAA2D;AACzE,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AACJ,UAAM,WAAyC,CAAC;AAGhD,aAAS;AAAA,MACP,GAAG,iCAAiC;AAAA,QAClC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,iBAAiB,SAAS,CAAC,GAAG,OAAO,cAAc;AAEzD,UAAM,oBAAoB,SAAS,CAAC,GAAG;AAAA,MACrC,CAAC,SACC,CAAC,eAAe,IAAI;AAAA,IACxB;AAEA,eAAW,QAAQ,kBAAkB;AACnC,eAAS;AAAA,QACP;AAAA,UACE;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,qBAAqB,qBAAqB,MAAM;AAGtD,UAAM,EAAE,eAAe,SAAS,IAAI,2BAA2B,MAAM;AAGrE,UAAM,gBAC6B,CAAC;AACpC,QAAI,gBAAgB,SAAS,QAAQ;AACnC,oBAAc,qBAAqB,eAAe;AAAA,IAIpD;AAGA,QAAI,gBAAgB,QAAW;AAC7B,oBAAc,cAAc;AAAA,IAC9B;AAEA,QAAI,SAAS,QAAW;AACtB,oBAAc,OAAO;AAAA,IACvB;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,gBAAgB,qBACZ,kBAAkB,MAAM,IACxB;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAa,WAAW,SAAqC;AAC3D,UAAM,YAAY,KAAK,QAAQ,OAAO;AACtC,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AAEJ,UAAM,UAAU,MAAM,KAAK,WAAW,QAAW,gBAAgB,MAAS;AAG1E,UAAM,eAAe,MAAM;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,QACE,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,UAAM,iBAAiB;AAAA,MACrB;AAAA,MACA;AAAA,IACF;AACA,UAAM,cAAc,MAAM,QAAQ,OAAO,gBAAgB,aAAa;AAGtE,UAAM,EAAE,WAAW,YAAY,IAAI,uBAAuB,WAAW;AAErE,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,kBAAkB,UAAU,MAAM,GAAG,CAAC;AAE5C,YAAM,QAAkC,CAAC;AAEzC,UAAI,aAAa;AACf,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAEA,iBAAW,QAAQ,iBAAiB;AAClC,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,UAAU,KAAK;AAAA,UACf,OAAO,KAAK,UAAU,KAAK,QAAQ,CAAC,CAAC;AAAA,QACvC,CAAmC;AAAA,MACrC;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,cAAc;AAA
A,QACd,OAAO;AAAA,UACL,aAAa;AAAA,UACb,cAAc;AAAA,UACd,aAAa;AAAA,QACf;AAAA,QACA,SAAS,EAAE,MAAM,EAAE,UAAU,gBAAgB,SAAS,cAAc,EAAE;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAoC;AAAA,MACxC;AAAA,QACE,MAAM;AAAA,QACN,MAAM,eAAe;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc;AAAA,MACd,OAAO;AAAA,QACL,aAAa;AAAA,QACb,cAAc;AAAA,QACd,aAAa;AAAA,MACf;AAAA,MACA,SAAS,EAAE,MAAM,EAAE,UAAU,gBAAgB,SAAS,cAAc,EAAE;AAAA,MACtE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,eAAsC;AACjD,WAAO,KAAK,eAAe,kBAAkB;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAa,0BACX,oBACwB;AACxB,WAAO,KAAK,eAAe,0BAA0B,kBAAkB;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAa,SAAS,SAAqC;AACzD,UAAM,YAAY,KAAK,QAAQ,OAAO;AACtC,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AAEJ,UAAM,UAAU,MAAM,KAAK,WAAW,QAAW,gBAAgB,MAAS;AAG1E,UAAM,eAAe,MAAM;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,QACE,wBAAwB;AAAA,MAC1B;AAAA,IACF;AACA,UAAM,iBAAiB;AAAA,MACrB;AAAA,MACA;AAAA,IACF;AAGA,UAAM,gBAAgB;AAAA,MACpB,GAAG;AAAA,MACH,QAAQ,QAAQ;AAAA,IAClB;AACA,UAAM,sBAAsB,CAAC,GAAG,cAAc;AAC9C,UAAM,SAAS;AAEf,UAAM,SAAS,IAAI,eAA0C;AAAA,MAC3D,OAAO,OAAO,eAAe;AAC3B,mBAAW,QAAQ;AAAA,UACjB,MAAM;AAAA,UACN;AAAA,QACF,CAAC;AAED,YAAI,cAAc;AAClB,YAAI,WAAW;AACf,YAAI,UAAU;AACd,YAAI,gBAA4D;AAEhE,cAAM,kBAAkB,MAAM;AAC5B,cAAI,CAAC,aAAa;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,IAAI;AAAA,YACN,CAAC;AACD,0BAAc;AAAA,UAChB;AAAA,QACF;AAEA,cAAM,gBAAgB,CAAC,UAAkB;AACvC,cAAI,CAAC,MAAO;AACZ,0BAAgB;AAChB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI;AAAA,YACJ;AAAA,UACF,CAAC;AAAA,QACH;AAEA,cAAM,sBAAsB,MAAM;AAChC,cAAI,CAAC,YAAa;AAClB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI;AAAA,UACN,CAAC;AACD,wBAAc;AAAA,QAChB;AAEA,cAAM,eAAe,CAAC,iBAA8C;AAClE,cAAI,SAAU;AACd,qBAAW;AACX,8BAAoB;AACpB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN;AAAA,YACA,OAAO;AAAA,cACL,aAAa,QAAQ;AAAA,cACrB,cAAc;AAAA,cACd,aAAa;AAAA,YACf;AAAA,UACF,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAEA,cAAM,eAAe,MAAM;AACzB,cAAI,SAAS;AACX;AAAA,UACF;AACA,oBAAU;AACV,cAAI,eAAe;AACjB,0BAAc,OAAO,EAAE,MAAM,MAAM,MAAS;AAAA,UAC9C;AACA,uBAAa,MAAM;AAAA,QACrB;AAEA,YAAI,QAAQ,aAAa;AACvB,kBAAQ,YAAY,iBAAiB,SAAS,YAAY;AAAA,QAC5D;AAEA,cAAM,gBAAgB;AACtB,YAAI,YAAY;AAEhB,YAAI;AAEF,gBAAM,gBAAgB,IAAI,sBAAsB;AAEhD,iBAAO,YAAY,iBAAiB,CAAC,WAAW,CAAC,UAAU;AACzD,yBAAa;AAEb,kBAAM,eAAe,QAAQ;AAAA,cAC3B;AAAA,cACA;AAAA,YACF;AACA,4BAAgB,aAAa,UAAU;AAEvC,gBAAI,YAA8B,CAAC;AACnC,gBAAI,oBAAoB;AACxB,gBAAI,yBAAyB;AAG7B,gBAAI,oBAAmC;AACvC,gBAAI,wBAAwB;AAC5B,gBAAI,0BAA0B;AAC9B,gBAAI,0BAA0B;AAC9B,gBAAI,cAAc;AAElB,mBAAO,CAAC,SAAS;AACf,oBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,cAAc,KAAK;AACjD,kBAAI,MAAM;AACR;AAAA,cACF;AAGA,4BAAc,SAAS,KAAK;AAG5B,qBAAO,cAAc,WAAW,GAAG;AACjC,sBAAM,iBAAiB;AACvB,sBAAM,SAAS,cAAc,qBAAqB;AAClD,8BAAc,OAAO;AAErB,oBAAI,eAAe;AAEnB,oBAAI,CAAC,kBAAkB,OAAO,SAAS;AACrC,sBAAI,OAAO,aAAa;AACtB,kCAAc,OAAO,WAAW;AAChC,mCAAe;AAAA,kBACjB;AAEA,sCAAoB,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EACnD,SAAS,EAAE,EACX,MAAM,GAAG,CAAC,CAAC;AACd,0CAAwB;AACxB,4CAA0B;AAC1B,4CAA0B;AAC1B,gCAAc;AAEd;AAAA,gBACF;AAEA,oBAAI,OAAO,eAAe;AACxB,iCAAe;AACf,sBAAI,OAAO,aAAa;AACtB,+CAA2B,OAAO;AAAA,kBACpC;AAEA,sBAAI,yBAAyB,mBAAmB;AAC9C,0BAAM,cAAc;AAAA,sBAClB;AAAA,oBACF;AACA,wBAAI,YAAY,SAAS,yBAAyB;AAChD,4BAAM,QAAQ,YAAY,MAAM,uBAAuB;AACvD,gDAA0B,YAAY;AACtC,0BAAI,MAAM,SAAS,GAAG;AACpB,mCAAW,QAAQ;AAAA,0BACjB,MAAM;AAAA,0BACN,IAAI;AAAA,0BACJ;AAAA,wBACF,CAAC;AAAA,sBACH;AAAA,oBACF;AAAA,kBACF;AAEA,wBAAM,SAAS,uBAAuB,OAAO,aAAa;AAC1D,wBAAM,kBAAkB,OAAO;AAC/B,wBAAM,oBAAoB,gBAAgB,MAAM,GAAG,CAAC;AAEpD,sBAAI,kBAAkB,WAAW,GAAG;AAClC,gCAAY,CAAC;AACb,wCAAoB;AACpB,kCAAc,OAAO,aAA
a;AAClC,wBAAI,OAAO,gBAAgB;AACzB,oCAAc,OAAO,cAAc;AAAA,oBACrC;AAEA,wCAAoB;AACpB,4CAAwB;AACxB,8CAA0B;AAC1B,8CAA0B;AAC1B,kCAAc;AACd;AAAA,kBACF;AAEA,sBAAI,kBAAkB,SAAS,KAAK,mBAAmB;AACrD,sCAAkB,CAAC,EAAE,aAAa;AAAA,kBACpC;AAEA,8BAAY;AACZ,sCAAoB,UAAU,SAAS;AAEvC,6BAAW,CAAC,OAAO,IAAI,KAAK,UAAU,QAAQ,GAAG;AAC/C,0BAAM,aACJ,UAAU,KAAK,oBACX,oBACA,KAAK;AACX,0BAAM,WAAW,KAAK;AACtB,0BAAM,WAAW,KAAK,UAAU,KAAK,QAAQ,CAAC,CAAC;AAE/C,wBAAI,eAAe,mBAAmB;AACpC,0BAAI,CAAC,uBAAuB;AAC1B,mCAAW,QAAQ;AAAA,0BACjB,MAAM;AAAA,0BACN,IAAI;AAAA,0BACJ;AAAA,wBACF,CAAC;AACD,gDAAwB;AAAA,sBAC1B;AAEA,4BAAM,cAAc;AAAA,wBAClB;AAAA,sBACF;AACA,0BAAI,YAAY,SAAS,yBAAyB;AAChD,8BAAM,QAAQ,YAAY;AAAA,0BACxB;AAAA,wBACF;AACA,kDAA0B,YAAY;AACtC,4BAAI,MAAM,SAAS,GAAG;AACpB,qCAAW,QAAQ;AAAA,4BACjB,MAAM;AAAA,4BACN,IAAI;AAAA,4BACJ;AAAA,0BACF,CAAC;AAAA,wBACH;AAAA,sBACF;AAAA,oBACF,OAAO;AACL,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,wBACJ;AAAA,sBACF,CAAC;AACD,0BAAI,SAAS,SAAS,GAAG;AACvB,mCAAW,QAAQ;AAAA,0BACjB,MAAM;AAAA,0BACN,IAAI;AAAA,0BACJ,OAAO;AAAA,wBACT,CAAC;AAAA,sBACH;AAAA,oBACF;AAEA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,oBACN,CAAC;AACD,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN;AAAA,sBACA;AAAA,sBACA,OAAO;AAAA,sBACP,kBAAkB;AAAA,oBACpB,CAAC;AAAA,kBACH;AAEA,4CAA0B,OAAO;AACjC,iCAAe;AAEf,sBAAI,qBAAqB,eAAe;AACtC,0BAAM,cAAc,OAAO,EAAE,MAAM,MAAM,MAAS;AAClD;AAAA,kBACF;AAEA,sCAAoB;AACpB,0CAAwB;AACxB,4CAA0B;AAC1B,4CAA0B;AAC1B,gCAAc;AACd;AAAA,gBACF;AAEA,oBAAI,aAAa;AACf,sBAAI,OAAO,aAAa;AACtB,+CAA2B,OAAO;AAClC,mCAAe;AAEf,0BAAM,WAAW,gBAAgB,uBAAuB;AACxD,wBACE,YACA,CAAC,yBACD,mBACA;AACA,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,wBACJ;AAAA,sBACF,CAAC;AACD,8CAAwB;AAAA,oBAC1B;AAEA,wBAAI,yBAAyB,mBAAmB;AAC9C,4BAAM,cAAc;AAAA,wBAClB;AAAA,sBACF;AACA,0BAAI,YAAY,SAAS,yBAAyB;AAChD,8BAAM,QAAQ,YAAY;AAAA,0BACxB;AAAA,wBACF;AACA,kDAA0B,YAAY;AACtC,4BAAI,MAAM,SAAS,GAAG;AACpB,qCAAW,QAAQ;AAAA,4BACjB,MAAM;AAAA,4BACN,IAAI;AAAA,4BACJ;AAAA,0BACF,CAAC;AAAA,wBACH;AAAA,sBACF;AAAA,oBACF;AAAA,kBACF;AAEA;AAAA,gBACF;AAEA,oBAAI,CAAC,eAAe,OAAO,aAAa;AACtC,gCAAc,OAAO,WAAW;AAChC,iCAAe;AAAA,gBACjB;AAEA,oBAAI,CAAC,cAAc;AACjB;AAAA,gBACF;AAAA,cACF;AAEA,kBAAI,mBAAmB;AACrB;AAAA,cACF;AAAA,YACF;AACA,4BAAgB;AAEhB,gBAAI,SAAS;AACX;AAAA,YACF;AAGA,gBAAI,CAAC,qBAAqB,cAAc,WAAW,GAAG;AACpD,4BAAc,cAAc,UAAU,CAAC;AACvC,4BAAc,YAAY;AAAA,YAC5B;AAEA,gBAAI,CAAC,qBAAqB,UAAU,WAAW,GAAG;AAChD,2BAAa,MAAM;AACnB;AAAA,YACF;AAEA,gBAAI,wBAAwB;AAC1B,4BAAc,sBAAsB;AAAA,YACtC;AAEA,yBAAa,YAAY;AACzB;AAAA,UACF;AAEA,cAAI,CAAC,YAAY,CAAC,SAAS;AACzB,yBAAa,OAAO;AAAA,UACtB;AAAA,QACF,SAAS,OAAO;AACd,qBAAW,QAAQ,EAAE,MAAM,SAAS,MAAM,CAAC;AAC3C,qBAAW,MAAM;AAAA,QACnB,UAAE;AACA,cAAI,QAAQ,aAAa;AACvB,oBAAQ,YAAY,oBAAoB,SAAS,YAAY;AAAA,UAC/D;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA,SAAS,EAAE,MAAM,EAAE,UAAU,gBAAgB,SAAS,cAAc,EAAE;AAAA,IACxE;AAAA,EACF;AACF;;;AC7xBA,SAAS,oBAAoB;AA4CtB,IAAM,0BAAN,MAAkE;AAAA,EAoBhE,YAAY,WAA4C,CAAC,GAAG;AAnBnE,SAAS,uBAAuB;AAChC,SAAS,WAAW;AACpB,SAAS,UAAkB;AAC3B,SAAS,wBAAwB;AACjC,SAAS,uBAAuB;AAEhC,SAAQ,WAA4C;AAAA,MAClD,gBACE;AAAA,MACF,gBACE;AAAA,MACF,gBACE;AAAA,MACF,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAYA,SAAU,kBAAkB,YAAmC;AAC7D,aAAO,aAAa;AAAA,QAClB;AAAA,UACE,gBAAgB,KAAK,SAAS;AAAA,UAC9B,gBAAgB,KAAK,SAAS;AAAA,QAChC;AAAA,QACA;AAAA,UACE,aAAa;AAAA,YACX,kBAAkB,MAAM,KAAK;AAAA,YAC7B,UAAU,KAAK,SAAS;AAAA,UAC1B;AAAA,UACA,aAAa,KAAK,SAAS;AAAA,UAC3B,UAAU,KAAK,SAAS;AAAA,QAC1B;AAAA,MACF;AAAA,IACF;AAEA,SAAO,UAAU,OAAO,YAMlB;AAEJ,UAAI,QAAQ,aAAa,SAAS;AAChC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAEA,YAAM,WAAW,MAAM,KAAK;AAC5B,YAAM,aAAa,QAAQ,OAAO,IAAI,CAAC,SAAS;AAC9C,cAAM,iBAAiB,SAAS,MAAM,IAAI;AAC1C,cAAM,CAAC,SAAS,IAAI,eAAe;AACnC,eAAO
,WAAW,kBAAkB,CAAC;AAAA,MACvC,CAAC;AAED,aAAO;AAAA,QACL;AAAA,QACA,aAAa;AAAA,UACX,OAAO;AAAA,UACP,UAAU;AAAA,UACV,iBAAiB,QAAQ,OAAO;AAAA,QAClC;AAAA,MACF;AAAA,IACF;AAnDE,SAAK,WAAW,EAAE,GAAG,KAAK,UAAU,GAAG,SAAS;AAChD,SAAK,mBAAmB,MAAM,KAAK,SAAS,cAAe,EAAE;AAAA,MAC3D,CAAC,aAAa,SAAS,KAAM,UAAU;AAAA,IACzC;AACA,SAAK,eAAe,KAAK,gBAAgB;AAAA,EAC3C;AA+CF;;;ACtHA;AAAA,EAEE;AAAA,OAEK;AAyDA,SAAS,gBACd,UAAqC,CAAC,GACnB;AACnB,QAAM,kBAAkB,CACtB,SACA,aACG;AACH,WAAO,IAAI,2BAA2B,SAAS,QAAQ;AAAA,EACzD;AAEA,QAAM,uBAAuB,CAC3B,SACA,aACG;AACH,WAAO,IAAI,wBAAwB,QAAQ;AAAA,EAC7C;AAEA,QAAM,WAAW,SACf,UAAgC,QAChC,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,WAAS,cAAc,CAAC,YAAoB;AAC1C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,cAAc,CAAC;AAAA,EAClE;AAEA,WAAS,qBAAqB,CAAC,YAAoB;AACjD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AAEA,SAAO;AACT;AAKO,IAAM,YAAY,gBAAgB;","names":["prefixText"]}
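The bundled sources above define a createBuiltInAI provider factory (also exported as the default builtInAI instance), a Prompt API-backed chat model that emulates tool calling through fenced JSON blocks, and a MediaPipe TextEmbedder-based embedding model registered as "embedding". Below is a minimal consumer sketch, assuming a Chromium browser where the Prompt API is available and the AI SDK v5 helpers generateText and embed; the demo wrapper and prompt strings are illustrative only, not part of the package.

import { generateText, embed } from "ai";
import { builtInAI } from "@browser-ai/core";

async function demo() {
  // Chat via the default provider instance; "text" is the default model id in the bundled source.
  const { text } = await generateText({
    model: builtInAI("text"),
    prompt: "Summarize the Prompt API in one sentence.",
  });

  // Embeddings via the MediaPipe-backed model exposed as textEmbeddingModel("embedding").
  const { embedding } = await embed({
    model: builtInAI.textEmbeddingModel("embedding"),
    value: "browser built-in AI",
  });

  return { text, embedding };
}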
package/package.json ADDED
@@ -0,0 +1,73 @@
+ {
+   "name": "@browser-ai/core",
+   "version": "1.0.0",
+   "description": "Browser Built-in AI API provider for Vercel AI SDK (Chrome & Edge)",
+   "author": {
+     "name": "Jakob Hoeg Mørk",
+     "url": "https://jakobhoeg.dev"
+   },
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/jakobhoeg/browser-ai.git",
+     "directory": "packages/vercel/core"
+   },
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "import": "./dist/index.js",
+       "require": "./dist/index.js"
+     }
+   },
+   "files": [
+     "dist/**/*"
+   ],
+   "sideEffects": false,
+   "scripts": {
+     "build": "npm run clean && tsup",
+     "build:prod": "tsup",
+     "build:test": "npm run test:run && tsup",
+     "dev": "tsup --watch",
+     "clean": "rimraf dist",
+     "test": "vitest",
+     "test:watch": "vitest --watch",
+     "test:coverage": "vitest --coverage",
+     "test:run": "vitest run",
+     "dev:example": "npm run build && npm run -w examples/next-hybrid dev"
+   },
+   "keywords": [
+     "ai",
+     "ai-sdk",
+     "vercel",
+     "browser-ai",
+     "built-in-ai",
+     "chrome",
+     "edge",
+     "prompt-api",
+     "language-model",
+     "tool-calling",
+     "function-calling"
+   ],
+   "license": "Apache License",
+   "dependencies": {
+     "@mediapipe/tasks-text": "^0.10.22-rc.20250304"
+   },
+   "peerDependencies": {
+     "ai": "^5.0.0"
+   },
+   "devDependencies": {
+     "@types/dom-chromium-ai": "^0.0.10",
+     "@types/node": "^20.0.0",
+     "@vitest/coverage-v8": "^1.0.0",
+     "jsdom": "^26.1.0",
+     "rimraf": "^5.0.0",
+     "tsup": "^8.0.0",
+     "typescript": "^5.0.0",
+     "vitest": "^1.0.0",
+     "zod": "^3.25.76"
+   },
+   "publishConfig": {
+     "access": "public"
+   }
+ }
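Per the manifest above, consumers install the package alongside its ai ^5.0.0 peer dependency (and zod, which appears here only as a devDependency) to use the fenced-JSON tool-calling path implemented in doStream. The following streaming sketch is written under those assumptions; the getWeather tool, its schema, and the prompt are hypothetical and exist only for illustration.

import { streamText, tool } from "ai";
import { z } from "zod";
import { builtInAI } from "@browser-ai/core";

// Hypothetical tool; the provider advertises it to the on-device model via the
// JSON tool-calling system prompt built in the bundled doStream implementation.
const getWeather = tool({
  description: "Get the current weather for a city",
  inputSchema: z.object({ city: z.string() }),
  execute: async ({ city }) => ({ city, forecast: "sunny" }),
});

const result = streamText({
  model: builtInAI(),
  prompt: "What is the weather in Copenhagen?",
  tools: { getWeather },
});

// Text deltas and tool-call parts are emitted as the on-device model streams.
for await (const delta of result.textStream) {
  console.log(delta);
}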