@providerprotocol/ai 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +84 -0
- package/dist/anthropic/index.d.ts +41 -0
- package/dist/anthropic/index.js +500 -0
- package/dist/anthropic/index.js.map +1 -0
- package/dist/chunk-CUCRF5W6.js +136 -0
- package/dist/chunk-CUCRF5W6.js.map +1 -0
- package/dist/chunk-FTFX2VET.js +424 -0
- package/dist/chunk-FTFX2VET.js.map +1 -0
- package/dist/chunk-QUUX4G7U.js +117 -0
- package/dist/chunk-QUUX4G7U.js.map +1 -0
- package/dist/chunk-Y6Q7JCNP.js +39 -0
- package/dist/chunk-Y6Q7JCNP.js.map +1 -0
- package/dist/google/index.d.ts +69 -0
- package/dist/google/index.js +517 -0
- package/dist/google/index.js.map +1 -0
- package/dist/http/index.d.ts +61 -0
- package/dist/http/index.js +43 -0
- package/dist/http/index.js.map +1 -0
- package/dist/index.d.ts +792 -0
- package/dist/index.js +898 -0
- package/dist/index.js.map +1 -0
- package/dist/openai/index.d.ts +204 -0
- package/dist/openai/index.js +1340 -0
- package/dist/openai/index.js.map +1 -0
- package/dist/provider-CUJWjgNl.d.ts +192 -0
- package/dist/retry-I2661_rv.d.ts +118 -0
- package/package.json +88 -0
- package/src/anthropic/index.ts +3 -0
- package/src/core/image.ts +188 -0
- package/src/core/llm.ts +619 -0
- package/src/core/provider.ts +92 -0
- package/src/google/index.ts +3 -0
- package/src/http/errors.ts +112 -0
- package/src/http/fetch.ts +210 -0
- package/src/http/index.ts +31 -0
- package/src/http/keys.ts +136 -0
- package/src/http/retry.ts +205 -0
- package/src/http/sse.ts +136 -0
- package/src/index.ts +32 -0
- package/src/openai/index.ts +9 -0
- package/src/providers/anthropic/index.ts +17 -0
- package/src/providers/anthropic/llm.ts +196 -0
- package/src/providers/anthropic/transform.ts +452 -0
- package/src/providers/anthropic/types.ts +213 -0
- package/src/providers/google/index.ts +17 -0
- package/src/providers/google/llm.ts +203 -0
- package/src/providers/google/transform.ts +487 -0
- package/src/providers/google/types.ts +214 -0
- package/src/providers/openai/index.ts +151 -0
- package/src/providers/openai/llm.completions.ts +201 -0
- package/src/providers/openai/llm.responses.ts +211 -0
- package/src/providers/openai/transform.completions.ts +628 -0
- package/src/providers/openai/transform.responses.ts +718 -0
- package/src/providers/openai/types.ts +711 -0
- package/src/types/content.ts +133 -0
- package/src/types/errors.ts +85 -0
- package/src/types/index.ts +105 -0
- package/src/types/llm.ts +211 -0
- package/src/types/messages.ts +182 -0
- package/src/types/provider.ts +195 -0
- package/src/types/schema.ts +58 -0
- package/src/types/stream.ts +146 -0
- package/src/types/thread.ts +226 -0
- package/src/types/tool.ts +88 -0
- package/src/types/turn.ts +118 -0
- package/src/utils/id.ts +28 -0
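The file list implies one compiled entry per provider (anthropic, google, openai) plus shared http utilities, a root entry, and the TypeScript sources they were bundled from. A minimal consumer sketch, assuming the export map in package/package.json (not reproduced here) exposes subpaths mirroring dist/ and that the root entry re-exports the core llm() helper shown in the bundled JSDoc; any name or path not taken from that JSDoc is an assumption:

```ts
// Hypothetical subpath imports; the real export map lives in package/package.json (not shown).
import { llm } from '@providerprotocol/ai';
import { openai } from '@providerprotocol/ai/openai';

// Bind a provider model reference to the core llm() helper, following the JSDoc
// example embedded in dist/openai/index.js.map.
const model = llm({
  model: openai('gpt-4o'),
  params: { max_tokens: 1000 },
});

// Generate a single turn and read the text of the assistant response.
const turn = await model.generate('Hello!');
console.log(turn.response.text);
```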
package/dist/openai/index.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/providers/openai/transform.completions.ts","../../src/providers/openai/llm.completions.ts","../../src/providers/openai/transform.responses.ts","../../src/providers/openai/llm.responses.ts","../../src/providers/openai/index.ts"],"sourcesContent":["…"],"mappings":"…"}

The single added line is a version 3 sourcemap for the bundled OpenAI provider: its sourcesContent entries embed the five source files under package/src/providers/openai/ named in sources, and the base64-VLQ mappings string is elided above.
,IAAI,GAAG;AAEtD,oBAAI,SAAS,UAAU;AACrB;AAAA,gBACF;AAGA,oBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,wBAAM,QAAQ;AAGd,sBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,0BAAM,YAAY,MAAM;AACxB,0BAAM,QAAQ,IAAI;AAAA,sBAChB,UAAU,WAAW;AAAA,sBACrB;AAAA,sBACA;AAAA,sBACA;AAAA,oBACF;AACA,mCAAe,KAAK;AACpB,0BAAM;AAAA,kBACR;AAEA,wBAAM,YAAY,qBAAqB,OAAO,KAAK;AACnD,6BAAW,SAAS,WAAW;AAC7B,0BAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAGA,8BAAgB,uBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AC5KO,SAASA,kBACd,SACA,SACwB;AACxB,QAAM,SAA0B,QAAQ,UAAU,CAAC;AAEnD,QAAM,gBAAwC;AAAA,IAC5C,OAAO;AAAA,IACP,OAAO,oBAAoB,QAAQ,UAAU,QAAQ,MAAM;AAAA,EAC7D;AAGA,MAAI,OAAO,gBAAgB,QAAW;AACpC,kBAAc,cAAc,OAAO;AAAA,EACrC;AACA,MAAI,OAAO,UAAU,QAAW;AAC9B,kBAAc,QAAQ,OAAO;AAAA,EAC/B;AACA,MAAI,OAAO,sBAAsB,QAAW;AAC1C,kBAAc,oBAAoB,OAAO;AAAA,EAC3C,WAAW,OAAO,0BAA0B,QAAW;AACrD,kBAAc,oBAAoB,OAAO;AAAA,EAC3C,WAAW,OAAO,eAAe,QAAW;AAC1C,kBAAc,oBAAoB,OAAO;AAAA,EAC3C;AACA,MAAI,OAAO,iBAAiB,QAAW;AACrC,kBAAc,eAAe,OAAO;AAAA,EACtC;AACA,MAAI,OAAO,UAAU,QAAW;AAC9B,kBAAc,QAAQ,OAAO;AAAA,EAC/B;AACA,MAAI,OAAO,aAAa,QAAW;AACjC,kBAAc,WAAW,OAAO;AAAA,EAClC;AACA,MAAI,OAAO,eAAe,QAAW;AACnC,kBAAc,aAAa,OAAO;AAAA,EACpC;AACA,MAAI,OAAO,YAAY,QAAW;AAChC,kBAAc,UAAU,OAAO;AAAA,EACjC;AACA,MAAI,OAAO,eAAe,QAAW;AACnC,kBAAc,aAAa,OAAO;AAAA,EACpC;AACA,MAAI,OAAO,yBAAyB,QAAW;AAC7C,kBAAc,uBAAuB,OAAO;AAAA,EAC9C;AACA,MAAI,OAAO,cAAc,QAAW;AAClC,kBAAc,YAAY,EAAE,GAAG,OAAO,UAAU;AAAA,EAClD;AACA,MAAI,OAAO,qBAAqB,QAAW;AACzC,kBAAc,YAAY;AAAA,MACxB,GAAI,cAAc,aAAa,CAAC;AAAA,MAChC,QAAQ,OAAO;AAAA,IACjB;AAAA,EACF;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,kBAAc,QAAQ,QAAQ,MAAM,IAAIC,cAAa;AACrD,QAAI,OAAO,wBAAwB,QAAW;AAC5C,oBAAc,sBAAsB,OAAO;AAAA,IAC7C;AAAA,EACF;AAGA,MAAI,QAAQ,WAAW;AACrB,UAAM,SAAkC;AAAA,MACtC,MAAM;AAAA,MACN,YAAY,QAAQ,UAAU;AAAA,MAC9B,UAAU,QAAQ,UAAU;AAAA,MAC5B,GAAI,QAAQ,UAAU,yBAAyB,SAC3C,EAAE,sBAAsB,QAAQ,UAAU,qBAAqB,IAC/D,EAAE,sBAAsB,MAAM;AAAA,IACpC;AACA,QAAI,QAAQ,UAAU,aAAa;AACjC,aAAO,cAAc,QAAQ,UAAU;AAAA,IACzC;AAEA,kBAAc,OAAO;AAAA,MACnB,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,QACN,aAAa,QAAQ,UAAU;AAAA,QAC/B;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,oBACP,UACA,QACqC;AACrC,QAAM,SAAqC,CAAC;AAE5C,MAAI,QAAQ;AACV,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQC,kBAAiB,OAAO;AACtC,WAAO,KAAK,GAAG,KAAK;AAAA,EACtB;AAGA,MAAI,OAAO,WAAW,KAAK,OAAO,CAAC,GAAG,SAAS,WAAW;AACxD,UAAM,OAAO,OAAO,CAAC;AACrB,QAAI,KAAK,SAAS,UAAU,OAAO,KAAK,YAAY,UAAU;AAC5D,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAASC,oBAAgD,SAAmB;AAC1E,SAAO,QAAQ,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE,SAAS,QAAQ;AAC9D;AAKA,SAASD,kBAAiB,SAA8C;AACtE,MAAI,cAAc,OAAO,GAAG;AAC1B,UAAM,eAAeC,oBAAmB,QAAQ,OAAO;AAEvD,QAAI,aAAa,WAAW,KAAK,aAAa,CAAC,GAAG,SAAS,QAAQ;AACjE,aAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAU,aAAa,CAAC,EAAgB;AAAA,QAC1C;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS,aAAa,IAAI,oBAAoB;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAEA,MAAI,mBAAmB,OAAO,GAAG;AAC/B,UAAM,eAAeA,oBAAmB,QAAQ,OAAO;AACvD,UAAM,QAAoC,CAAC;AAG3C,UAAM,eAA6C,aAChD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,OAAmC;AAAA,MACvC,MAAM;AAAA,MACN,MAAM,EAAE;AAAA,IACV,EAAE;AAGJ,QAAI,aAAa,SAAS,GAAG;AAC3B,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,aAAa,QAAQ,UAAU;AAGrC,UAAM,oBAAoB,YAAY;AAEtC,QAAI,qBAAqB,kBAAkB,SAAS,GAAG;AACrD,iBAAW,MAAM,mBAAmB;AAClC,cAAM,KAAK;AAAA
,UACT,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,SAAS,GAAG;AAAA,UACZ,MAAM,GAAG;AAAA,UACT,WAAW,GAAG;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC5D,iBAAW,QAAQ,QAAQ,WAAW;AACpC,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,MAAM,KAAK,UAAU;AAAA,UACzB,SAAS,KAAK;AAAA,UACd,MAAM,KAAK;AAAA,UACX,WAAW,KAAK,UAAU,KAAK,SAAS;AAAA,QAC1C,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,MAAI,oBAAoB,OAAO,GAAG;AAEhC,WAAO,QAAQ,QAAQ,IAAI,CAAC,YAAY;AAAA,MACtC,MAAM;AAAA,MACN,SAAS,OAAO;AAAA,MAChB,QACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,IACpC,EAAE;AAAA,EACJ;AAEA,SAAO,CAAC;AACV;AAKA,SAAS,qBAAqB,OAAiD;AAC7E,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,EAAE,MAAM,cAAc,MAAM,MAAM,KAAK;AAAA,IAEhD,KAAK,SAAS;AACZ,YAAM,aAAa;AACnB,UAAI,WAAW,OAAO,SAAS,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,QAAQ,WAAW,QAAQ,WAAW,WAAW,OAAO,IAAI;AAAA,QACzE;AAAA,MACF;AAEA,UAAI,WAAW,OAAO,SAAS,OAAO;AACpC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,WAAW,OAAO;AAAA,QAC/B;AAAA,MACF;AAEA,UAAI,WAAW,OAAO,SAAS,SAAS;AAEtC,cAAM,SAAS;AAAA,UACb,MAAM,KAAK,WAAW,OAAO,IAAI,EAC9B,IAAI,CAAC,MAAM,OAAO,aAAa,CAAC,CAAC,EACjC,KAAK,EAAE;AAAA,QACZ;AACA,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,QAAQ,WAAW,QAAQ,WAAW,MAAM;AAAA,QACzD;AAAA,MACF;AAEA,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,6BAA6B,MAAM,IAAI,EAAE;AAAA,EAC7D;AACF;AAKA,SAASF,eAAc,MAAiC;AACtD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,KAAK;AAAA,IACX,aAAa,KAAK;AAAA,IAClB,YAAY;AAAA,MACV,MAAM;AAAA,MACN,YAAY,KAAK,WAAW;AAAA,MAC5B,UAAU,KAAK,WAAW;AAAA,MAC1B,GAAI,KAAK,WAAW,yBAAyB,SACzC,EAAE,sBAAsB,KAAK,WAAW,qBAAqB,IAC7D,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAASG,mBAAkB,MAA4C;AAE5E,QAAM,cAA2B,CAAC;AAClC,QAAM,YAAwB,CAAC;AAC/B,QAAM,oBAKD,CAAC;AACN,MAAI,aAAa;AACjB,MAAI;AAEJ,aAAW,QAAQ,KAAK,QAAQ;AAC9B,QAAI,KAAK,SAAS,WAAW;AAC3B,YAAM,cAAc;AACpB,iBAAW,WAAW,YAAY,SAAS;AACzC,YAAI,QAAQ,SAAS,eAAe;AAClC,sBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,QAAQ,KAAK,CAAC;AAGrD,cAAI,mBAAmB,QAAW;AAChC,gBAAI;AACF,+BAAiB,KAAK,MAAM,QAAQ,IAAI;AAAA,YAC1C,QAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF,WAAW,QAAQ,SAAS,WAAW;AACrC,sBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AACxD,uBAAa;AAAA,QACf;AAAA,MACF;AAAA,IACF,WAAW,KAAK,SAAS,iBAAiB;AACxC,YAAM,eAAe;AACrB,UAAI,OAAgC,CAAC;AACrC,UAAI;AACF,eAAO,KAAK,MAAM,aAAa,SAAS;AAAA,MAC1C,QAAQ;AAAA,MAER;AACA,gBAAU,KAAK;AAAA,QACb,YAAY,aAAa;AAAA,QACzB,UAAU,aAAa;AAAA,QACvB,WAAW;AAAA,MACb,CAAC;AACD,wBAAkB,KAAK;AAAA,QACrB,IAAI,aAAa;AAAA,QACjB,SAAS,aAAa;AAAA,QACtB,MAAM,aAAa;AAAA,QACnB,WAAW,aAAa;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,KAAK;AAAA,MACT,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,KAAK;AAAA,UACZ,QAAQ,KAAK;AAAA;AAAA,UAEb,aAAa,KAAK;AAAA,UAClB,mBACE,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,KAAK,MAAM;AAAA,IACxB,cAAc,KAAK,MAAM;AAAA,IACzB,aAAa,KAAK,MAAM;AAAA,EAC1B;AAGA,MAAI,aAAa;AACjB,MAAI,KAAK,WAAW,aAAa;AAC/B,iBAAa,UAAU,SAAS,IAAI,aAAa;AAAA,EACnD,WAAW,KAAK,WAAW,cAAc;AACvC,iBAAa,KAAK,oBAAoB,WAAW,sBAC7C,eACA;AAAA,EACN,WAAW,KAAK,WAAW,UAAU;AACnC,iBAAa;AAAA,EACf;AACA,MAAI,cAAc,eAAe,SAAS;AACxC,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;AAsBO,SAASC,qBAA0C;AACxD,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,aAAa,oBAAI,IAAI;AAAA,IACrB,WAAW,oBAAI,IAAI;AAAA,IACnB,QAAQ;AAAA,IACR,aAAa;AAAA,IACb,cAAc;AAAA,IACd,YAAY;AAAA,EACd;AACF;AAMO,SAASC,sBACd,OACA,OACe;AACf,QAAM,SAAwB,CAAC;AAE/B,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,YAAM,KAAK,MAAM,SAAS;AAC1B,YAAM,QAAQ,MAAM,SAAS;AAC7B,aAAO,KAAK,EAAE,MAAM,iBAAiB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAC1D;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AA
Cf;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf,UAAI,MAAM,SAAS,OAAO;AACxB,cAAM,cAAc,MAAM,SAAS,MAAM;AACzC,cAAM,eAAe,MAAM,SAAS,MAAM;AAAA,MAC5C;AACA,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AACzD;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AACzD;AAAA,IAEF,KAAK;AACH,UAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,cAAM,eAAe,MAAM;AAC3B,cAAM,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY,KAAK;AAAA,UAC1D,WAAW;AAAA,QACb;AACA,iBAAS,SAAS,aAAa;AAC/B,iBAAS,SAAS,aAAa;AAC/B,iBAAS,OAAO,aAAa;AAC7B,YAAI,aAAa,WAAW;AAC1B,mBAAS,YAAY,aAAa;AAAA,QACpC;AACA,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,CAAC;AAAA,MACV,CAAC;AACD;AAAA,IAEF,KAAK;AACH,UAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,cAAM,eAAe,MAAM;AAC3B,cAAM,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY,KAAK;AAAA,UAC1D,WAAW;AAAA,QACb;AACA,iBAAS,SAAS,aAAa;AAC/B,iBAAS,SAAS,aAAa;AAC/B,iBAAS,OAAO,aAAa;AAC7B,YAAI,aAAa,WAAW;AAC1B,mBAAS,YAAY,aAAa;AAAA,QACpC;AACA,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,CAAC;AAAA,MACV,CAAC;AACD;AAAA,IAEF,KAAK;AAEH,YAAM,cAAc,MAAM,YAAY,IAAI,MAAM,YAAY,KAAK;AACjE,YAAM,YAAY,IAAI,MAAM,cAAc,cAAc,MAAM,KAAK;AACnE,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,EAAE,MAAM,MAAM,MAAM;AAAA,MAC7B,CAAC;AACD;AAAA,IAEF,KAAK;AACH,YAAM,YAAY,IAAI,MAAM,cAAc,MAAM,IAAI;AACpD;AAAA,IAEF,KAAK,0BAA0B;AAC7B,YAAM,aAAa;AACnB,YAAM,iBAAiB,MAAM,YAAY,IAAI,MAAM,YAAY,KAAK;AACpE,YAAM,YAAY,IAAI,MAAM,cAAc,iBAAiB,MAAM,KAAK;AACtE,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,EAAE,MAAM,MAAM,MAAM;AAAA,MAC7B,CAAC;AACD;AAAA,IACF;AAAA,IAEA,KAAK;AACH,YAAM,aAAa;AACnB,YAAM,YAAY,IAAI,MAAM,cAAc,MAAM,OAAO;AACvD;AAAA,IAEF,KAAK,0CAA0C;AAE7C,UAAI,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY;AACrD,UAAI,CAAC,UAAU;AACb,mBAAW,EAAE,WAAW,GAAG;AAC3B,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,UAAI,MAAM,WAAW,CAAC,SAAS,QAAQ;AACrC,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,UAAI,MAAM,WAAW,CAAC,SAAS,QAAQ;AACrC,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,eAAS,aAAa,MAAM;AAC5B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO;AAAA,UACL,YAAY,SAAS,UAAU,SAAS,UAAU;AAAA,UAClD,UAAU,SAAS;AAAA,UACnB,eAAe,MAAM;AAAA,QACvB;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAAA,IAEA,KAAK,yCAAyC;AAE5C,UAAI,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY;AACrD,UAAI,CAAC,UAAU;AACb,mBAAW,EAAE,WAAW,GAAG;AAC3B,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,UAAI,MAAM,SAAS;AACjB,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,UAAI,MAAM,SAAS;AACjB,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,eAAS,OAAO,MAAM;AACtB,eAAS,YAAY,MAAM;AAC3B;AAAA,IACF;AAAA,IAEA,KAAK;AAEH;AAAA,IAEF;AAEE;AAAA,EACJ;AAEA,SAAO;AACT;AAKO,SAASC,wBAAuB,OAA0C;AAC/E,QAAM,cAA2B,CAAC;AAClC,MAAI;AAGJ,aAAW,CAAC,EAAE,IAAI,KAAK,MAAM,aAAa;AACxC,QAAI,MAAM;AACR,kBAAY,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAEvC,UAAI,mBAAmB,QAAW;AAChC,YAAI;AACF,2BAAiB,KAAK,MAAM,IAAI;AAAA,QAClC,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,YAAwB,CAAC;AAC/B,QAAM,oBAKD,CAAC;AACN,aAAW,CAAC,EAAE,QAAQ,KAAK,MAAM,WAAW;AAC1C,QAAI,OAAgC,CAAC;AACrC,QAAI,SAAS,WAAW;AACtB,UAAI;AACF,eAAO,KAAK,MAAM,SAAS,SAAS;AAAA,MACtC,QAAQ;AAAA,MAER;AAAA,IACF;AACA,UAAM,SAAS,SAAS,UAAU;AAClC,UAAM,SAAS,SAAS,UAAU,SAAS,UAAU;AACrD,UAAM,OAAO,SAAS,QAAQ;AAC9B,cAAU,KAAK;AAAA,MACb,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,WAAW;AAAA,IACb,CAAC;AAED,QAAI,UAAU,UAAU,MAAM;AAC5B,wBAAkB,KAAK;AAAA,QACrB,IAAI;AAAA,QACJ,SAAS;AAAA,QACT;AAAA,QACA,WAAW,SAAS;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,MAAM;AAAA,MACV,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA;AAAA,UAEd,aAAa,MAAM;AAAA,UACnB,mBACE,kBAAkB,S
AAS,IAAI,oBAAoB;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,MAAM;AAAA,IACnB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM,cAAc,MAAM;AAAA,EACzC;AAGA,MAAI,aAAa;AACjB,MAAI,MAAM,WAAW,aAAa;AAChC,iBAAa,UAAU,SAAS,IAAI,aAAa;AAAA,EACnD,WAAW,MAAM,WAAW,UAAU;AACpC,iBAAa;AAAA,EACf;AACA,MAAI,MAAM,cAAc,eAAe,SAAS;AAC9C,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;;;AC5rBA,IAAM,2BAA2B;AAKjC,IAAMC,uBAAuC;AAAA,EAC3C,WAAW;AAAA,EACX,OAAO;AAAA,EACP,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,SAAS,4BAAyD;AAEvE,MAAI,cAAmD;AAEvD,SAAO;AAAA,IACL,aAAa,UAAwC;AACnD,oBAAc;AAAA,IAChB;AAAA,IAEA,KAAK,SAAiD;AAEpD,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,QAAwC;AAAA,QAC5C;AAAA,QACA,cAAcA;AAAA,QAEd,IAAI,WAAyC;AAC3C,iBAAO;AAAA,QACT;AAAA,QAEA,MAAM,SAAS,SAA4D;AACzE,gBAAM,SAAS,MAAM;AAAA,YACnB,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,gBAAM,OAAOC,kBAAiB,SAAS,OAAO;AAE9C,gBAAM,WAAW,MAAM;AAAA,YACrB;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,SAAS;AAAA,gBACP,gBAAgB;AAAA,gBAChB,eAAe,UAAU,MAAM;AAAA,cACjC;AAAA,cACA,MAAM,KAAK,UAAU,IAAI;AAAA,cACzB,QAAQ,QAAQ;AAAA,YAClB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,OAAQ,MAAM,SAAS,KAAK;AAGlC,cAAI,KAAK,WAAW,YAAY,KAAK,OAAO;AAC1C,kBAAM,IAAI;AAAA,cACR,KAAK,MAAM;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAEA,iBAAOC,mBAAkB,IAAI;AAAA,QAC/B;AAAA,QAEA,OAAO,SAAuD;AAC5D,gBAAM,QAAQC,mBAAkB;AAChC,cAAI;AACJ,cAAI;AAEJ,gBAAM,kBAAkB,IAAI,QAAqB,CAAC,SAAS,WAAW;AACpE,8BAAkB;AAClB,6BAAiB;AAAA,UACnB,CAAC;AAED,0BAAgB,iBAA6D;AAC3E,gBAAI;AACF,oBAAM,SAAS,MAAM;AAAA,gBACnB,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAEA,oBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,oBAAM,OAAOF,kBAAiB,SAAS,OAAO;AAC9C,mBAAK,SAAS;AAEd,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,kBACE,QAAQ;AAAA,kBACR,SAAS;AAAA,oBACP,gBAAgB;AAAA,oBAChB,eAAe,UAAU,MAAM;AAAA,kBACjC;AAAA,kBACA,MAAM,KAAK,UAAU,IAAI;AAAA,kBACzB,QAAQ,QAAQ;AAAA,gBAClB;AAAA,gBACA,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF;AAEA,kBAAI,CAAC,SAAS,IAAI;AAChB,sBAAM,QAAQ,MAAM,mBAAmB,UAAU,UAAU,KAAK;AAChE,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,kBAAI,CAAC,SAAS,MAAM;AAClB,sBAAM,QAAQ,IAAI;AAAA,kBAChB;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,+BAAiB,QAAQ,eAAe,SAAS,IAAI,GAAG;AAEtD,oBAAI,SAAS,UAAU;AACrB;AAAA,gBACF;AAGA,oBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,wBAAM,QAAQ;AAGd,sBAAI,MAAM,SAAS,SAAS;AAC1B,0BAAM,aAAa;AACnB,0BAAM,QAAQ,IAAI;AAAA,sBAChB,WAAW,MAAM;AAAA,sBACjB;AAAA,sBACA;AAAA,sBACA;AAAA,oBACF;AACA,mCAAe,KAAK;AACpB,0BAAM;AAAA,kBACR;AAEA,wBAAM,YAAYG,sBAAqB,OAAO,KAAK;AACnD,6BAAW,YAAY,WAAW;AAChC,0BAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAGA,8BAAgBC,wBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;ACtJA,SAAS,uBAAuC;AAE9C,MAAI,iBAA8C;AAGlD,QAAM,mBAAmB,0BAA0B;AACnD,QAAM,qBAAqB,4BAA4B;AAEvD,QAAM,KAAK,SACT,SACA,SACuC;AACvC,UAAM,UAAU,SAAS,OAAO;AAChC,qBAAiB;AACjB,WAAO,EAAE,SAAS,SAAS;AAAA,EAC7B;AAGA,QAAM,aAAa;AAAA,IACjB,IAAI,MAAmC;AACrC,aAAO,mBAAmB,gBACtB,qBACA;AAAA,IACN;AAAA,EACF;AAGA,SAAO,iBAAiB,IAAI;AAAA,IAC1B,MAAM;AAAA,MACJ,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,IACA,YAAY;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,EACF,CAAC;AAED,QAAM,WAAW;AAGjB,mBAAiB,eAAe,QAAmD;AACnF,qBAAmB,eAAe,QAAmD;AAErF,SAAO;AACT;AA6BO,IAAM,SAAS,
qBAAqB;","names":["transformRequest","transformTool","transformMessage","filterValidContent","transformResponse","createStreamState","transformStreamEvent","buildResponseFromState","OPENAI_CAPABILITIES","transformRequest","transformResponse","createStreamState","transformStreamEvent","buildResponseFromState"]}
@@ -0,0 +1,192 @@
+/**
+ * UPP Error Codes
+ * Normalized error codes for cross-provider error handling
+ */
+type ErrorCode = 'AUTHENTICATION_FAILED' | 'RATE_LIMITED' | 'CONTEXT_LENGTH_EXCEEDED' | 'MODEL_NOT_FOUND' | 'INVALID_REQUEST' | 'INVALID_RESPONSE' | 'CONTENT_FILTERED' | 'QUOTA_EXCEEDED' | 'PROVIDER_ERROR' | 'NETWORK_ERROR' | 'TIMEOUT' | 'CANCELLED';
+/**
+ * Modality types supported by UPP
+ */
+type Modality = 'llm' | 'embedding' | 'image' | 'audio' | 'video';
+/**
+ * Unified Provider Protocol Error
+ * All provider errors are normalized to this type
+ */
+declare class UPPError extends Error {
+    readonly code: ErrorCode;
+    readonly provider: string;
+    readonly modality: Modality;
+    readonly statusCode?: number;
+    readonly cause?: Error;
+    readonly name = "UPPError";
+    constructor(message: string, code: ErrorCode, provider: string, modality: Modality, statusCode?: number, cause?: Error);
+    /**
+     * Create a string representation of the error
+     */
+    toString(): string;
+    /**
+     * Convert to JSON for serialization
+     */
+    toJSON(): Record<string, unknown>;
+}
+
+/**
+ * API key strategy interface for managing multiple keys
+ */
+interface KeyStrategy {
+    /** Get the next API key to use */
+    getKey(): string | Promise<string>;
+}
+/**
+ * Retry strategy interface
+ */
+interface RetryStrategy {
+    /**
+     * Called when a request fails with a retryable error.
+     * @param error - The error that occurred
+     * @param attempt - The attempt number (1 = first retry)
+     * @returns Delay in ms before retrying, or null to stop retrying
+     */
+    onRetry(error: UPPError, attempt: number): number | null | Promise<number | null>;
+    /**
+     * Called before each request. Can be used to implement pre-emptive rate limiting.
+     * Returns delay in ms to wait before making the request, or 0 to proceed immediately.
+     */
+    beforeRequest?(): number | Promise<number>;
+    /**
+     * Reset the strategy state (e.g., after a successful request)
+     */
+    reset?(): void;
+}
+/**
+ * Provider configuration for infrastructure/connection settings
+ */
+interface ProviderConfig {
+    /**
+     * API key - string, async function, or key strategy
+     * @example 'sk-xxx'
+     * @example () => fetchKeyFromVault()
+     * @example new RoundRobinKeys(['sk-1', 'sk-2'])
+     */
+    apiKey?: string | (() => string | Promise<string>) | KeyStrategy;
+    /** Override the base API URL (for proxies, local models) */
+    baseUrl?: string;
+    /** Request timeout in milliseconds */
+    timeout?: number;
+    /** Custom fetch implementation (for logging, caching, custom TLS) */
+    fetch?: typeof fetch;
+    /** API version override */
+    apiVersion?: string;
+    /** Retry strategy for handling failures and rate limits */
+    retryStrategy?: RetryStrategy;
+}
+/**
+ * A reference to a model, created by a provider factory
+ *
+ * @typeParam TOptions - Provider-specific options type
+ */
+interface ModelReference<TOptions = unknown> {
+    /** The model identifier */
+    readonly modelId: string;
+    /** The provider that created this reference */
+    readonly provider: Provider<TOptions>;
+}
+interface LLMHandler<TParams = any> {
+    /** Bind model ID to create executable model */
+    bind(modelId: string): BoundLLMModel<TParams>;
+    /**
+     * Internal: Set the parent provider reference.
+     * Called by createProvider() after the provider is constructed.
+     * This allows bind() to return models with the correct provider reference.
+     * @internal
+     */
+    _setProvider?(provider: LLMProvider<TParams>): void;
+}
+interface EmbeddingHandler<TParams = any> {
+    /** Supported input types */
+    readonly supportedInputs: ('text' | 'image')[];
+    /** Bind model ID to create executable model */
+    bind(modelId: string): BoundEmbeddingModel<TParams>;
+    /**
+     * Internal: Set the parent provider reference.
+     * @internal
+     */
+    _setProvider?(provider: EmbeddingProvider<TParams>): void;
+}
+interface ImageHandler<TParams = any> {
+    /** Bind model ID to create executable model */
+    bind(modelId: string): BoundImageModel<TParams>;
+    /**
+     * Internal: Set the parent provider reference.
+     * @internal
+     */
+    _setProvider?(provider: ImageProvider<TParams>): void;
+}
+interface BoundLLMModel<TParams = any> {
+    readonly modelId: string;
+    readonly provider: LLMProvider<TParams>;
+}
+interface BoundEmbeddingModel<TParams = any> {
+    readonly modelId: string;
+    readonly provider: EmbeddingProvider<TParams>;
+    readonly maxBatchSize: number;
+    readonly maxInputLength: number;
+    readonly dimensions: number;
+}
+interface BoundImageModel<TParams = any> {
+    readonly modelId: string;
+    readonly provider: ImageProvider<TParams>;
+}
+/**
+ * A provider factory function with metadata and modality handlers.
+ *
+ * @typeParam TOptions - Provider-specific options passed to the factory function
+ */
+interface Provider<TOptions = unknown> {
+    /** Create a model reference, optionally with provider-specific options */
+    (modelId: string, options?: TOptions): ModelReference<TOptions>;
+    /** Provider name */
+    readonly name: string;
+    /** Provider version */
+    readonly version: string;
+    /** Supported modalities */
+    readonly modalities: {
+        llm?: LLMHandler;
+        embedding?: EmbeddingHandler;
+        image?: ImageHandler;
+    };
+}
+/**
+ * Provider with LLM modality
+ *
+ * @typeParam TParams - Model-specific parameters type
+ * @typeParam TOptions - Provider-specific options type
+ */
+type LLMProvider<TParams = any, TOptions = unknown> = Provider<TOptions> & {
+    readonly modalities: {
+        llm: LLMHandler<TParams>;
+    };
+};
+/**
+ * Provider with Embedding modality
+ *
+ * @typeParam TParams - Model-specific parameters type
+ * @typeParam TOptions - Provider-specific options type
+ */
+type EmbeddingProvider<TParams = any, TOptions = unknown> = Provider<TOptions> & {
+    readonly modalities: {
+        embedding: EmbeddingHandler<TParams>;
+    };
+};
+/**
+ * Provider with Image modality
+ *
+ * @typeParam TParams - Model-specific parameters type
+ * @typeParam TOptions - Provider-specific options type
+ */
+type ImageProvider<TParams = any, TOptions = unknown> = Provider<TOptions> & {
+    readonly modalities: {
+        image: ImageHandler<TParams>;
+    };
+};
+
+export { type BoundEmbeddingModel as B, type EmbeddingHandler as E, type ImageHandler as I, type KeyStrategy as K, type LLMProvider as L, type ModelReference as M, type ProviderConfig as P, type RetryStrategy as R, UPPError as U, type LLMHandler as a, type Provider as b, type ErrorCode as c, type Modality as d, type EmbeddingProvider as e, type ImageProvider as f, type BoundImageModel as g };
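The declarations above are the seams a consumer plugs into: KeyStrategy supplies the API key per request, RetryStrategy decides whether and when to retry, and ProviderConfig carries both alongside the connection settings. Below is a minimal sketch of custom implementations; the import path assumes these types are re-exported from the package root, and the key values and thresholds are purely illustrative.

```ts
// Sketch only: assumes KeyStrategy, RetryStrategy, ProviderConfig and UPPError
// are re-exported from the package root; adjust the import path if they are not.
import type { KeyStrategy, RetryStrategy, ProviderConfig, UPPError } from '@providerprotocol/ai';

// Hand out keys from a fixed list, one per request.
class SimpleRotation implements KeyStrategy {
  private index = 0;
  constructor(private readonly keys: string[]) {}
  getKey(): string {
    const key = this.keys[this.index % this.keys.length];
    this.index += 1;
    return key;
  }
}

// Retry transient failures up to three times with a flat 500 ms delay.
const flatRetry: RetryStrategy = {
  onRetry(error: UPPError, attempt: number): number | null {
    const transient =
      error.code === 'RATE_LIMITED' ||
      error.code === 'NETWORK_ERROR' ||
      error.code === 'TIMEOUT';
    return transient && attempt <= 3 ? 500 : null;
  },
};

// Infrastructure settings handed to a provider factory.
export const config: ProviderConfig = {
  apiKey: new SimpleRotation(['sk-key-a', 'sk-key-b']), // illustrative keys
  timeout: 30_000,
  retryStrategy: flatRetry,
};
```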
@@ -0,0 +1,118 @@
+import { K as KeyStrategy, P as ProviderConfig, d as Modality, R as RetryStrategy, U as UPPError } from './provider-CUJWjgNl.js';
+
+/**
+ * Round-robin through a list of API keys
+ */
+declare class RoundRobinKeys implements KeyStrategy {
+    private keys;
+    private index;
+    constructor(keys: string[]);
+    getKey(): string;
+}
+/**
+ * Weighted random selection of API keys
+ */
+declare class WeightedKeys implements KeyStrategy {
+    private entries;
+    private totalWeight;
+    constructor(keys: Array<{
+        key: string;
+        weight: number;
+    }>);
+    getKey(): string;
+}
+/**
+ * Dynamic key selection based on custom logic
+ */
+declare class DynamicKey implements KeyStrategy {
+    private selector;
+    constructor(selector: () => string | Promise<string>);
+    getKey(): Promise<string>;
+}
+/**
+ * Resolve API key from ProviderConfig
+ * Falls back to environment variable if provided and config.apiKey is not set
+ * Throws UPPError with AUTHENTICATION_FAILED if no key is available
+ *
+ * @param config - Provider configuration
+ * @param envVar - Environment variable name to check as fallback
+ * @param provider - Provider name for error messages
+ * @param modality - Modality for error messages
+ */
+declare function resolveApiKey(config: ProviderConfig, envVar?: string, provider?: string, modality?: Modality): Promise<string>;
+
+/**
+ * Exponential backoff retry strategy
+ */
+declare class ExponentialBackoff implements RetryStrategy {
+    private maxAttempts;
+    private baseDelay;
+    private maxDelay;
+    private jitter;
+    constructor(options?: {
+        maxAttempts?: number;
+        baseDelay?: number;
+        maxDelay?: number;
+        jitter?: boolean;
+    });
+    onRetry(error: UPPError, attempt: number): number | null;
+    private isRetryable;
+}
+/**
+ * Linear backoff retry strategy
+ */
+declare class LinearBackoff implements RetryStrategy {
+    private maxAttempts;
+    private delay;
+    constructor(options?: {
+        maxAttempts?: number;
+        delay?: number;
+    });
+    onRetry(error: UPPError, attempt: number): number | null;
+    private isRetryable;
+}
+/**
+ * No retry strategy - fail immediately
+ */
+declare class NoRetry implements RetryStrategy {
+    onRetry(): null;
+}
+/**
+ * Token bucket rate limiter with retry
+ */
+declare class TokenBucket implements RetryStrategy {
+    private tokens;
+    private maxTokens;
+    private refillRate;
+    private lastRefill;
+    private maxAttempts;
+    constructor(options?: {
+        maxTokens?: number;
+        refillRate?: number;
+        maxAttempts?: number;
+    });
+    beforeRequest(): number;
+    onRetry(error: UPPError, attempt: number): number | null;
+    reset(): void;
+    private refill;
+}
+/**
+ * Retry strategy that respects Retry-After headers
+ */
+declare class RetryAfterStrategy implements RetryStrategy {
+    private maxAttempts;
+    private fallbackDelay;
+    private lastRetryAfter?;
+    constructor(options?: {
+        maxAttempts?: number;
+        fallbackDelay?: number;
+    });
+    /**
+     * Set the Retry-After value from response headers
+     * Call this before onRetry when you have a Retry-After header
+     */
+    setRetryAfter(seconds: number): void;
+    onRetry(error: UPPError, attempt: number): number | null;
+}
+
+export { DynamicKey as D, ExponentialBackoff as E, LinearBackoff as L, NoRetry as N, RoundRobinKeys as R, TokenBucket as T, WeightedKeys as W, RetryAfterStrategy as a, resolveApiKey as r };
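The concrete strategies above cover the common cases: rotating or weighted keys, exponential or linear backoff, token-bucket throttling, and Retry-After handling. A sketch of wiring two of them into a provider configuration follows; it assumes the ./http entry point re-exports these classes (see the exports map in package.json below) and that ProviderConfig is available from the package root, with all delays and key values illustrative.

```ts
// Sketch only: assumes ./http re-exports the strategies declared above and that
// ProviderConfig is exported from the package root.
import { RoundRobinKeys, ExponentialBackoff } from '@providerprotocol/ai/http';
import type { ProviderConfig } from '@providerprotocol/ai';

export const config: ProviderConfig = {
  // Alternate between two keys on successive requests.
  apiKey: new RoundRobinKeys(['sk-primary', 'sk-secondary']),
  // 1 s base delay doubling up to 30 s, jittered, for at most 5 attempts.
  retryStrategy: new ExponentialBackoff({
    maxAttempts: 5,
    baseDelay: 1_000,
    maxDelay: 30_000,
    jitter: true,
  }),
};
```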
package/package.json
ADDED
@@ -0,0 +1,88 @@
+{
+  "name": "@providerprotocol/ai",
+  "version": "0.0.1",
+  "description": "UPP-1.1: Unified Provider Protocol for AI inference",
+  "license": "MIT",
+  "author": {
+    "name": "Provider Protocol"
+  },
+  "type": "module",
+  "main": "./dist/index.js",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "bun": "./src/index.ts",
+      "import": "./dist/index.js",
+      "default": "./dist/index.js"
+    },
+    "./anthropic": {
+      "types": "./dist/anthropic/index.d.ts",
+      "bun": "./src/anthropic/index.ts",
+      "import": "./dist/anthropic/index.js",
+      "default": "./dist/anthropic/index.js"
+    },
+    "./openai": {
+      "types": "./dist/openai/index.d.ts",
+      "bun": "./src/openai/index.ts",
+      "import": "./dist/openai/index.js",
+      "default": "./dist/openai/index.js"
+    },
+    "./google": {
+      "types": "./dist/google/index.d.ts",
+      "bun": "./src/google/index.ts",
+      "import": "./dist/google/index.js",
+      "default": "./dist/google/index.js"
+    },
+    "./http": {
+      "types": "./dist/http/index.d.ts",
+      "bun": "./src/http/index.ts",
+      "import": "./dist/http/index.js",
+      "default": "./dist/http/index.js"
+    }
+  },
+  "files": [
+    "dist",
+    "src",
+    "README.md"
+  ],
+  "scripts": {
+    "build": "tsup",
+    "prepublishOnly": "bun run build",
+    "test": "bun test",
+    "test:unit": "bun test tests/unit",
+    "test:live": "bun test tests/live",
+    "typecheck": "tsc --noEmit"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "devDependencies": {
+    "@types/bun": "latest",
+    "tsup": "^8.5.1"
+  },
+  "peerDependencies": {
+    "typescript": "^5"
+  },
+  "keywords": [
+    "ai",
+    "llm",
+    "anthropic",
+    "openai",
+    "google",
+    "gemini",
+    "claude",
+    "gpt",
+    "provider",
+    "protocol",
+    "unified"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/providerprotocol/ai"
+  },
+  "bugs": {
+    "url": "https://github.com/providerprotocol/ai/issues"
+  }
+}
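The exports map gives each provider its own entry point, so a consumer imports only the provider it actually uses; the "bun" conditions resolve straight to the TypeScript sources for Bun, while everything else resolves to the built files under dist. A usage sketch follows; the named export openai and the model id are assumptions based on the ./openai entry point, not something this diff confirms.

```ts
// Sketch only: the named export `openai` and the model id below are assumptions;
// check the published typings for the actual provider export names.
import { openai } from '@providerprotocol/ai/openai';

// A Provider is callable and returns a ModelReference bound to that provider.
const model = openai('gpt-4.1-mini');
console.log(model.modelId, model.provider.name);
```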