@providerprotocol/ai 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -0
- package/dist/anthropic/index.js +1 -24
- package/dist/anthropic/index.js.map +1 -1
- package/dist/google/index.js +3 -46
- package/dist/google/index.js.map +1 -1
- package/dist/index.js +5 -1
- package/dist/index.js.map +1 -1
- package/dist/ollama/index.js +13 -44
- package/dist/ollama/index.js.map +1 -1
- package/dist/openai/index.d.ts +46 -27
- package/dist/openai/index.js +2 -116
- package/dist/openai/index.js.map +1 -1
- package/dist/openrouter/index.d.ts +23 -10
- package/dist/openrouter/index.js +2 -85
- package/dist/openrouter/index.js.map +1 -1
- package/dist/xai/index.d.ts +306 -0
- package/dist/xai/index.js +1696 -0
- package/dist/xai/index.js.map +1 -0
- package/package.json +9 -1
- package/src/core/llm.ts +6 -1
- package/src/openai/index.ts +2 -1
- package/src/openrouter/index.ts +2 -1
- package/src/providers/anthropic/transform.ts +7 -29
- package/src/providers/google/transform.ts +9 -49
- package/src/providers/ollama/transform.ts +27 -49
- package/src/providers/openai/index.ts +12 -8
- package/src/providers/openai/llm.completions.ts +9 -9
- package/src/providers/openai/llm.responses.ts +9 -9
- package/src/providers/openai/transform.completions.ts +12 -79
- package/src/providers/openai/transform.responses.ts +12 -54
- package/src/providers/openai/types.ts +54 -31
- package/src/providers/openrouter/index.ts +12 -8
- package/src/providers/openrouter/llm.completions.ts +9 -9
- package/src/providers/openrouter/llm.responses.ts +9 -9
- package/src/providers/openrouter/transform.completions.ts +12 -79
- package/src/providers/openrouter/transform.responses.ts +12 -25
- package/src/providers/openrouter/types.ts +22 -28
- package/src/providers/xai/index.ts +223 -0
- package/src/providers/xai/llm.completions.ts +201 -0
- package/src/providers/xai/llm.messages.ts +195 -0
- package/src/providers/xai/llm.responses.ts +211 -0
- package/src/providers/xai/transform.completions.ts +565 -0
- package/src/providers/xai/transform.messages.ts +448 -0
- package/src/providers/xai/transform.responses.ts +678 -0
- package/src/providers/xai/types.ts +938 -0
- package/src/xai/index.ts +41 -0
package/dist/openai/index.js
CHANGED

@@ -19,79 +19,12 @@ import {
 function transformRequest(request, modelId) {
   const params = request.params ?? {};
   const openaiRequest = {
+    ...params,
     model: modelId,
     messages: transformMessages(request.messages, request.system)
   };
-  if (params.temperature !== void 0) {
-    openaiRequest.temperature = params.temperature;
-  }
-  if (params.top_p !== void 0) {
-    openaiRequest.top_p = params.top_p;
-  }
-  if (params.max_completion_tokens !== void 0) {
-    openaiRequest.max_completion_tokens = params.max_completion_tokens;
-  } else if (params.max_tokens !== void 0) {
-    openaiRequest.max_tokens = params.max_tokens;
-  }
-  if (params.frequency_penalty !== void 0) {
-    openaiRequest.frequency_penalty = params.frequency_penalty;
-  }
-  if (params.presence_penalty !== void 0) {
-    openaiRequest.presence_penalty = params.presence_penalty;
-  }
-  if (params.stop !== void 0) {
-    openaiRequest.stop = params.stop;
-  }
-  if (params.n !== void 0) {
-    openaiRequest.n = params.n;
-  }
-  if (params.logprobs !== void 0) {
-    openaiRequest.logprobs = params.logprobs;
-  }
-  if (params.top_logprobs !== void 0) {
-    openaiRequest.top_logprobs = params.top_logprobs;
-  }
-  if (params.seed !== void 0) {
-    openaiRequest.seed = params.seed;
-  }
-  if (params.user !== void 0) {
-    openaiRequest.user = params.user;
-  }
-  if (params.logit_bias !== void 0) {
-    openaiRequest.logit_bias = params.logit_bias;
-  }
-  if (params.reasoning_effort !== void 0) {
-    openaiRequest.reasoning_effort = params.reasoning_effort;
-  }
-  if (params.verbosity !== void 0) {
-    openaiRequest.verbosity = params.verbosity;
-  }
-  if (params.service_tier !== void 0) {
-    openaiRequest.service_tier = params.service_tier;
-  }
-  if (params.store !== void 0) {
-    openaiRequest.store = params.store;
-  }
-  if (params.metadata !== void 0) {
-    openaiRequest.metadata = params.metadata;
-  }
-  if (params.prediction !== void 0) {
-    openaiRequest.prediction = params.prediction;
-  }
-  if (params.prompt_cache_key !== void 0) {
-    openaiRequest.prompt_cache_key = params.prompt_cache_key;
-  }
-  if (params.prompt_cache_retention !== void 0) {
-    openaiRequest.prompt_cache_retention = params.prompt_cache_retention;
-  }
-  if (params.safety_identifier !== void 0) {
-    openaiRequest.safety_identifier = params.safety_identifier;
-  }
   if (request.tools && request.tools.length > 0) {
     openaiRequest.tools = request.tools.map(transformTool);
-    if (params.parallel_tool_calls !== void 0) {
-      openaiRequest.parallel_tool_calls = params.parallel_tool_calls;
-    }
   }
   if (request.structure) {
     const schema = {

@@ -112,8 +45,6 @@ function transformRequest(request, modelId) {
        strict: true
      }
    };
-  } else if (params.response_format !== void 0) {
-    openaiRequest.response_format = params.response_format;
   }
   return openaiRequest;
 }

@@ -619,57 +550,12 @@ function createCompletionsLLMHandler() {
 function transformRequest2(request, modelId) {
   const params = request.params ?? {};
   const openaiRequest = {
+    ...params,
     model: modelId,
     input: transformInputItems(request.messages, request.system)
   };
-  if (params.temperature !== void 0) {
-    openaiRequest.temperature = params.temperature;
-  }
-  if (params.top_p !== void 0) {
-    openaiRequest.top_p = params.top_p;
-  }
-  if (params.max_output_tokens !== void 0) {
-    openaiRequest.max_output_tokens = params.max_output_tokens;
-  } else if (params.max_completion_tokens !== void 0) {
-    openaiRequest.max_output_tokens = params.max_completion_tokens;
-  } else if (params.max_tokens !== void 0) {
-    openaiRequest.max_output_tokens = params.max_tokens;
-  }
-  if (params.service_tier !== void 0) {
-    openaiRequest.service_tier = params.service_tier;
-  }
-  if (params.store !== void 0) {
-    openaiRequest.store = params.store;
-  }
-  if (params.metadata !== void 0) {
-    openaiRequest.metadata = params.metadata;
-  }
-  if (params.truncation !== void 0) {
-    openaiRequest.truncation = params.truncation;
-  }
-  if (params.include !== void 0) {
-    openaiRequest.include = params.include;
-  }
-  if (params.background !== void 0) {
-    openaiRequest.background = params.background;
-  }
-  if (params.previous_response_id !== void 0) {
-    openaiRequest.previous_response_id = params.previous_response_id;
-  }
-  if (params.reasoning !== void 0) {
-    openaiRequest.reasoning = { ...params.reasoning };
-  }
-  if (params.reasoning_effort !== void 0) {
-    openaiRequest.reasoning = {
-      ...openaiRequest.reasoning ?? {},
-      effort: params.reasoning_effort
-    };
-  }
   if (request.tools && request.tools.length > 0) {
     openaiRequest.tools = request.tools.map(transformTool2);
-    if (params.parallel_tool_calls !== void 0) {
-      openaiRequest.parallel_tool_calls = params.parallel_tool_calls;
-    }
   }
   if (request.structure) {
     const schema = {
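The substance of this change is the same in both transformRequest (Chat Completions) and transformRequest2 (Responses): the long chain of `if (params.X !== void 0)` copies is replaced by spreading `params` into the request body before the explicit fields. A minimal sketch of that pattern follows, using simplified stand-in types; the package's real OpenAICompletionsRequest type, tool mapping, and transformMessages call are not reproduced here.

// Sketch only: simplified stand-in types, not the package's real request types.
type Params = Record<string, unknown>;

interface SimplifiedRequest {
  params?: Params;
  messages: unknown[];
  system?: string;
}

function buildCompletionsBody(request: SimplifiedRequest, modelId: string) {
  const params = request.params ?? {};
  return {
    // Spread user params first so every provider option (temperature, seed,
    // response_format, parallel_tool_calls, ...) passes through unchanged.
    ...params,
    // Explicit fields come after the spread, so they always take precedence.
    model: modelId,
    messages: request.messages, // the real code builds this via transformMessages(...)
  };
}

Because the spread comes first, the explicit model and messages (or input) fields always win, and the request.structure branch still overwrites any response_format carried in params, which is why the old `else if (params.response_format !== void 0)` fallback could be removed.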
package/dist/openai/index.js.map
CHANGED

@@ -1 +1 @@
- {"version":3,"sources":["../../src/providers/openai/transform.completions.ts","../../src/providers/openai/llm.completions.ts","../../src/providers/openai/transform.responses.ts","../../src/providers/openai/llm.responses.ts","../../src/providers/openai/index.ts"],"sourcesContent":[…],"mappings":"…"}
AK;AAAA,EACvB;AAEA,SAAO;AACT;AAKO,SAAS,qBACd,SAC4B;AAC5B,MAAI,CAAC,oBAAoB,OAAO,GAAG;AACjC,UAAM,SAAS,iBAAiB,OAAO;AACvC,WAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,EAC9B;AAEA,SAAO,QAAQ,QAAQ,IAAI,CAAC,YAAY;AAAA,IACtC,MAAM;AAAA,IACN,cAAc,OAAO;AAAA,IACrB,SACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,EACpC,EAAE;AACJ;AAKA,SAAS,sBAAsB,OAAwC;AACrE,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK;AAAA,IAE1C,KAAK,SAAS;AACZ,YAAM,aAAa;AACnB,UAAI;AAEJ,UAAI,WAAW,OAAO,SAAS,UAAU;AACvC,cAAM,QAAQ,WAAW,QAAQ,WAAW,WAAW,OAAO,IAAI;AAAA,MACpE,WAAW,WAAW,OAAO,SAAS,OAAO;AAC3C,cAAM,WAAW,OAAO;AAAA,MAC1B,WAAW,WAAW,OAAO,SAAS,SAAS;AAE7C,cAAM,SAAS;AAAA,UACb,MAAM,KAAK,WAAW,OAAO,IAAI,EAC9B,IAAI,CAAC,MAAM,OAAO,aAAa,CAAC,CAAC,EACjC,KAAK,EAAE;AAAA,QACZ;AACA,cAAM,QAAQ,WAAW,QAAQ,WAAW,MAAM;AAAA,MACpD,OAAO;AACL,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,WAAW,EAAE,IAAI;AAAA,MACnB;AAAA,IACF;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,6BAA6B,MAAM,IAAI,EAAE;AAAA,EAC7D;AACF;AAKA,SAAS,cAAc,MAAmC;AACxD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY;AAAA,QACV,MAAM;AAAA,QACN,YAAY,KAAK,WAAW;AAAA,QAC5B,UAAU,KAAK,WAAW;AAAA,QAC1B,GAAI,KAAK,WAAW,yBAAyB,SACzC,EAAE,sBAAsB,KAAK,WAAW,qBAAqB,IAC7D,CAAC;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,kBAAkB,MAA8C;AAC9E,QAAM,SAAS,KAAK,QAAQ,CAAC;AAC7B,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAGA,QAAM,cAA2B,CAAC;AAClC,MAAI;AACJ,MAAI,OAAO,QAAQ,SAAS;AAC1B,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,QAAQ,QAAQ,CAAC;AAE/D,QAAI;AACF,uBAAiB,KAAK,MAAM,OAAO,QAAQ,OAAO;AAAA,IACpD,QAAQ;AAAA,IAER;AAAA,EACF;AACA,MAAI,aAAa;AACjB,MAAI,OAAO,QAAQ,SAAS;AAC1B,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,QAAQ,QAAQ,CAAC;AAC/D,iBAAa;AAAA,EACf;AAGA,QAAM,YAAwB,CAAC;AAC/B,MAAI,OAAO,QAAQ,YAAY;AAC7B,eAAW,QAAQ,OAAO,QAAQ,YAAY;AAC5C,UAAI,OAAgC,CAAC;AACrC,UAAI;AACF,eAAO,KAAK,MAAM,KAAK,SAAS,SAAS;AAAA,MAC3C,QAAQ;AAAA,MAER;AACA,gBAAU,KAAK;AAAA,QACb,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK,SAAS;AAAA,QACxB,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,KAAK;AAAA,MACT,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,KAAK;AAAA,UACZ,eAAe,OAAO;AAAA,UACtB,oBAAoB,KAAK;AAAA,UACzB,cAAc,KAAK;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,KAAK,MAAM;AAAA,IACxB,cAAc,KAAK,MAAM;AAAA,IACzB,aAAa,KAAK,MAAM;AAAA,EAC1B;AAGA,MAAI,aAAa;AACjB,UAAQ,OAAO,eAAe;AAAA,IAC5B,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,EACJ;AACA,MAAI,cAAc,eAAe,kBAAkB;AACjD,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;AAmBO,SAAS,oBAA4C;AAC1D,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,WAAW,oBAAI,IAAI;AAAA,IACnB,cAAc;AAAA,IACd,aAAa;AAAA,IACb,cAAc;AAAA,IACd,YAAY;AAAA,EACd;AACF;AAMO,SAAS,qBACd,OACA,OACe;AACf,QAAM,SAAwB,CAAC;AAG/B,MAAI,MAAM,MAAM,CAAC,MAAM,IAAI;AACzB,UAAM,KAAK,MAAM;AACjB,WAAO,KAAK,EAAE,MAAM,iBAAiB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAAA,EAC5D;AACA,MAAI,MAAM,OAAO;AACf,UAAM,QAAQ,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,MAAI,QAAQ;AAEV,QAAI,OAAO,MAAM,SAAS;AACxB,YAAM,QAAQ,OAAO,MAAM;AAC3B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,OAAO,EAAE,MAAM,OAAO,MAAM,QAAQ;AAAA,MACtC,CAAC;AAAA,IACH;AACA,QAAI,OAAO,MAAM,SAAS;AACxB,YAAM,aAAa;AACnB,YAAM,QAAQ,OAAO,MAAM;AAC3B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,OAAO,EAAE,MAAM,OAAO,MAAM,QAAQ;AAAA,MACtC,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,MAAM,YAAY;AAC3B,iBAAW,iBAAiB,OAAO,MAAM,YAAY;AACnD,cAAM,QAAQ,cAAc;AAC5B,YAAI,WAAW,MAAM,UAAU,IAAI,KAAK;AAExC,YAAI
,CAAC,UAAU;AACb,qBAAW,EAAE,IAAI,IAAI,MAAM,IAAI,WAAW,GAAG;AAC7C,gBAAM,UAAU,IAAI,OAAO,QAAQ;AAAA,QACrC;AAEA,YAAI,cAAc,IAAI;AACpB,mBAAS,KAAK,cAAc;AAAA,QAC9B;AACA,YAAI,cAAc,UAAU,MAAM;AAChC,mBAAS,OAAO,cAAc,SAAS;AAAA,QACzC;AACA,YAAI,cAAc,UAAU,WAAW;AACrC,mBAAS,aAAa,cAAc,SAAS;AAC7C,iBAAO,KAAK;AAAA,YACV,MAAM;AAAA,YACN;AAAA,YACA,OAAO;AAAA,cACL,YAAY,SAAS;AAAA,cACrB,UAAU,SAAS;AAAA,cACnB,eAAe,cAAc,SAAS;AAAA,YACxC;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,eAAe;AACxB,YAAM,eAAe,OAAO;AAC5B,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAAA,IAC3D;AAAA,EACF;AAGA,MAAI,MAAM,OAAO;AACf,UAAM,cAAc,MAAM,MAAM;AAChC,UAAM,eAAe,MAAM,MAAM;AAAA,EACnC;AAEA,SAAO;AACT;AAKO,SAAS,uBAAuB,OAA4C;AACjF,QAAM,cAA2B,CAAC;AAClC,MAAI;AACJ,MAAI,MAAM,MAAM;AACd,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK,CAAC;AAEnD,QAAI;AACF,uBAAiB,KAAK,MAAM,MAAM,IAAI;AAAA,IACxC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,YAAwB,CAAC;AAC/B,aAAW,CAAC,EAAE,QAAQ,KAAK,MAAM,WAAW;AAC1C,QAAI,OAAgC,CAAC;AACrC,QAAI,SAAS,WAAW;AACtB,UAAI;AACF,eAAO,KAAK,MAAM,SAAS,SAAS;AAAA,MACtC,QAAQ;AAAA,MAER;AAAA,IACF;AACA,cAAU,KAAK;AAAA,MACb,YAAY,SAAS;AAAA,MACrB,UAAU,SAAS;AAAA,MACnB,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,MAAM;AAAA,MACV,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,MAAM;AAAA,UACb,eAAe,MAAM;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,MAAM;AAAA,IACnB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM,cAAc,MAAM;AAAA,EACzC;AAGA,MAAI,aAAa;AACjB,UAAQ,MAAM,cAAc;AAAA,IAC1B,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,EACJ;AACA,MAAI,MAAM,cAAc,eAAe,kBAAkB;AACvD,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;;;AClmBA,IAAM,iBAAiB;AAKvB,IAAM,sBAAuC;AAAA,EAC3C,WAAW;AAAA,EACX,OAAO;AAAA,EACP,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,SAAS,8BAA2D;AAEzE,MAAI,cAAmD;AAEvD,SAAO;AAAA,IACL,aAAa,UAAwC;AACnD,oBAAc;AAAA,IAChB;AAAA,IAEA,KAAK,SAAiD;AAEpD,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,QAAwC;AAAA,QAC5C;AAAA,QACA,cAAc;AAAA,QAEd,IAAI,WAAyC;AAC3C,iBAAO;AAAA,QACT;AAAA,QAEA,MAAM,SAAS,SAA4D;AACzE,gBAAM,SAAS,MAAM;AAAA,YACnB,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,gBAAM,OAAO,iBAAiB,SAAS,OAAO;AAE9C,gBAAM,WAAW,MAAM;AAAA,YACrB;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,SAAS;AAAA,gBACP,gBAAgB;AAAA,gBAChB,eAAe,UAAU,MAAM;AAAA,cACjC;AAAA,cACA,MAAM,KAAK,UAAU,IAAI;AAAA,cACzB,QAAQ,QAAQ;AAAA,YAClB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,iBAAO,kBAAkB,IAAI;AAAA,QAC/B;AAAA,QAEA,OAAO,SAAuD;AAC5D,gBAAM,QAAQ,kBAAkB;AAChC,cAAI;AACJ,cAAI;AAEJ,gBAAM,kBAAkB,IAAI,QAAqB,CAAC,SAAS,WAAW;AACpE,8BAAkB;AAClB,6BAAiB;AAAA,UACnB,CAAC;AAED,0BAAgB,iBAA6D;AAC3E,gBAAI;AACF,oBAAM,SAAS,MAAM;AAAA,gBACnB,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAEA,oBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,oBAAM,OAAO,iBAAiB,SAAS,OAAO;AAC9C,mBAAK,SAAS;AACd,mBAAK,iBAAiB,EAAE,eAAe,KAAK;AAE5C,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,kBACE,QAAQ;AAAA,kBACR,SAAS;AAAA,oBACP,gBAAgB;AAAA,oBAChB,eAAe,UAAU,MAAM;AAAA,kBACjC;AAAA,kBACA,MAAM,KAAK,UAAU,IAAI;AAAA,kBACzB,QAAQ,QAAQ;AAAA,gBAClB;AAAA,gBACA,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF;AAEA,kBAAI,CAAC,SAAS,IAAI;AAChB,sBAAM,QAAQ,MAAM,mBAAmB,UAAU,UAAU,KAAK;AAChE,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,kBAAI,CAAC,SAAS,MAAM;AAClB,sBAAM,QAAQ,IAAI;AAAA,kBAChB;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,+BAAiB,QAAQ,eAAe,SA
AS,IAAI,GAAG;AAEtD,oBAAI,SAAS,UAAU;AACrB;AAAA,gBACF;AAGA,oBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,wBAAM,QAAQ;AAGd,sBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,0BAAM,YAAY,MAAM;AACxB,0BAAM,QAAQ,IAAI;AAAA,sBAChB,UAAU,WAAW;AAAA,sBACrB;AAAA,sBACA;AAAA,sBACA;AAAA,oBACF;AACA,mCAAe,KAAK;AACpB,0BAAM;AAAA,kBACR;AAEA,wBAAM,YAAY,qBAAqB,OAAO,KAAK;AACnD,6BAAW,SAAS,WAAW;AAC7B,0BAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAGA,8BAAgB,uBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AC5KO,SAASA,kBACd,SACA,SACwB;AACxB,QAAM,SAA0B,QAAQ,UAAU,CAAC;AAEnD,QAAM,gBAAwC;AAAA,IAC5C,OAAO;AAAA,IACP,OAAO,oBAAoB,QAAQ,UAAU,QAAQ,MAAM;AAAA,EAC7D;AAGA,MAAI,OAAO,gBAAgB,QAAW;AACpC,kBAAc,cAAc,OAAO;AAAA,EACrC;AACA,MAAI,OAAO,UAAU,QAAW;AAC9B,kBAAc,QAAQ,OAAO;AAAA,EAC/B;AACA,MAAI,OAAO,sBAAsB,QAAW;AAC1C,kBAAc,oBAAoB,OAAO;AAAA,EAC3C,WAAW,OAAO,0BAA0B,QAAW;AACrD,kBAAc,oBAAoB,OAAO;AAAA,EAC3C,WAAW,OAAO,eAAe,QAAW;AAC1C,kBAAc,oBAAoB,OAAO;AAAA,EAC3C;AACA,MAAI,OAAO,iBAAiB,QAAW;AACrC,kBAAc,eAAe,OAAO;AAAA,EACtC;AACA,MAAI,OAAO,UAAU,QAAW;AAC9B,kBAAc,QAAQ,OAAO;AAAA,EAC/B;AACA,MAAI,OAAO,aAAa,QAAW;AACjC,kBAAc,WAAW,OAAO;AAAA,EAClC;AACA,MAAI,OAAO,eAAe,QAAW;AACnC,kBAAc,aAAa,OAAO;AAAA,EACpC;AACA,MAAI,OAAO,YAAY,QAAW;AAChC,kBAAc,UAAU,OAAO;AAAA,EACjC;AACA,MAAI,OAAO,eAAe,QAAW;AACnC,kBAAc,aAAa,OAAO;AAAA,EACpC;AACA,MAAI,OAAO,yBAAyB,QAAW;AAC7C,kBAAc,uBAAuB,OAAO;AAAA,EAC9C;AACA,MAAI,OAAO,cAAc,QAAW;AAClC,kBAAc,YAAY,EAAE,GAAG,OAAO,UAAU;AAAA,EAClD;AACA,MAAI,OAAO,qBAAqB,QAAW;AACzC,kBAAc,YAAY;AAAA,MACxB,GAAI,cAAc,aAAa,CAAC;AAAA,MAChC,QAAQ,OAAO;AAAA,IACjB;AAAA,EACF;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,kBAAc,QAAQ,QAAQ,MAAM,IAAIC,cAAa;AACrD,QAAI,OAAO,wBAAwB,QAAW;AAC5C,oBAAc,sBAAsB,OAAO;AAAA,IAC7C;AAAA,EACF;AAGA,MAAI,QAAQ,WAAW;AACrB,UAAM,SAAkC;AAAA,MACtC,MAAM;AAAA,MACN,YAAY,QAAQ,UAAU;AAAA,MAC9B,UAAU,QAAQ,UAAU;AAAA,MAC5B,GAAI,QAAQ,UAAU,yBAAyB,SAC3C,EAAE,sBAAsB,QAAQ,UAAU,qBAAqB,IAC/D,EAAE,sBAAsB,MAAM;AAAA,IACpC;AACA,QAAI,QAAQ,UAAU,aAAa;AACjC,aAAO,cAAc,QAAQ,UAAU;AAAA,IACzC;AAEA,kBAAc,OAAO;AAAA,MACnB,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,QACN,aAAa,QAAQ,UAAU;AAAA,QAC/B;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,oBACP,UACA,QACqC;AACrC,QAAM,SAAqC,CAAC;AAE5C,MAAI,QAAQ;AACV,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQC,kBAAiB,OAAO;AACtC,WAAO,KAAK,GAAG,KAAK;AAAA,EACtB;AAGA,MAAI,OAAO,WAAW,KAAK,OAAO,CAAC,GAAG,SAAS,WAAW;AACxD,UAAM,OAAO,OAAO,CAAC;AACrB,QAAI,KAAK,SAAS,UAAU,OAAO,KAAK,YAAY,UAAU;AAC5D,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAASC,oBAAgD,SAAmB;AAC1E,SAAO,QAAQ,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE,SAAS,QAAQ;AAC9D;AAKA,SAASD,kBAAiB,SAA8C;AACtE,MAAI,cAAc,OAAO,GAAG;AAC1B,UAAM,eAAeC,oBAAmB,QAAQ,OAAO;AAEvD,QAAI,aAAa,WAAW,KAAK,aAAa,CAAC,GAAG,SAAS,QAAQ;AACjE,aAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAU,aAAa,CAAC,EAAgB;AAAA,QAC1C;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS,aAAa,IAAI,oBAAoB;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAEA,MAAI,mBAAmB,OAAO,GAAG;AAC/B,UAAM,eAAeA,oBAAmB,QAAQ,OAAO;AACvD,UAAM,QAAoC,CAAC;AAG3C,UAAM,eAA6C,aAChD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,OAAmC;AAAA,MACvC,MAAM;AAAA,MACN,MAAM,EAAE;AAAA,IACV,EAAE;AAGJ,QAAI,aAAa,SAAS,GAAG;AAC3B,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,aAAa,QAAQ,UAAU;AAGrC,UAAM,oBAAoB,YAAY;AAEtC,QAAI,qBAAqB,kBAAkB,SAAS,GAAG;AACrD,iBAAW,MAAM,mBAAmB;AAClC,cAAM,KAAK;AA
AA,UACT,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,SAAS,GAAG;AAAA,UACZ,MAAM,GAAG;AAAA,UACT,WAAW,GAAG;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC5D,iBAAW,QAAQ,QAAQ,WAAW;AACpC,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,MAAM,KAAK,UAAU;AAAA,UACzB,SAAS,KAAK;AAAA,UACd,MAAM,KAAK;AAAA,UACX,WAAW,KAAK,UAAU,KAAK,SAAS;AAAA,QAC1C,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,MAAI,oBAAoB,OAAO,GAAG;AAEhC,WAAO,QAAQ,QAAQ,IAAI,CAAC,YAAY;AAAA,MACtC,MAAM;AAAA,MACN,SAAS,OAAO;AAAA,MAChB,QACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,IACpC,EAAE;AAAA,EACJ;AAEA,SAAO,CAAC;AACV;AAKA,SAAS,qBAAqB,OAAiD;AAC7E,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,EAAE,MAAM,cAAc,MAAM,MAAM,KAAK;AAAA,IAEhD,KAAK,SAAS;AACZ,YAAM,aAAa;AACnB,UAAI,WAAW,OAAO,SAAS,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,QAAQ,WAAW,QAAQ,WAAW,WAAW,OAAO,IAAI;AAAA,QACzE;AAAA,MACF;AAEA,UAAI,WAAW,OAAO,SAAS,OAAO;AACpC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,WAAW,OAAO;AAAA,QAC/B;AAAA,MACF;AAEA,UAAI,WAAW,OAAO,SAAS,SAAS;AAEtC,cAAM,SAAS;AAAA,UACb,MAAM,KAAK,WAAW,OAAO,IAAI,EAC9B,IAAI,CAAC,MAAM,OAAO,aAAa,CAAC,CAAC,EACjC,KAAK,EAAE;AAAA,QACZ;AACA,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,QAAQ,WAAW,QAAQ,WAAW,MAAM;AAAA,QACzD;AAAA,MACF;AAEA,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,6BAA6B,MAAM,IAAI,EAAE;AAAA,EAC7D;AACF;AAKA,SAASF,eAAc,MAAiC;AACtD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,KAAK;AAAA,IACX,aAAa,KAAK;AAAA,IAClB,YAAY;AAAA,MACV,MAAM;AAAA,MACN,YAAY,KAAK,WAAW;AAAA,MAC5B,UAAU,KAAK,WAAW;AAAA,MAC1B,GAAI,KAAK,WAAW,yBAAyB,SACzC,EAAE,sBAAsB,KAAK,WAAW,qBAAqB,IAC7D,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAASG,mBAAkB,MAA4C;AAE5E,QAAM,cAA2B,CAAC;AAClC,QAAM,YAAwB,CAAC;AAC/B,QAAM,oBAKD,CAAC;AACN,MAAI,aAAa;AACjB,MAAI;AAEJ,aAAW,QAAQ,KAAK,QAAQ;AAC9B,QAAI,KAAK,SAAS,WAAW;AAC3B,YAAM,cAAc;AACpB,iBAAW,WAAW,YAAY,SAAS;AACzC,YAAI,QAAQ,SAAS,eAAe;AAClC,sBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,QAAQ,KAAK,CAAC;AAGrD,cAAI,mBAAmB,QAAW;AAChC,gBAAI;AACF,+BAAiB,KAAK,MAAM,QAAQ,IAAI;AAAA,YAC1C,QAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF,WAAW,QAAQ,SAAS,WAAW;AACrC,sBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AACxD,uBAAa;AAAA,QACf;AAAA,MACF;AAAA,IACF,WAAW,KAAK,SAAS,iBAAiB;AACxC,YAAM,eAAe;AACrB,UAAI,OAAgC,CAAC;AACrC,UAAI;AACF,eAAO,KAAK,MAAM,aAAa,SAAS;AAAA,MAC1C,QAAQ;AAAA,MAER;AACA,gBAAU,KAAK;AAAA,QACb,YAAY,aAAa;AAAA,QACzB,UAAU,aAAa;AAAA,QACvB,WAAW;AAAA,MACb,CAAC;AACD,wBAAkB,KAAK;AAAA,QACrB,IAAI,aAAa;AAAA,QACjB,SAAS,aAAa;AAAA,QACtB,MAAM,aAAa;AAAA,QACnB,WAAW,aAAa;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,KAAK;AAAA,MACT,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,KAAK;AAAA,UACZ,QAAQ,KAAK;AAAA;AAAA,UAEb,aAAa,KAAK;AAAA,UAClB,mBACE,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,KAAK,MAAM;AAAA,IACxB,cAAc,KAAK,MAAM;AAAA,IACzB,aAAa,KAAK,MAAM;AAAA,EAC1B;AAGA,MAAI,aAAa;AACjB,MAAI,KAAK,WAAW,aAAa;AAC/B,iBAAa,UAAU,SAAS,IAAI,aAAa;AAAA,EACnD,WAAW,KAAK,WAAW,cAAc;AACvC,iBAAa,KAAK,oBAAoB,WAAW,sBAC7C,eACA;AAAA,EACN,WAAW,KAAK,WAAW,UAAU;AACnC,iBAAa;AAAA,EACf;AACA,MAAI,cAAc,eAAe,SAAS;AACxC,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;AAsBO,SAASC,qBAA0C;AACxD,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,aAAa,oBAAI,IAAI;AAAA,IACrB,WAAW,oBAAI,IAAI;AAAA,IACnB,QAAQ;AAAA,IACR,aAAa;AAAA,IACb,cAAc;AAAA,IACd,YAAY;AAAA,EACd;AACF;AAMO,SAASC,sBACd,OACA,OACe;AACf,QAAM,SAAwB,CAAC;AAE/B,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,YAAM,KAAK,MAAM,SAAS;AAC1B,YAAM,QAAQ,MAAM,SAAS;AAC7B,aAAO,KAAK,EAAE,MAAM,iBAAiB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAC1D;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;
AACf;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf,UAAI,MAAM,SAAS,OAAO;AACxB,cAAM,cAAc,MAAM,SAAS,MAAM;AACzC,cAAM,eAAe,MAAM,SAAS,MAAM;AAAA,MAC5C;AACA,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AACzD;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AACzD;AAAA,IAEF,KAAK;AACH,UAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,cAAM,eAAe,MAAM;AAC3B,cAAM,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY,KAAK;AAAA,UAC1D,WAAW;AAAA,QACb;AACA,iBAAS,SAAS,aAAa;AAC/B,iBAAS,SAAS,aAAa;AAC/B,iBAAS,OAAO,aAAa;AAC7B,YAAI,aAAa,WAAW;AAC1B,mBAAS,YAAY,aAAa;AAAA,QACpC;AACA,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,CAAC;AAAA,MACV,CAAC;AACD;AAAA,IAEF,KAAK;AACH,UAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,cAAM,eAAe,MAAM;AAC3B,cAAM,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY,KAAK;AAAA,UAC1D,WAAW;AAAA,QACb;AACA,iBAAS,SAAS,aAAa;AAC/B,iBAAS,SAAS,aAAa;AAC/B,iBAAS,OAAO,aAAa;AAC7B,YAAI,aAAa,WAAW;AAC1B,mBAAS,YAAY,aAAa;AAAA,QACpC;AACA,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,CAAC;AAAA,MACV,CAAC;AACD;AAAA,IAEF,KAAK;AAEH,YAAM,cAAc,MAAM,YAAY,IAAI,MAAM,YAAY,KAAK;AACjE,YAAM,YAAY,IAAI,MAAM,cAAc,cAAc,MAAM,KAAK;AACnE,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,EAAE,MAAM,MAAM,MAAM;AAAA,MAC7B,CAAC;AACD;AAAA,IAEF,KAAK;AACH,YAAM,YAAY,IAAI,MAAM,cAAc,MAAM,IAAI;AACpD;AAAA,IAEF,KAAK,0BAA0B;AAC7B,YAAM,aAAa;AACnB,YAAM,iBAAiB,MAAM,YAAY,IAAI,MAAM,YAAY,KAAK;AACpE,YAAM,YAAY,IAAI,MAAM,cAAc,iBAAiB,MAAM,KAAK;AACtE,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,EAAE,MAAM,MAAM,MAAM;AAAA,MAC7B,CAAC;AACD;AAAA,IACF;AAAA,IAEA,KAAK;AACH,YAAM,aAAa;AACnB,YAAM,YAAY,IAAI,MAAM,cAAc,MAAM,OAAO;AACvD;AAAA,IAEF,KAAK,0CAA0C;AAE7C,UAAI,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY;AACrD,UAAI,CAAC,UAAU;AACb,mBAAW,EAAE,WAAW,GAAG;AAC3B,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,UAAI,MAAM,WAAW,CAAC,SAAS,QAAQ;AACrC,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,UAAI,MAAM,WAAW,CAAC,SAAS,QAAQ;AACrC,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,eAAS,aAAa,MAAM;AAC5B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO;AAAA,UACL,YAAY,SAAS,UAAU,SAAS,UAAU;AAAA,UAClD,UAAU,SAAS;AAAA,UACnB,eAAe,MAAM;AAAA,QACvB;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAAA,IAEA,KAAK,yCAAyC;AAE5C,UAAI,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY;AACrD,UAAI,CAAC,UAAU;AACb,mBAAW,EAAE,WAAW,GAAG;AAC3B,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,UAAI,MAAM,SAAS;AACjB,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,UAAI,MAAM,SAAS;AACjB,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,eAAS,OAAO,MAAM;AACtB,eAAS,YAAY,MAAM;AAC3B;AAAA,IACF;AAAA,IAEA,KAAK;AAEH;AAAA,IAEF;AAEE;AAAA,EACJ;AAEA,SAAO;AACT;AAKO,SAASC,wBAAuB,OAA0C;AAC/E,QAAM,cAA2B,CAAC;AAClC,MAAI;AAGJ,aAAW,CAAC,EAAE,IAAI,KAAK,MAAM,aAAa;AACxC,QAAI,MAAM;AACR,kBAAY,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAEvC,UAAI,mBAAmB,QAAW;AAChC,YAAI;AACF,2BAAiB,KAAK,MAAM,IAAI;AAAA,QAClC,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,YAAwB,CAAC;AAC/B,QAAM,oBAKD,CAAC;AACN,aAAW,CAAC,EAAE,QAAQ,KAAK,MAAM,WAAW;AAC1C,QAAI,OAAgC,CAAC;AACrC,QAAI,SAAS,WAAW;AACtB,UAAI;AACF,eAAO,KAAK,MAAM,SAAS,SAAS;AAAA,MACtC,QAAQ;AAAA,MAER;AAAA,IACF;AACA,UAAM,SAAS,SAAS,UAAU;AAClC,UAAM,SAAS,SAAS,UAAU,SAAS,UAAU;AACrD,UAAM,OAAO,SAAS,QAAQ;AAC9B,cAAU,KAAK;AAAA,MACb,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,WAAW;AAAA,IACb,CAAC;AAED,QAAI,UAAU,UAAU,MAAM;AAC5B,wBAAkB,KAAK;AAAA,QACrB,IAAI;AAAA,QACJ,SAAS;AAAA,QACT;AAAA,QACA,WAAW,SAAS;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,MAAM;AAAA,MACV,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA;AAAA,UAEd,aAAa,MAAM;AAAA,UACnB,mBACE,kBAAkB
,SAAS,IAAI,oBAAoB;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,MAAM;AAAA,IACnB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM,cAAc,MAAM;AAAA,EACzC;AAGA,MAAI,aAAa;AACjB,MAAI,MAAM,WAAW,aAAa;AAChC,iBAAa,UAAU,SAAS,IAAI,aAAa;AAAA,EACnD,WAAW,MAAM,WAAW,UAAU;AACpC,iBAAa;AAAA,EACf;AACA,MAAI,MAAM,cAAc,eAAe,SAAS;AAC9C,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;;;AC5rBA,IAAM,2BAA2B;AAKjC,IAAMC,uBAAuC;AAAA,EAC3C,WAAW;AAAA,EACX,OAAO;AAAA,EACP,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,SAAS,4BAAyD;AAEvE,MAAI,cAAmD;AAEvD,SAAO;AAAA,IACL,aAAa,UAAwC;AACnD,oBAAc;AAAA,IAChB;AAAA,IAEA,KAAK,SAAiD;AAEpD,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,QAAwC;AAAA,QAC5C;AAAA,QACA,cAAcA;AAAA,QAEd,IAAI,WAAyC;AAC3C,iBAAO;AAAA,QACT;AAAA,QAEA,MAAM,SAAS,SAA4D;AACzE,gBAAM,SAAS,MAAM;AAAA,YACnB,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,gBAAM,OAAOC,kBAAiB,SAAS,OAAO;AAE9C,gBAAM,WAAW,MAAM;AAAA,YACrB;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,SAAS;AAAA,gBACP,gBAAgB;AAAA,gBAChB,eAAe,UAAU,MAAM;AAAA,cACjC;AAAA,cACA,MAAM,KAAK,UAAU,IAAI;AAAA,cACzB,QAAQ,QAAQ;AAAA,YAClB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,OAAQ,MAAM,SAAS,KAAK;AAGlC,cAAI,KAAK,WAAW,YAAY,KAAK,OAAO;AAC1C,kBAAM,IAAI;AAAA,cACR,KAAK,MAAM;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAEA,iBAAOC,mBAAkB,IAAI;AAAA,QAC/B;AAAA,QAEA,OAAO,SAAuD;AAC5D,gBAAM,QAAQC,mBAAkB;AAChC,cAAI;AACJ,cAAI;AAEJ,gBAAM,kBAAkB,IAAI,QAAqB,CAAC,SAAS,WAAW;AACpE,8BAAkB;AAClB,6BAAiB;AAAA,UACnB,CAAC;AAED,0BAAgB,iBAA6D;AAC3E,gBAAI;AACF,oBAAM,SAAS,MAAM;AAAA,gBACnB,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAEA,oBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,oBAAM,OAAOF,kBAAiB,SAAS,OAAO;AAC9C,mBAAK,SAAS;AAEd,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,kBACE,QAAQ;AAAA,kBACR,SAAS;AAAA,oBACP,gBAAgB;AAAA,oBAChB,eAAe,UAAU,MAAM;AAAA,kBACjC;AAAA,kBACA,MAAM,KAAK,UAAU,IAAI;AAAA,kBACzB,QAAQ,QAAQ;AAAA,gBAClB;AAAA,gBACA,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF;AAEA,kBAAI,CAAC,SAAS,IAAI;AAChB,sBAAM,QAAQ,MAAM,mBAAmB,UAAU,UAAU,KAAK;AAChE,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,kBAAI,CAAC,SAAS,MAAM;AAClB,sBAAM,QAAQ,IAAI;AAAA,kBAChB;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,+BAAiB,QAAQ,eAAe,SAAS,IAAI,GAAG;AAEtD,oBAAI,SAAS,UAAU;AACrB;AAAA,gBACF;AAGA,oBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,wBAAM,QAAQ;AAGd,sBAAI,MAAM,SAAS,SAAS;AAC1B,0BAAM,aAAa;AACnB,0BAAM,QAAQ,IAAI;AAAA,sBAChB,WAAW,MAAM;AAAA,sBACjB;AAAA,sBACA;AAAA,sBACA;AAAA,oBACF;AACA,mCAAe,KAAK;AACpB,0BAAM;AAAA,kBACR;AAEA,wBAAM,YAAYG,sBAAqB,OAAO,KAAK;AACnD,6BAAW,YAAY,WAAW;AAChC,0BAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAGA,8BAAgBC,wBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;ACtJA,SAAS,uBAAuC;AAE9C,MAAI,iBAA8C;AAGlD,QAAM,mBAAmB,0BAA0B;AACnD,QAAM,qBAAqB,4BAA4B;AAEvD,QAAM,KAAK,SACT,SACA,SACuC;AACvC,UAAM,UAAU,SAAS,OAAO;AAChC,qBAAiB;AACjB,WAAO,EAAE,SAAS,SAAS;AAAA,EAC7B;AAGA,QAAM,aAAa;AAAA,IACjB,IAAI,MAAmC;AACrC,aAAO,mBAAmB,gBACtB,qBACA;AAAA,IACN;AAAA,EACF;AAGA,SAAO,iBAAiB,IAAI;AAAA,IAC1B,MAAM;AAAA,MACJ,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,IACA,YAAY;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,EACF,CAAC;AAED,QAAM,WAAW;AAGjB,mBAAiB,eAAe,QAAmD;AACnF,qBAAmB,eAAe,QAAmD;AAErF,SAAO;AACT;AA6BO,IAAM,SAA
S,qBAAqB;","names":["transformRequest","transformTool","transformMessage","filterValidContent","transformResponse","createStreamState","transformStreamEvent","buildResponseFromState","OPENAI_CAPABILITIES","transformRequest","transformResponse","createStreamState","transformStreamEvent","buildResponseFromState"]}
|
+
1
|
{"version":3,"sources":["../../src/providers/openai/transform.completions.ts","../../src/providers/openai/llm.completions.ts","../../src/providers/openai/transform.responses.ts","../../src/providers/openai/llm.responses.ts","../../src/providers/openai/index.ts"],"sourcesContent":["import type { LLMRequest, LLMResponse } from '../../types/llm.ts';\nimport type { Message } from '../../types/messages.ts';\nimport type { StreamEvent } from '../../types/stream.ts';\nimport type { Tool, ToolCall } from '../../types/tool.ts';\nimport type { TokenUsage } from '../../types/turn.ts';\nimport type { ContentBlock, TextBlock, ImageBlock } from '../../types/content.ts';\nimport {\n AssistantMessage,\n isUserMessage,\n isAssistantMessage,\n isToolResultMessage,\n} from '../../types/messages.ts';\nimport type {\n OpenAICompletionsParams,\n OpenAICompletionsRequest,\n OpenAICompletionsMessage,\n OpenAIUserContent,\n OpenAICompletionsTool,\n OpenAICompletionsResponse,\n OpenAICompletionsStreamChunk,\n OpenAIToolCall,\n} from './types.ts';\n\n/**\n * Transform UPP request to OpenAI Chat Completions format\n *\n * Params are spread directly to allow pass-through of any OpenAI API fields,\n * even those not explicitly defined in our type. This enables developers to\n * use new API features without waiting for library updates.\n */\nexport function transformRequest(\n request: LLMRequest<OpenAICompletionsParams>,\n modelId: string\n): OpenAICompletionsRequest {\n const params = request.params ?? ({} as OpenAICompletionsParams);\n\n // Spread params to pass through all fields, then set required fields\n const openaiRequest: OpenAICompletionsRequest = {\n ...params,\n model: modelId,\n messages: transformMessages(request.messages, request.system),\n };\n\n // Tools come from request, not params\n if (request.tools && request.tools.length > 0) {\n openaiRequest.tools = request.tools.map(transformTool);\n }\n\n // Structured output via response_format (overrides params.response_format if set)\n if (request.structure) {\n const schema: Record<string, unknown> = {\n type: 'object',\n properties: request.structure.properties,\n required: request.structure.required,\n ...(request.structure.additionalProperties !== undefined\n ? 
{ additionalProperties: request.structure.additionalProperties }\n : { additionalProperties: false }),\n };\n if (request.structure.description) {\n schema.description = request.structure.description;\n }\n\n openaiRequest.response_format = {\n type: 'json_schema',\n json_schema: {\n name: 'json_response',\n description: request.structure.description,\n schema,\n strict: true,\n },\n };\n }\n\n return openaiRequest;\n}\n\n/**\n * Transform messages including system prompt\n */\nfunction transformMessages(\n messages: Message[],\n system?: string\n): OpenAICompletionsMessage[] {\n const result: OpenAICompletionsMessage[] = [];\n\n // Add system message first if present\n if (system) {\n result.push({\n role: 'system',\n content: system,\n });\n }\n\n // Transform each message\n for (const message of messages) {\n // Handle tool result messages specially - they need to produce multiple messages\n if (isToolResultMessage(message)) {\n const toolMessages = transformToolResults(message);\n result.push(...toolMessages);\n } else {\n const transformed = transformMessage(message);\n if (transformed) {\n result.push(transformed);\n }\n }\n }\n\n return result;\n}\n\n/**\n * Filter to only valid content blocks with a type property\n */\nfunction filterValidContent<T extends { type?: string }>(content: T[]): T[] {\n return content.filter((c) => c && typeof c.type === 'string');\n}\n\n/**\n * Transform a UPP Message to OpenAI format\n */\nfunction transformMessage(message: Message): OpenAICompletionsMessage | null {\n if (isUserMessage(message)) {\n const validContent = filterValidContent(message.content);\n // Check if we can use simple string content\n if (validContent.length === 1 && validContent[0]?.type === 'text') {\n return {\n role: 'user',\n content: (validContent[0] as TextBlock).text,\n };\n }\n return {\n role: 'user',\n content: validContent.map(transformContentBlock),\n };\n }\n\n if (isAssistantMessage(message)) {\n const validContent = filterValidContent(message.content);\n // Extract text content\n const textContent = validContent\n .filter((c): c is TextBlock => c.type === 'text')\n .map((c) => c.text)\n .join('');\n\n const assistantMessage: OpenAICompletionsMessage = {\n role: 'assistant',\n content: textContent || null,\n };\n\n // Add tool calls if present\n if (message.toolCalls && message.toolCalls.length > 0) {\n (assistantMessage as { tool_calls?: OpenAIToolCall[] }).tool_calls =\n message.toolCalls.map((call) => ({\n id: call.toolCallId,\n type: 'function' as const,\n function: {\n name: call.toolName,\n arguments: JSON.stringify(call.arguments),\n },\n }));\n }\n\n return assistantMessage;\n }\n\n if (isToolResultMessage(message)) {\n // Tool results are sent as individual tool messages\n // Return the first one and handle multiple in a different way\n // Actually, we need to return multiple messages for multiple tool results\n // This is handled by the caller - transform each result to a message\n const results = message.results.map((result) => ({\n role: 'tool' as const,\n tool_call_id: result.toolCallId,\n content:\n typeof result.result === 'string'\n ? result.result\n : JSON.stringify(result.result),\n }));\n\n // For now, return the first result - caller should handle multiple\n return results[0] ?? 
null;\n }\n\n return null;\n}\n\n/**\n * Transform multiple tool results to messages\n */\nexport function transformToolResults(\n message: Message\n): OpenAICompletionsMessage[] {\n if (!isToolResultMessage(message)) {\n const single = transformMessage(message);\n return single ? [single] : [];\n }\n\n return message.results.map((result) => ({\n role: 'tool' as const,\n tool_call_id: result.toolCallId,\n content:\n typeof result.result === 'string'\n ? result.result\n : JSON.stringify(result.result),\n }));\n}\n\n/**\n * Transform a content block to OpenAI format\n */\nfunction transformContentBlock(block: ContentBlock): OpenAIUserContent {\n switch (block.type) {\n case 'text':\n return { type: 'text', text: block.text };\n\n case 'image': {\n const imageBlock = block as ImageBlock;\n let url: string;\n\n if (imageBlock.source.type === 'base64') {\n url = `data:${imageBlock.mimeType};base64,${imageBlock.source.data}`;\n } else if (imageBlock.source.type === 'url') {\n url = imageBlock.source.url;\n } else if (imageBlock.source.type === 'bytes') {\n // Convert bytes to base64\n const base64 = btoa(\n Array.from(imageBlock.source.data)\n .map((b) => String.fromCharCode(b))\n .join('')\n );\n url = `data:${imageBlock.mimeType};base64,${base64}`;\n } else {\n throw new Error('Unknown image source type');\n }\n\n return {\n type: 'image_url',\n image_url: { url },\n };\n }\n\n default:\n throw new Error(`Unsupported content type: ${block.type}`);\n }\n}\n\n/**\n * Transform a UPP Tool to OpenAI format\n */\nfunction transformTool(tool: Tool): OpenAICompletionsTool {\n return {\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: {\n type: 'object',\n properties: tool.parameters.properties,\n required: tool.parameters.required,\n ...(tool.parameters.additionalProperties !== undefined\n ? { additionalProperties: tool.parameters.additionalProperties }\n : {}),\n },\n },\n };\n}\n\n/**\n * Transform OpenAI response to UPP LLMResponse\n */\nexport function transformResponse(data: OpenAICompletionsResponse): LLMResponse {\n const choice = data.choices[0];\n if (!choice) {\n throw new Error('No choices in OpenAI response');\n }\n\n // Extract text content\n const textContent: TextBlock[] = [];\n let structuredData: unknown;\n if (choice.message.content) {\n textContent.push({ type: 'text', text: choice.message.content });\n // Try to parse as JSON for structured output (native JSON mode)\n try {\n structuredData = JSON.parse(choice.message.content);\n } catch {\n // Not valid JSON - that's fine, might not be structured output\n }\n }\n let hadRefusal = false;\n if (choice.message.refusal) {\n textContent.push({ type: 'text', text: choice.message.refusal });\n hadRefusal = true;\n }\n\n // Extract tool calls\n const toolCalls: ToolCall[] = [];\n if (choice.message.tool_calls) {\n for (const call of choice.message.tool_calls) {\n let args: Record<string, unknown> = {};\n try {\n args = JSON.parse(call.function.arguments);\n } catch {\n // Invalid JSON - use empty object\n }\n toolCalls.push({\n toolCallId: call.id,\n toolName: call.function.name,\n arguments: args,\n });\n }\n }\n\n const message = new AssistantMessage(\n textContent,\n toolCalls.length > 0 ? 
toolCalls : undefined,\n {\n id: data.id,\n metadata: {\n openai: {\n model: data.model,\n finish_reason: choice.finish_reason,\n system_fingerprint: data.system_fingerprint,\n service_tier: data.service_tier,\n },\n },\n }\n );\n\n const usage: TokenUsage = {\n inputTokens: data.usage.prompt_tokens,\n outputTokens: data.usage.completion_tokens,\n totalTokens: data.usage.total_tokens,\n };\n\n // Map finish reason to stop reason\n let stopReason = 'end_turn';\n switch (choice.finish_reason) {\n case 'stop':\n stopReason = 'end_turn';\n break;\n case 'length':\n stopReason = 'max_tokens';\n break;\n case 'tool_calls':\n stopReason = 'tool_use';\n break;\n case 'content_filter':\n stopReason = 'content_filter';\n break;\n }\n if (hadRefusal && stopReason !== 'content_filter') {\n stopReason = 'content_filter';\n }\n\n return {\n message,\n usage,\n stopReason,\n data: structuredData,\n };\n}\n\n/**\n * State for accumulating streaming response\n */\nexport interface CompletionsStreamState {\n id: string;\n model: string;\n text: string;\n toolCalls: Map<number, { id: string; name: string; arguments: string }>;\n finishReason: string | null;\n inputTokens: number;\n outputTokens: number;\n hadRefusal: boolean;\n}\n\n/**\n * Create initial stream state\n */\nexport function createStreamState(): CompletionsStreamState {\n return {\n id: '',\n model: '',\n text: '',\n toolCalls: new Map(),\n finishReason: null,\n inputTokens: 0,\n outputTokens: 0,\n hadRefusal: false,\n };\n}\n\n/**\n * Transform OpenAI stream chunk to UPP StreamEvent\n * Returns array since one chunk may produce multiple events\n */\nexport function transformStreamEvent(\n chunk: OpenAICompletionsStreamChunk,\n state: CompletionsStreamState\n): StreamEvent[] {\n const events: StreamEvent[] = [];\n\n // Update state with basic info\n if (chunk.id && !state.id) {\n state.id = chunk.id;\n events.push({ type: 'message_start', index: 0, delta: {} });\n }\n if (chunk.model) {\n state.model = chunk.model;\n }\n\n // Process choices\n const choice = chunk.choices[0];\n if (choice) {\n // Text delta\n if (choice.delta.content) {\n state.text += choice.delta.content;\n events.push({\n type: 'text_delta',\n index: 0,\n delta: { text: choice.delta.content },\n });\n }\n if (choice.delta.refusal) {\n state.hadRefusal = true;\n state.text += choice.delta.refusal;\n events.push({\n type: 'text_delta',\n index: 0,\n delta: { text: choice.delta.refusal },\n });\n }\n\n // Tool call deltas\n if (choice.delta.tool_calls) {\n for (const toolCallDelta of choice.delta.tool_calls) {\n const index = toolCallDelta.index;\n let toolCall = state.toolCalls.get(index);\n\n if (!toolCall) {\n toolCall = { id: '', name: '', arguments: '' };\n state.toolCalls.set(index, toolCall);\n }\n\n if (toolCallDelta.id) {\n toolCall.id = toolCallDelta.id;\n }\n if (toolCallDelta.function?.name) {\n toolCall.name = toolCallDelta.function.name;\n }\n if (toolCallDelta.function?.arguments) {\n toolCall.arguments += toolCallDelta.function.arguments;\n events.push({\n type: 'tool_call_delta',\n index: index,\n delta: {\n toolCallId: toolCall.id,\n toolName: toolCall.name,\n argumentsJson: toolCallDelta.function.arguments,\n },\n });\n }\n }\n }\n\n // Finish reason\n if (choice.finish_reason) {\n state.finishReason = choice.finish_reason;\n events.push({ type: 'message_stop', index: 0, delta: {} });\n }\n }\n\n // Usage info (usually comes at the end with stream_options.include_usage)\n if (chunk.usage) {\n state.inputTokens = chunk.usage.prompt_tokens;\n state.outputTokens 
= chunk.usage.completion_tokens;\n }\n\n return events;\n}\n\n/**\n * Build LLMResponse from accumulated stream state\n */\nexport function buildResponseFromState(state: CompletionsStreamState): LLMResponse {\n const textContent: TextBlock[] = [];\n let structuredData: unknown;\n if (state.text) {\n textContent.push({ type: 'text', text: state.text });\n // Try to parse as JSON for structured output (native JSON mode)\n try {\n structuredData = JSON.parse(state.text);\n } catch {\n // Not valid JSON - that's fine, might not be structured output\n }\n }\n\n const toolCalls: ToolCall[] = [];\n for (const [, toolCall] of state.toolCalls) {\n let args: Record<string, unknown> = {};\n if (toolCall.arguments) {\n try {\n args = JSON.parse(toolCall.arguments);\n } catch {\n // Invalid JSON - use empty object\n }\n }\n toolCalls.push({\n toolCallId: toolCall.id,\n toolName: toolCall.name,\n arguments: args,\n });\n }\n\n const message = new AssistantMessage(\n textContent,\n toolCalls.length > 0 ? toolCalls : undefined,\n {\n id: state.id,\n metadata: {\n openai: {\n model: state.model,\n finish_reason: state.finishReason,\n },\n },\n }\n );\n\n const usage: TokenUsage = {\n inputTokens: state.inputTokens,\n outputTokens: state.outputTokens,\n totalTokens: state.inputTokens + state.outputTokens,\n };\n\n // Map finish reason to stop reason\n let stopReason = 'end_turn';\n switch (state.finishReason) {\n case 'stop':\n stopReason = 'end_turn';\n break;\n case 'length':\n stopReason = 'max_tokens';\n break;\n case 'tool_calls':\n stopReason = 'tool_use';\n break;\n case 'content_filter':\n stopReason = 'content_filter';\n break;\n }\n if (state.hadRefusal && stopReason !== 'content_filter') {\n stopReason = 'content_filter';\n }\n\n return {\n message,\n usage,\n stopReason,\n data: structuredData,\n };\n}\n","import type { LLMHandler, BoundLLMModel, LLMRequest, LLMResponse, LLMStreamResult, LLMCapabilities } from '../../types/llm.ts';\nimport type { StreamEvent } from '../../types/stream.ts';\nimport type { LLMProvider } from '../../types/provider.ts';\nimport { UPPError } from '../../types/errors.ts';\nimport { resolveApiKey } from '../../http/keys.ts';\nimport { doFetch, doStreamFetch } from '../../http/fetch.ts';\nimport { parseSSEStream } from '../../http/sse.ts';\nimport { normalizeHttpError } from '../../http/errors.ts';\nimport type { OpenAICompletionsParams, OpenAICompletionsResponse, OpenAICompletionsStreamChunk } from './types.ts';\nimport {\n transformRequest,\n transformResponse,\n transformStreamEvent,\n createStreamState,\n buildResponseFromState,\n} from './transform.completions.ts';\n\nconst OPENAI_API_URL = 'https://api.openai.com/v1/chat/completions';\n\n/**\n * OpenAI API capabilities\n */\nconst OPENAI_CAPABILITIES: LLMCapabilities = {\n streaming: true,\n tools: true,\n structuredOutput: true,\n imageInput: true,\n videoInput: false,\n audioInput: false,\n};\n\n/**\n * Create OpenAI Chat Completions LLM handler\n */\nexport function createCompletionsLLMHandler(): LLMHandler<OpenAICompletionsParams> {\n // Provider reference injected by createProvider() or OpenAI's custom factory\n let providerRef: LLMProvider<OpenAICompletionsParams> | null = null;\n\n return {\n _setProvider(provider: LLMProvider<OpenAICompletionsParams>) {\n providerRef = provider;\n },\n\n bind(modelId: string): BoundLLMModel<OpenAICompletionsParams> {\n // Use the injected provider reference\n if (!providerRef) {\n throw new UPPError(\n 'Provider reference not set. 
Handler must be used with createProvider() or have _setProvider called.',\n 'INVALID_REQUEST',\n 'openai',\n 'llm'\n );\n }\n\n const model: BoundLLMModel<OpenAICompletionsParams> = {\n modelId,\n capabilities: OPENAI_CAPABILITIES,\n\n get provider(): LLMProvider<OpenAICompletionsParams> {\n return providerRef!;\n },\n\n async complete(request: LLMRequest<OpenAICompletionsParams>): Promise<LLMResponse> {\n const apiKey = await resolveApiKey(\n request.config,\n 'OPENAI_API_KEY',\n 'openai',\n 'llm'\n );\n\n const baseUrl = request.config.baseUrl ?? OPENAI_API_URL;\n const body = transformRequest(request, modelId);\n\n const response = await doFetch(\n baseUrl,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(body),\n signal: request.signal,\n },\n request.config,\n 'openai',\n 'llm'\n );\n\n const data = (await response.json()) as OpenAICompletionsResponse;\n return transformResponse(data);\n },\n\n stream(request: LLMRequest<OpenAICompletionsParams>): LLMStreamResult {\n const state = createStreamState();\n let responseResolve: (value: LLMResponse) => void;\n let responseReject: (error: Error) => void;\n\n const responsePromise = new Promise<LLMResponse>((resolve, reject) => {\n responseResolve = resolve;\n responseReject = reject;\n });\n\n async function* generateEvents(): AsyncGenerator<StreamEvent, void, unknown> {\n try {\n const apiKey = await resolveApiKey(\n request.config,\n 'OPENAI_API_KEY',\n 'openai',\n 'llm'\n );\n\n const baseUrl = request.config.baseUrl ?? OPENAI_API_URL;\n const body = transformRequest(request, modelId);\n body.stream = true;\n body.stream_options = { include_usage: true };\n\n const response = await doStreamFetch(\n baseUrl,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(body),\n signal: request.signal,\n },\n request.config,\n 'openai',\n 'llm'\n );\n\n if (!response.ok) {\n const error = await normalizeHttpError(response, 'openai', 'llm');\n responseReject(error);\n throw error;\n }\n\n if (!response.body) {\n const error = new UPPError(\n 'No response body for streaming request',\n 'PROVIDER_ERROR',\n 'openai',\n 'llm'\n );\n responseReject(error);\n throw error;\n }\n\n for await (const data of parseSSEStream(response.body)) {\n // Skip [DONE] marker\n if (data === '[DONE]') {\n continue;\n }\n\n // Check for OpenAI error event\n if (typeof data === 'object' && data !== null) {\n const chunk = data as OpenAICompletionsStreamChunk;\n\n // Check for error in chunk\n if ('error' in chunk && chunk.error) {\n const errorData = chunk.error as { message?: string; type?: string };\n const error = new UPPError(\n errorData.message ?? 
'Unknown error',\n 'PROVIDER_ERROR',\n 'openai',\n 'llm'\n );\n responseReject(error);\n throw error;\n }\n\n const uppEvents = transformStreamEvent(chunk, state);\n for (const event of uppEvents) {\n yield event;\n }\n }\n }\n\n // Build final response\n responseResolve(buildResponseFromState(state));\n } catch (error) {\n responseReject(error as Error);\n throw error;\n }\n }\n\n return {\n [Symbol.asyncIterator]() {\n return generateEvents();\n },\n response: responsePromise,\n };\n },\n };\n\n return model;\n },\n };\n}\n","import type { LLMRequest, LLMResponse } from '../../types/llm.ts';\nimport type { Message } from '../../types/messages.ts';\nimport type { StreamEvent } from '../../types/stream.ts';\nimport type { Tool, ToolCall } from '../../types/tool.ts';\nimport type { TokenUsage } from '../../types/turn.ts';\nimport type { ContentBlock, TextBlock, ImageBlock } from '../../types/content.ts';\nimport {\n AssistantMessage,\n isUserMessage,\n isAssistantMessage,\n isToolResultMessage,\n} from '../../types/messages.ts';\nimport type {\n OpenAIResponsesParams,\n OpenAIResponsesRequest,\n OpenAIResponsesInputItem,\n OpenAIResponsesContentPart,\n OpenAIResponsesTool,\n OpenAIResponsesResponse,\n OpenAIResponsesStreamEvent,\n OpenAIResponsesOutputItem,\n OpenAIResponsesMessageOutput,\n OpenAIResponsesFunctionCallOutput,\n} from './types.ts';\n\n/**\n * Transform UPP request to OpenAI Responses API format\n *\n * Params are spread directly to allow pass-through of any OpenAI API fields,\n * even those not explicitly defined in our type. This enables developers to\n * use new API features without waiting for library updates.\n */\nexport function transformRequest(\n request: LLMRequest<OpenAIResponsesParams>,\n modelId: string\n): OpenAIResponsesRequest {\n const params = request.params ?? ({} as OpenAIResponsesParams);\n\n // Spread params to pass through all fields, then set required fields\n const openaiRequest: OpenAIResponsesRequest = {\n ...params,\n model: modelId,\n input: transformInputItems(request.messages, request.system),\n };\n\n // Tools come from request, not params\n if (request.tools && request.tools.length > 0) {\n openaiRequest.tools = request.tools.map(transformTool);\n }\n\n // Structured output via text.format (overrides params.text if set)\n if (request.structure) {\n const schema: Record<string, unknown> = {\n type: 'object',\n properties: request.structure.properties,\n required: request.structure.required,\n ...(request.structure.additionalProperties !== undefined\n ? 
{ additionalProperties: request.structure.additionalProperties }\n : { additionalProperties: false }),\n };\n if (request.structure.description) {\n schema.description = request.structure.description;\n }\n\n openaiRequest.text = {\n format: {\n type: 'json_schema',\n name: 'json_response',\n description: request.structure.description,\n schema,\n strict: true,\n },\n };\n }\n\n return openaiRequest;\n}\n\n/**\n * Transform messages to Responses API input items\n */\nfunction transformInputItems(\n messages: Message[],\n system?: string\n): OpenAIResponsesInputItem[] | string {\n const result: OpenAIResponsesInputItem[] = [];\n\n if (system) {\n result.push({\n type: 'message',\n role: 'system',\n content: system,\n });\n }\n\n for (const message of messages) {\n const items = transformMessage(message);\n result.push(...items);\n }\n\n // If there's only one user message with simple text, return as string\n if (result.length === 1 && result[0]?.type === 'message') {\n const item = result[0] as { role?: string; content?: string | unknown[] };\n if (item.role === 'user' && typeof item.content === 'string') {\n return item.content;\n }\n }\n\n return result;\n}\n\n/**\n * Filter to only valid content blocks with a type property\n */\nfunction filterValidContent<T extends { type?: string }>(content: T[]): T[] {\n return content.filter((c) => c && typeof c.type === 'string');\n}\n\n/**\n * Transform a UPP Message to OpenAI Responses API input items\n */\nfunction transformMessage(message: Message): OpenAIResponsesInputItem[] {\n if (isUserMessage(message)) {\n const validContent = filterValidContent(message.content);\n // Check if we can use simple string content\n if (validContent.length === 1 && validContent[0]?.type === 'text') {\n return [\n {\n type: 'message',\n role: 'user',\n content: (validContent[0] as TextBlock).text,\n },\n ];\n }\n return [\n {\n type: 'message',\n role: 'user',\n content: validContent.map(transformContentPart),\n },\n ];\n }\n\n if (isAssistantMessage(message)) {\n const validContent = filterValidContent(message.content);\n const items: OpenAIResponsesInputItem[] = [];\n\n // Add message content - only text parts for assistant messages\n const contentParts: OpenAIResponsesContentPart[] = validContent\n .filter((c): c is TextBlock => c.type === 'text')\n .map((c): OpenAIResponsesContentPart => ({\n type: 'output_text',\n text: c.text,\n }));\n\n // Add assistant message if we have text content\n if (contentParts.length > 0) {\n items.push({\n type: 'message',\n role: 'assistant',\n content: contentParts,\n });\n }\n\n // Add function_call items for each tool call (must precede function_call_output)\n const openaiMeta = message.metadata?.openai as\n | { functionCallItems?: Array<{ id: string; call_id: string; name: string; arguments: string }> }\n | undefined;\n const functionCallItems = openaiMeta?.functionCallItems;\n\n if (functionCallItems && functionCallItems.length > 0) {\n for (const fc of functionCallItems) {\n items.push({\n type: 'function_call',\n id: fc.id,\n call_id: fc.call_id,\n name: fc.name,\n arguments: fc.arguments,\n });\n }\n } else if (message.toolCalls && message.toolCalls.length > 0) {\n for (const call of message.toolCalls) {\n items.push({\n type: 'function_call',\n id: `fc_${call.toolCallId}`,\n call_id: call.toolCallId,\n name: call.toolName,\n arguments: JSON.stringify(call.arguments),\n });\n }\n }\n\n return items;\n }\n\n if (isToolResultMessage(message)) {\n // Tool results are function_call_output items\n return 
message.results.map((result) => ({\n type: 'function_call_output' as const,\n call_id: result.toolCallId,\n output:\n typeof result.result === 'string'\n ? result.result\n : JSON.stringify(result.result),\n }));\n }\n\n return [];\n}\n\n/**\n * Transform a content block to Responses API format\n */\nfunction transformContentPart(block: ContentBlock): OpenAIResponsesContentPart {\n switch (block.type) {\n case 'text':\n return { type: 'input_text', text: block.text };\n\n case 'image': {\n const imageBlock = block as ImageBlock;\n if (imageBlock.source.type === 'base64') {\n return {\n type: 'input_image',\n image_url: `data:${imageBlock.mimeType};base64,${imageBlock.source.data}`,\n };\n }\n\n if (imageBlock.source.type === 'url') {\n return {\n type: 'input_image',\n image_url: imageBlock.source.url,\n };\n }\n\n if (imageBlock.source.type === 'bytes') {\n // Convert bytes to base64\n const base64 = btoa(\n Array.from(imageBlock.source.data)\n .map((b) => String.fromCharCode(b))\n .join('')\n );\n return {\n type: 'input_image',\n image_url: `data:${imageBlock.mimeType};base64,${base64}`,\n };\n }\n\n throw new Error('Unknown image source type');\n }\n\n default:\n throw new Error(`Unsupported content type: ${block.type}`);\n }\n}\n\n/**\n * Transform a UPP Tool to Responses API format\n */\nfunction transformTool(tool: Tool): OpenAIResponsesTool {\n return {\n type: 'function',\n name: tool.name,\n description: tool.description,\n parameters: {\n type: 'object',\n properties: tool.parameters.properties,\n required: tool.parameters.required,\n ...(tool.parameters.additionalProperties !== undefined\n ? { additionalProperties: tool.parameters.additionalProperties }\n : {}),\n },\n };\n}\n\n/**\n * Transform OpenAI Responses API response to UPP LLMResponse\n */\nexport function transformResponse(data: OpenAIResponsesResponse): LLMResponse {\n // Extract text content and tool calls from output items\n const textContent: TextBlock[] = [];\n const toolCalls: ToolCall[] = [];\n const functionCallItems: Array<{\n id: string;\n call_id: string;\n name: string;\n arguments: string;\n }> = [];\n let hadRefusal = false;\n let structuredData: unknown;\n\n for (const item of data.output) {\n if (item.type === 'message') {\n const messageItem = item as OpenAIResponsesMessageOutput;\n for (const content of messageItem.content) {\n if (content.type === 'output_text') {\n textContent.push({ type: 'text', text: content.text });\n // Try to parse as JSON for structured output (native JSON mode)\n // Only set data if text is valid JSON\n if (structuredData === undefined) {\n try {\n structuredData = JSON.parse(content.text);\n } catch {\n // Not valid JSON - that's fine, might not be structured output\n }\n }\n } else if (content.type === 'refusal') {\n textContent.push({ type: 'text', text: content.refusal });\n hadRefusal = true;\n }\n }\n } else if (item.type === 'function_call') {\n const functionCall = item as OpenAIResponsesFunctionCallOutput;\n let args: Record<string, unknown> = {};\n try {\n args = JSON.parse(functionCall.arguments);\n } catch {\n // Invalid JSON - use empty object\n }\n toolCalls.push({\n toolCallId: functionCall.call_id,\n toolName: functionCall.name,\n arguments: args,\n });\n functionCallItems.push({\n id: functionCall.id,\n call_id: functionCall.call_id,\n name: functionCall.name,\n arguments: functionCall.arguments,\n });\n }\n }\n\n const message = new AssistantMessage(\n textContent,\n toolCalls.length > 0 ? 
toolCalls : undefined,\n {\n id: data.id,\n metadata: {\n openai: {\n model: data.model,\n status: data.status,\n // Store response_id for multi-turn tool calling\n response_id: data.id,\n functionCallItems:\n functionCallItems.length > 0 ? functionCallItems : undefined,\n },\n },\n }\n );\n\n const usage: TokenUsage = {\n inputTokens: data.usage.input_tokens,\n outputTokens: data.usage.output_tokens,\n totalTokens: data.usage.total_tokens,\n };\n\n // Map status to stop reason\n let stopReason = 'end_turn';\n if (data.status === 'completed') {\n stopReason = toolCalls.length > 0 ? 'tool_use' : 'end_turn';\n } else if (data.status === 'incomplete') {\n stopReason = data.incomplete_details?.reason === 'max_output_tokens'\n ? 'max_tokens'\n : 'end_turn';\n } else if (data.status === 'failed') {\n stopReason = 'error';\n }\n if (hadRefusal && stopReason !== 'error') {\n stopReason = 'content_filter';\n }\n\n return {\n message,\n usage,\n stopReason,\n data: structuredData,\n };\n}\n\n/**\n * State for accumulating streaming response\n */\nexport interface ResponsesStreamState {\n id: string;\n model: string;\n textByIndex: Map<number, string>;\n toolCalls: Map<\n number,\n { itemId?: string; callId?: string; name?: string; arguments: string }\n >;\n status: string;\n inputTokens: number;\n outputTokens: number;\n hadRefusal: boolean;\n}\n\n/**\n * Create initial stream state\n */\nexport function createStreamState(): ResponsesStreamState {\n return {\n id: '',\n model: '',\n textByIndex: new Map(),\n toolCalls: new Map(),\n status: 'in_progress',\n inputTokens: 0,\n outputTokens: 0,\n hadRefusal: false,\n };\n}\n\n/**\n * Transform OpenAI Responses API stream event to UPP StreamEvent\n * Returns array since one event may produce multiple UPP events\n */\nexport function transformStreamEvent(\n event: OpenAIResponsesStreamEvent,\n state: ResponsesStreamState\n): StreamEvent[] {\n const events: StreamEvent[] = [];\n\n switch (event.type) {\n case 'response.created':\n state.id = event.response.id;\n state.model = event.response.model;\n events.push({ type: 'message_start', index: 0, delta: {} });\n break;\n\n case 'response.in_progress':\n state.status = 'in_progress';\n break;\n\n case 'response.completed':\n state.status = 'completed';\n if (event.response.usage) {\n state.inputTokens = event.response.usage.input_tokens;\n state.outputTokens = event.response.usage.output_tokens;\n }\n events.push({ type: 'message_stop', index: 0, delta: {} });\n break;\n\n case 'response.failed':\n state.status = 'failed';\n events.push({ type: 'message_stop', index: 0, delta: {} });\n break;\n\n case 'response.output_item.added':\n if (event.item.type === 'function_call') {\n const functionCall = event.item as OpenAIResponsesFunctionCallOutput;\n const existing = state.toolCalls.get(event.output_index) ?? {\n arguments: '',\n };\n existing.itemId = functionCall.id;\n existing.callId = functionCall.call_id;\n existing.name = functionCall.name;\n if (functionCall.arguments) {\n existing.arguments = functionCall.arguments;\n }\n state.toolCalls.set(event.output_index, existing);\n }\n events.push({\n type: 'content_block_start',\n index: event.output_index,\n delta: {},\n });\n break;\n\n case 'response.output_item.done':\n if (event.item.type === 'function_call') {\n const functionCall = event.item as OpenAIResponsesFunctionCallOutput;\n const existing = state.toolCalls.get(event.output_index) ?? 
{\n arguments: '',\n };\n existing.itemId = functionCall.id;\n existing.callId = functionCall.call_id;\n existing.name = functionCall.name;\n if (functionCall.arguments) {\n existing.arguments = functionCall.arguments;\n }\n state.toolCalls.set(event.output_index, existing);\n }\n events.push({\n type: 'content_block_stop',\n index: event.output_index,\n delta: {},\n });\n break;\n\n case 'response.output_text.delta':\n // Accumulate text\n const currentText = state.textByIndex.get(event.output_index) ?? '';\n state.textByIndex.set(event.output_index, currentText + event.delta);\n events.push({\n type: 'text_delta',\n index: event.output_index,\n delta: { text: event.delta },\n });\n break;\n\n case 'response.output_text.done':\n state.textByIndex.set(event.output_index, event.text);\n break;\n\n case 'response.refusal.delta': {\n state.hadRefusal = true;\n const currentRefusal = state.textByIndex.get(event.output_index) ?? '';\n state.textByIndex.set(event.output_index, currentRefusal + event.delta);\n events.push({\n type: 'text_delta',\n index: event.output_index,\n delta: { text: event.delta },\n });\n break;\n }\n\n case 'response.refusal.done':\n state.hadRefusal = true;\n state.textByIndex.set(event.output_index, event.refusal);\n break;\n\n case 'response.function_call_arguments.delta': {\n // Accumulate function call arguments\n let toolCall = state.toolCalls.get(event.output_index);\n if (!toolCall) {\n toolCall = { arguments: '' };\n state.toolCalls.set(event.output_index, toolCall);\n }\n if (event.item_id && !toolCall.itemId) {\n toolCall.itemId = event.item_id;\n }\n if (event.call_id && !toolCall.callId) {\n toolCall.callId = event.call_id;\n }\n toolCall.arguments += event.delta;\n events.push({\n type: 'tool_call_delta',\n index: event.output_index,\n delta: {\n toolCallId: toolCall.callId ?? toolCall.itemId ?? 
'',\n toolName: toolCall.name,\n argumentsJson: event.delta,\n },\n });\n break;\n }\n\n case 'response.function_call_arguments.done': {\n // Finalize function call\n let toolCall = state.toolCalls.get(event.output_index);\n if (!toolCall) {\n toolCall = { arguments: '' };\n state.toolCalls.set(event.output_index, toolCall);\n }\n if (event.item_id) {\n toolCall.itemId = event.item_id;\n }\n if (event.call_id) {\n toolCall.callId = event.call_id;\n }\n toolCall.name = event.name;\n toolCall.arguments = event.arguments;\n break;\n }\n\n case 'error':\n // Error events are handled at the handler level\n break;\n\n default:\n // Ignore other events\n break;\n }\n\n return events;\n}\n\n/**\n * Build LLMResponse from accumulated stream state\n */\nexport function buildResponseFromState(state: ResponsesStreamState): LLMResponse {\n const textContent: TextBlock[] = [];\n let structuredData: unknown;\n\n // Combine all text content\n for (const [, text] of state.textByIndex) {\n if (text) {\n textContent.push({ type: 'text', text });\n // Try to parse as JSON for structured output (native JSON mode)\n if (structuredData === undefined) {\n try {\n structuredData = JSON.parse(text);\n } catch {\n // Not valid JSON - that's fine, might not be structured output\n }\n }\n }\n }\n\n const toolCalls: ToolCall[] = [];\n const functionCallItems: Array<{\n id: string;\n call_id: string;\n name: string;\n arguments: string;\n }> = [];\n for (const [, toolCall] of state.toolCalls) {\n let args: Record<string, unknown> = {};\n if (toolCall.arguments) {\n try {\n args = JSON.parse(toolCall.arguments);\n } catch {\n // Invalid JSON - use empty object\n }\n }\n const itemId = toolCall.itemId ?? '';\n const callId = toolCall.callId ?? toolCall.itemId ?? '';\n const name = toolCall.name ?? '';\n toolCalls.push({\n toolCallId: callId,\n toolName: name,\n arguments: args,\n });\n\n if (itemId && callId && name) {\n functionCallItems.push({\n id: itemId,\n call_id: callId,\n name,\n arguments: toolCall.arguments,\n });\n }\n }\n\n const message = new AssistantMessage(\n textContent,\n toolCalls.length > 0 ? toolCalls : undefined,\n {\n id: state.id,\n metadata: {\n openai: {\n model: state.model,\n status: state.status,\n // Store response_id for multi-turn tool calling\n response_id: state.id,\n functionCallItems:\n functionCallItems.length > 0 ? functionCallItems : undefined,\n },\n },\n }\n );\n\n const usage: TokenUsage = {\n inputTokens: state.inputTokens,\n outputTokens: state.outputTokens,\n totalTokens: state.inputTokens + state.outputTokens,\n };\n\n // Map status to stop reason\n let stopReason = 'end_turn';\n if (state.status === 'completed') {\n stopReason = toolCalls.length > 0 ? 
'tool_use' : 'end_turn';\n } else if (state.status === 'failed') {\n stopReason = 'error';\n }\n if (state.hadRefusal && stopReason !== 'error') {\n stopReason = 'content_filter';\n }\n\n return {\n message,\n usage,\n stopReason,\n data: structuredData,\n };\n}\n","import type { LLMHandler, BoundLLMModel, LLMRequest, LLMResponse, LLMStreamResult, LLMCapabilities } from '../../types/llm.ts';\nimport type { StreamEvent } from '../../types/stream.ts';\nimport type { LLMProvider } from '../../types/provider.ts';\nimport { UPPError } from '../../types/errors.ts';\nimport { resolveApiKey } from '../../http/keys.ts';\nimport { doFetch, doStreamFetch } from '../../http/fetch.ts';\nimport { parseSSEStream } from '../../http/sse.ts';\nimport { normalizeHttpError } from '../../http/errors.ts';\nimport type { OpenAIResponsesParams, OpenAIResponsesResponse, OpenAIResponsesStreamEvent, OpenAIResponseErrorEvent } from './types.ts';\nimport {\n transformRequest,\n transformResponse,\n transformStreamEvent,\n createStreamState,\n buildResponseFromState,\n} from './transform.responses.ts';\n\nconst OPENAI_RESPONSES_API_URL = 'https://api.openai.com/v1/responses';\n\n/**\n * OpenAI API capabilities\n */\nconst OPENAI_CAPABILITIES: LLMCapabilities = {\n streaming: true,\n tools: true,\n structuredOutput: true,\n imageInput: true,\n videoInput: false,\n audioInput: false,\n};\n\n/**\n * Create OpenAI Responses API LLM handler\n */\nexport function createResponsesLLMHandler(): LLMHandler<OpenAIResponsesParams> {\n // Provider reference injected by createProvider() or OpenAI's custom factory\n let providerRef: LLMProvider<OpenAIResponsesParams> | null = null;\n\n return {\n _setProvider(provider: LLMProvider<OpenAIResponsesParams>) {\n providerRef = provider;\n },\n\n bind(modelId: string): BoundLLMModel<OpenAIResponsesParams> {\n // Use the injected provider reference\n if (!providerRef) {\n throw new UPPError(\n 'Provider reference not set. Handler must be used with createProvider() or have _setProvider called.',\n 'INVALID_REQUEST',\n 'openai',\n 'llm'\n );\n }\n\n const model: BoundLLMModel<OpenAIResponsesParams> = {\n modelId,\n capabilities: OPENAI_CAPABILITIES,\n\n get provider(): LLMProvider<OpenAIResponsesParams> {\n return providerRef!;\n },\n\n async complete(request: LLMRequest<OpenAIResponsesParams>): Promise<LLMResponse> {\n const apiKey = await resolveApiKey(\n request.config,\n 'OPENAI_API_KEY',\n 'openai',\n 'llm'\n );\n\n const baseUrl = request.config.baseUrl ?? 
OPENAI_RESPONSES_API_URL;\n const body = transformRequest(request, modelId);\n\n const response = await doFetch(\n baseUrl,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(body),\n signal: request.signal,\n },\n request.config,\n 'openai',\n 'llm'\n );\n\n const data = (await response.json()) as OpenAIResponsesResponse;\n\n // Check for error in response\n if (data.status === 'failed' && data.error) {\n throw new UPPError(\n data.error.message,\n 'PROVIDER_ERROR',\n 'openai',\n 'llm'\n );\n }\n\n return transformResponse(data);\n },\n\n stream(request: LLMRequest<OpenAIResponsesParams>): LLMStreamResult {\n const state = createStreamState();\n let responseResolve: (value: LLMResponse) => void;\n let responseReject: (error: Error) => void;\n\n const responsePromise = new Promise<LLMResponse>((resolve, reject) => {\n responseResolve = resolve;\n responseReject = reject;\n });\n\n async function* generateEvents(): AsyncGenerator<StreamEvent, void, unknown> {\n try {\n const apiKey = await resolveApiKey(\n request.config,\n 'OPENAI_API_KEY',\n 'openai',\n 'llm'\n );\n\n const baseUrl = request.config.baseUrl ?? OPENAI_RESPONSES_API_URL;\n const body = transformRequest(request, modelId);\n body.stream = true;\n\n const response = await doStreamFetch(\n baseUrl,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(body),\n signal: request.signal,\n },\n request.config,\n 'openai',\n 'llm'\n );\n\n if (!response.ok) {\n const error = await normalizeHttpError(response, 'openai', 'llm');\n responseReject(error);\n throw error;\n }\n\n if (!response.body) {\n const error = new UPPError(\n 'No response body for streaming request',\n 'PROVIDER_ERROR',\n 'openai',\n 'llm'\n );\n responseReject(error);\n throw error;\n }\n\n for await (const data of parseSSEStream(response.body)) {\n // Skip [DONE] marker\n if (data === '[DONE]') {\n continue;\n }\n\n // Check for OpenAI error event\n if (typeof data === 'object' && data !== null) {\n const event = data as OpenAIResponsesStreamEvent;\n\n // Check for error event\n if (event.type === 'error') {\n const errorEvent = event as OpenAIResponseErrorEvent;\n const error = new UPPError(\n errorEvent.error.message,\n 'PROVIDER_ERROR',\n 'openai',\n 'llm'\n );\n responseReject(error);\n throw error;\n }\n\n const uppEvents = transformStreamEvent(event, state);\n for (const uppEvent of uppEvents) {\n yield uppEvent;\n }\n }\n }\n\n // Build final response\n responseResolve(buildResponseFromState(state));\n } catch (error) {\n responseReject(error as Error);\n throw error;\n }\n }\n\n return {\n [Symbol.asyncIterator]() {\n return generateEvents();\n },\n response: responsePromise,\n };\n },\n };\n\n return model;\n },\n };\n}\n","import type {\n Provider,\n ModelReference,\n LLMHandler,\n LLMProvider,\n} from '../../types/provider.ts';\nimport { createCompletionsLLMHandler } from './llm.completions.ts';\nimport { createResponsesLLMHandler } from './llm.responses.ts';\nimport type { OpenAICompletionsParams, OpenAIResponsesParams, OpenAIConfig } from './types.ts';\n\n/** Union type for modalities interface */\ntype OpenAILLMParamsUnion = OpenAICompletionsParams | OpenAIResponsesParams;\n\n/**\n * OpenAI provider options\n */\nexport interface OpenAIProviderOptions {\n /**\n * Which API to use:\n * - 'responses': Modern Responses API (default, recommended)\n * - 'completions': Legacy Chat 
Completions API\n */\n api?: 'responses' | 'completions';\n}\n\n/**\n * OpenAI provider with configurable API mode\n *\n * @example\n * // Using the modern Responses API (default)\n * const model = openai('gpt-4o');\n *\n * @example\n * // Using the legacy Chat Completions API\n * const model = openai('gpt-4o', { api: 'completions' });\n *\n * @example\n * // Explicit Responses API\n * const model = openai('gpt-4o', { api: 'responses' });\n */\nexport interface OpenAIProvider extends Provider<OpenAIProviderOptions> {\n /**\n * Create a model reference\n * @param modelId - The model identifier (e.g., 'gpt-4o', 'gpt-4-turbo', 'o1-preview')\n * @param options - Provider options including API selection\n */\n (modelId: string, options?: OpenAIProviderOptions): ModelReference<OpenAIProviderOptions>;\n\n /** Provider name */\n readonly name: 'openai';\n\n /** Provider version */\n readonly version: string;\n\n /** Supported modalities */\n readonly modalities: {\n llm: LLMHandler<OpenAILLMParamsUnion>;\n };\n}\n\n/**\n * Create the OpenAI provider\n */\nfunction createOpenAIProvider(): OpenAIProvider {\n // Track which API mode is currently active for the modalities\n let currentApiMode: 'responses' | 'completions' = 'responses';\n\n // Create handlers eagerly so we can inject provider reference\n const responsesHandler = createResponsesLLMHandler();\n const completionsHandler = createCompletionsLLMHandler();\n\n const fn = function (\n modelId: string,\n options?: OpenAIProviderOptions\n ): ModelReference<OpenAIProviderOptions> {\n const apiMode = options?.api ?? 'responses';\n currentApiMode = apiMode;\n return { modelId, provider };\n };\n\n // Create a dynamic modalities object that returns the correct handler\n const modalities = {\n get llm(): LLMHandler<OpenAILLMParamsUnion> {\n return currentApiMode === 'completions'\n ? 
(completionsHandler as unknown as LLMHandler<OpenAILLMParamsUnion>)\n : (responsesHandler as unknown as LLMHandler<OpenAILLMParamsUnion>);\n },\n };\n\n // Define properties\n Object.defineProperties(fn, {\n name: {\n value: 'openai',\n writable: false,\n configurable: true,\n },\n version: {\n value: '1.0.0',\n writable: false,\n configurable: true,\n },\n modalities: {\n value: modalities,\n writable: false,\n configurable: true,\n },\n });\n\n const provider = fn as OpenAIProvider;\n\n // Inject provider reference into both handlers (spec compliance)\n responsesHandler._setProvider?.(provider as unknown as LLMProvider<OpenAIResponsesParams>);\n completionsHandler._setProvider?.(provider as unknown as LLMProvider<OpenAICompletionsParams>);\n\n return provider;\n}\n\n/**\n * OpenAI provider\n *\n * Supports both the modern Responses API (default) and legacy Chat Completions API.\n *\n * @example\n * ```ts\n * import { openai } from './providers/openai';\n * import { llm } from './core/llm';\n *\n * // Using Responses API (default, modern, recommended)\n * const model = llm({\n * model: openai('gpt-4o'),\n * params: { max_tokens: 1000 }\n * });\n *\n * // Using Chat Completions API (legacy)\n * const legacyModel = llm({\n * model: openai('gpt-4o', { api: 'completions' }),\n * params: { max_tokens: 1000 }\n * });\n *\n * // Generate\n * const turn = await model.generate('Hello!');\n * console.log(turn.response.text);\n * ```\n */\nexport const openai = createOpenAIProvider();\n\n// Re-export types\nexport type {\n OpenAICompletionsParams,\n OpenAIResponsesParams,\n OpenAIConfig,\n OpenAIAPIMode,\n OpenAIModelOptions,\n OpenAIModelReference,\n} from './types.ts';\n"],"mappings":";;;;;;;;;;;;;;;;;;AA8BO,SAAS,iBACd,SACA,SAC0B;AAC1B,QAAM,SAAS,QAAQ,UAAW,CAAC;AAGnC,QAAM,gBAA0C;AAAA,IAC9C,GAAG;AAAA,IACH,OAAO;AAAA,IACP,UAAU,kBAAkB,QAAQ,UAAU,QAAQ,MAAM;AAAA,EAC9D;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,kBAAc,QAAQ,QAAQ,MAAM,IAAI,aAAa;AAAA,EACvD;AAGA,MAAI,QAAQ,WAAW;AACrB,UAAM,SAAkC;AAAA,MACtC,MAAM;AAAA,MACN,YAAY,QAAQ,UAAU;AAAA,MAC9B,UAAU,QAAQ,UAAU;AAAA,MAC5B,GAAI,QAAQ,UAAU,yBAAyB,SAC3C,EAAE,sBAAsB,QAAQ,UAAU,qBAAqB,IAC/D,EAAE,sBAAsB,MAAM;AAAA,IACpC;AACA,QAAI,QAAQ,UAAU,aAAa;AACjC,aAAO,cAAc,QAAQ,UAAU;AAAA,IACzC;AAEA,kBAAc,kBAAkB;AAAA,MAC9B,MAAM;AAAA,MACN,aAAa;AAAA,QACX,MAAM;AAAA,QACN,aAAa,QAAQ,UAAU;AAAA,QAC/B;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,kBACP,UACA,QAC4B;AAC5B,QAAM,SAAqC,CAAC;AAG5C,MAAI,QAAQ;AACV,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAGA,aAAW,WAAW,UAAU;AAE9B,QAAI,oBAAoB,OAAO,GAAG;AAChC,YAAM,eAAe,qBAAqB,OAAO;AACjD,aAAO,KAAK,GAAG,YAAY;AAAA,IAC7B,OAAO;AACL,YAAM,cAAc,iBAAiB,OAAO;AAC5C,UAAI,aAAa;AACf,eAAO,KAAK,WAAW;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,mBAAgD,SAAmB;AAC1E,SAAO,QAAQ,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE,SAAS,QAAQ;AAC9D;AAKA,SAAS,iBAAiB,SAAmD;AAC3E,MAAI,cAAc,OAAO,GAAG;AAC1B,UAAM,eAAe,mBAAmB,QAAQ,OAAO;AAEvD,QAAI,aAAa,WAAW,KAAK,aAAa,CAAC,GAAG,SAAS,QAAQ;AACjE,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAU,aAAa,CAAC,EAAgB;AAAA,MAC1C;AAAA,IACF;AACA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS,aAAa,IAAI,qBAAqB;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,mBAAmB,OAAO,GAAG;AAC/B,UAAM,eAAe,mBAAmB,QAAQ,OAAO;AAEvD,UAAM,cAAc,aACjB,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,EAAE;AAEV,UAAM,mBAA6C;AAAA,MACjD,MAAM;AAAA,MACN,SAAS,eAAe;AAAA,IAC1B;AAGA,QAAI,QAAQ,aAAa,QAAQ,UAAU,SAAS,GAAG;AACrD,MAAC,iBAAuD,aACtD,QAAQ,UAAU,IAAI,CAAC,UAAU;AAAA,QAC/B,IAAI,KAAK;AAAA,QACT,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,WAAW,KAAK,UAAU,KAAK,SAAS
;AAAA,QAC1C;AAAA,MACF,EAAE;AAAA,IACN;AAEA,WAAO;AAAA,EACT;AAEA,MAAI,oBAAoB,OAAO,GAAG;AAKhC,UAAM,UAAU,QAAQ,QAAQ,IAAI,CAAC,YAAY;AAAA,MAC/C,MAAM;AAAA,MACN,cAAc,OAAO;AAAA,MACrB,SACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,IACpC,EAAE;AAGF,WAAO,QAAQ,CAAC,KAAK;AAAA,EACvB;AAEA,SAAO;AACT;AAKO,SAAS,qBACd,SAC4B;AAC5B,MAAI,CAAC,oBAAoB,OAAO,GAAG;AACjC,UAAM,SAAS,iBAAiB,OAAO;AACvC,WAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,EAC9B;AAEA,SAAO,QAAQ,QAAQ,IAAI,CAAC,YAAY;AAAA,IACtC,MAAM;AAAA,IACN,cAAc,OAAO;AAAA,IACrB,SACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,EACpC,EAAE;AACJ;AAKA,SAAS,sBAAsB,OAAwC;AACrE,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK;AAAA,IAE1C,KAAK,SAAS;AACZ,YAAM,aAAa;AACnB,UAAI;AAEJ,UAAI,WAAW,OAAO,SAAS,UAAU;AACvC,cAAM,QAAQ,WAAW,QAAQ,WAAW,WAAW,OAAO,IAAI;AAAA,MACpE,WAAW,WAAW,OAAO,SAAS,OAAO;AAC3C,cAAM,WAAW,OAAO;AAAA,MAC1B,WAAW,WAAW,OAAO,SAAS,SAAS;AAE7C,cAAM,SAAS;AAAA,UACb,MAAM,KAAK,WAAW,OAAO,IAAI,EAC9B,IAAI,CAAC,MAAM,OAAO,aAAa,CAAC,CAAC,EACjC,KAAK,EAAE;AAAA,QACZ;AACA,cAAM,QAAQ,WAAW,QAAQ,WAAW,MAAM;AAAA,MACpD,OAAO;AACL,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,WAAW,EAAE,IAAI;AAAA,MACnB;AAAA,IACF;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,6BAA6B,MAAM,IAAI,EAAE;AAAA,EAC7D;AACF;AAKA,SAAS,cAAc,MAAmC;AACxD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY;AAAA,QACV,MAAM;AAAA,QACN,YAAY,KAAK,WAAW;AAAA,QAC5B,UAAU,KAAK,WAAW;AAAA,QAC1B,GAAI,KAAK,WAAW,yBAAyB,SACzC,EAAE,sBAAsB,KAAK,WAAW,qBAAqB,IAC7D,CAAC;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,kBAAkB,MAA8C;AAC9E,QAAM,SAAS,KAAK,QAAQ,CAAC;AAC7B,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAGA,QAAM,cAA2B,CAAC;AAClC,MAAI;AACJ,MAAI,OAAO,QAAQ,SAAS;AAC1B,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,QAAQ,QAAQ,CAAC;AAE/D,QAAI;AACF,uBAAiB,KAAK,MAAM,OAAO,QAAQ,OAAO;AAAA,IACpD,QAAQ;AAAA,IAER;AAAA,EACF;AACA,MAAI,aAAa;AACjB,MAAI,OAAO,QAAQ,SAAS;AAC1B,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,QAAQ,QAAQ,CAAC;AAC/D,iBAAa;AAAA,EACf;AAGA,QAAM,YAAwB,CAAC;AAC/B,MAAI,OAAO,QAAQ,YAAY;AAC7B,eAAW,QAAQ,OAAO,QAAQ,YAAY;AAC5C,UAAI,OAAgC,CAAC;AACrC,UAAI;AACF,eAAO,KAAK,MAAM,KAAK,SAAS,SAAS;AAAA,MAC3C,QAAQ;AAAA,MAER;AACA,gBAAU,KAAK;AAAA,QACb,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK,SAAS;AAAA,QACxB,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,KAAK;AAAA,MACT,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,KAAK;AAAA,UACZ,eAAe,OAAO;AAAA,UACtB,oBAAoB,KAAK;AAAA,UACzB,cAAc,KAAK;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,KAAK,MAAM;AAAA,IACxB,cAAc,KAAK,MAAM;AAAA,IACzB,aAAa,KAAK,MAAM;AAAA,EAC1B;AAGA,MAAI,aAAa;AACjB,UAAQ,OAAO,eAAe;AAAA,IAC5B,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,EACJ;AACA,MAAI,cAAc,eAAe,kBAAkB;AACjD,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;AAmBO,SAAS,oBAA4C;AAC1D,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,WAAW,oBAAI,IAAI;AAAA,IACnB,cAAc;AAAA,IACd,aAAa;AAAA,IACb,cAAc;AAAA,IACd,YAAY;AAAA,EACd;AACF;AAMO,SAAS,qBACd,OACA,OACe;AACf,QAAM,SAAwB,CAAC;AAG/B,MAAI,MAAM,MAAM,CAAC,MAAM,IAAI;AACzB,UAAM,KAAK,MAAM;AACjB,WAAO,KAAK,EAAE,MAAM,iBAAiB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAAA,EAC5D;AACA,MAAI,MAAM,OAAO;AACf,UAAM,QAAQ,MAAM;AAAA,EACtB;AAGA,QAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,MAAI,QAAQ;AAEV,QAAI,OAAO,MAAM,SAAS;AACxB,YAAM,QAAQ,OAAO,MAAM;AAC3B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,OAAO,EAAE,MAAM,OAAO,MAAM,QAAQ;AAAA,MACtC,CAAC;AAAA,IACH;AACA,QAAI,OAAO,MAAM,SAAS;AACxB,YAAM,aAA
a;AACnB,YAAM,QAAQ,OAAO,MAAM;AAC3B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,OAAO,EAAE,MAAM,OAAO,MAAM,QAAQ;AAAA,MACtC,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,MAAM,YAAY;AAC3B,iBAAW,iBAAiB,OAAO,MAAM,YAAY;AACnD,cAAM,QAAQ,cAAc;AAC5B,YAAI,WAAW,MAAM,UAAU,IAAI,KAAK;AAExC,YAAI,CAAC,UAAU;AACb,qBAAW,EAAE,IAAI,IAAI,MAAM,IAAI,WAAW,GAAG;AAC7C,gBAAM,UAAU,IAAI,OAAO,QAAQ;AAAA,QACrC;AAEA,YAAI,cAAc,IAAI;AACpB,mBAAS,KAAK,cAAc;AAAA,QAC9B;AACA,YAAI,cAAc,UAAU,MAAM;AAChC,mBAAS,OAAO,cAAc,SAAS;AAAA,QACzC;AACA,YAAI,cAAc,UAAU,WAAW;AACrC,mBAAS,aAAa,cAAc,SAAS;AAC7C,iBAAO,KAAK;AAAA,YACV,MAAM;AAAA,YACN;AAAA,YACA,OAAO;AAAA,cACL,YAAY,SAAS;AAAA,cACrB,UAAU,SAAS;AAAA,cACnB,eAAe,cAAc,SAAS;AAAA,YACxC;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,eAAe;AACxB,YAAM,eAAe,OAAO;AAC5B,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAAA,IAC3D;AAAA,EACF;AAGA,MAAI,MAAM,OAAO;AACf,UAAM,cAAc,MAAM,MAAM;AAChC,UAAM,eAAe,MAAM,MAAM;AAAA,EACnC;AAEA,SAAO;AACT;AAKO,SAAS,uBAAuB,OAA4C;AACjF,QAAM,cAA2B,CAAC;AAClC,MAAI;AACJ,MAAI,MAAM,MAAM;AACd,gBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK,CAAC;AAEnD,QAAI;AACF,uBAAiB,KAAK,MAAM,MAAM,IAAI;AAAA,IACxC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,YAAwB,CAAC;AAC/B,aAAW,CAAC,EAAE,QAAQ,KAAK,MAAM,WAAW;AAC1C,QAAI,OAAgC,CAAC;AACrC,QAAI,SAAS,WAAW;AACtB,UAAI;AACF,eAAO,KAAK,MAAM,SAAS,SAAS;AAAA,MACtC,QAAQ;AAAA,MAER;AAAA,IACF;AACA,cAAU,KAAK;AAAA,MACb,YAAY,SAAS;AAAA,MACrB,UAAU,SAAS;AAAA,MACnB,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,MAAM;AAAA,MACV,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,MAAM;AAAA,UACb,eAAe,MAAM;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,MAAM;AAAA,IACnB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM,cAAc,MAAM;AAAA,EACzC;AAGA,MAAI,aAAa;AACjB,UAAQ,MAAM,cAAc;AAAA,IAC1B,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,IACF,KAAK;AACH,mBAAa;AACb;AAAA,EACJ;AACA,MAAI,MAAM,cAAc,eAAe,kBAAkB;AACvD,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;;;AC/hBA,IAAM,iBAAiB;AAKvB,IAAM,sBAAuC;AAAA,EAC3C,WAAW;AAAA,EACX,OAAO;AAAA,EACP,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,SAAS,8BAAmE;AAEjF,MAAI,cAA2D;AAE/D,SAAO;AAAA,IACL,aAAa,UAAgD;AAC3D,oBAAc;AAAA,IAChB;AAAA,IAEA,KAAK,SAAyD;AAE5D,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,QAAgD;AAAA,QACpD;AAAA,QACA,cAAc;AAAA,QAEd,IAAI,WAAiD;AACnD,iBAAO;AAAA,QACT;AAAA,QAEA,MAAM,SAAS,SAAoE;AACjF,gBAAM,SAAS,MAAM;AAAA,YACnB,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,gBAAM,OAAO,iBAAiB,SAAS,OAAO;AAE9C,gBAAM,WAAW,MAAM;AAAA,YACrB;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,SAAS;AAAA,gBACP,gBAAgB;AAAA,gBAChB,eAAe,UAAU,MAAM;AAAA,cACjC;AAAA,cACA,MAAM,KAAK,UAAU,IAAI;AAAA,cACzB,QAAQ,QAAQ;AAAA,YAClB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,iBAAO,kBAAkB,IAAI;AAAA,QAC/B;AAAA,QAEA,OAAO,SAA+D;AACpE,gBAAM,QAAQ,kBAAkB;AAChC,cAAI;AACJ,cAAI;AAEJ,gBAAM,kBAAkB,IAAI,QAAqB,CAAC,SAAS,WAAW;AACpE,8BAAkB;AAClB,6BAAiB;AAAA,UACnB,CAAC;AAED,0BAAgB,iBAA6D;AAC3E,gBAAI;AACF,oBAAM,SAAS,MAAM;AAAA,gBACnB,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAEA,oBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,oBAAM,OAAO,iBAAiB,SAAS,OAAO;AAC9C,mBAAK,SAAS;AACd,mBAAK,iBAAiB,EAAE,eAAe,KAAK;AAE5C,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,kBACE,QAAQ;AAAA,kBACR,SAAS;AAAA,oBACP,gBAAgB;AAAA,oBAChB,eAAe,UAAU,MAAM;AAAA,kBACjC;AAAA,kBACA,MAAM,KAAK,UAAU,IAAI;AAAA,kBACzB,QAAQ,QAAQ;AAAA,gBAClB;AAAA,gBACA,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF;AAEA,kBAA
I,CAAC,SAAS,IAAI;AAChB,sBAAM,QAAQ,MAAM,mBAAmB,UAAU,UAAU,KAAK;AAChE,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,kBAAI,CAAC,SAAS,MAAM;AAClB,sBAAM,QAAQ,IAAI;AAAA,kBAChB;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,+BAAiB,QAAQ,eAAe,SAAS,IAAI,GAAG;AAEtD,oBAAI,SAAS,UAAU;AACrB;AAAA,gBACF;AAGA,oBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,wBAAM,QAAQ;AAGd,sBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,0BAAM,YAAY,MAAM;AACxB,0BAAM,QAAQ,IAAI;AAAA,sBAChB,UAAU,WAAW;AAAA,sBACrB;AAAA,sBACA;AAAA,sBACA;AAAA,oBACF;AACA,mCAAe,KAAK;AACpB,0BAAM;AAAA,kBACR;AAEA,wBAAM,YAAY,qBAAqB,OAAO,KAAK;AACnD,6BAAW,SAAS,WAAW;AAC7B,0BAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAGA,8BAAgB,uBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;ACxKO,SAASA,kBACd,SACA,SACwB;AACxB,QAAM,SAAS,QAAQ,UAAW,CAAC;AAGnC,QAAM,gBAAwC;AAAA,IAC5C,GAAG;AAAA,IACH,OAAO;AAAA,IACP,OAAO,oBAAoB,QAAQ,UAAU,QAAQ,MAAM;AAAA,EAC7D;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,kBAAc,QAAQ,QAAQ,MAAM,IAAIC,cAAa;AAAA,EACvD;AAGA,MAAI,QAAQ,WAAW;AACrB,UAAM,SAAkC;AAAA,MACtC,MAAM;AAAA,MACN,YAAY,QAAQ,UAAU;AAAA,MAC9B,UAAU,QAAQ,UAAU;AAAA,MAC5B,GAAI,QAAQ,UAAU,yBAAyB,SAC3C,EAAE,sBAAsB,QAAQ,UAAU,qBAAqB,IAC/D,EAAE,sBAAsB,MAAM;AAAA,IACpC;AACA,QAAI,QAAQ,UAAU,aAAa;AACjC,aAAO,cAAc,QAAQ,UAAU;AAAA,IACzC;AAEA,kBAAc,OAAO;AAAA,MACnB,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,QACN,aAAa,QAAQ,UAAU;AAAA,QAC/B;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,oBACP,UACA,QACqC;AACrC,QAAM,SAAqC,CAAC;AAE5C,MAAI,QAAQ;AACV,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQC,kBAAiB,OAAO;AACtC,WAAO,KAAK,GAAG,KAAK;AAAA,EACtB;AAGA,MAAI,OAAO,WAAW,KAAK,OAAO,CAAC,GAAG,SAAS,WAAW;AACxD,UAAM,OAAO,OAAO,CAAC;AACrB,QAAI,KAAK,SAAS,UAAU,OAAO,KAAK,YAAY,UAAU;AAC5D,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAASC,oBAAgD,SAAmB;AAC1E,SAAO,QAAQ,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE,SAAS,QAAQ;AAC9D;AAKA,SAASD,kBAAiB,SAA8C;AACtE,MAAI,cAAc,OAAO,GAAG;AAC1B,UAAM,eAAeC,oBAAmB,QAAQ,OAAO;AAEvD,QAAI,aAAa,WAAW,KAAK,aAAa,CAAC,GAAG,SAAS,QAAQ;AACjE,aAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAU,aAAa,CAAC,EAAgB;AAAA,QAC1C;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS,aAAa,IAAI,oBAAoB;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAEA,MAAI,mBAAmB,OAAO,GAAG;AAC/B,UAAM,eAAeA,oBAAmB,QAAQ,OAAO;AACvD,UAAM,QAAoC,CAAC;AAG3C,UAAM,eAA6C,aAChD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,OAAmC;AAAA,MACvC,MAAM;AAAA,MACN,MAAM,EAAE;AAAA,IACV,EAAE;AAGJ,QAAI,aAAa,SAAS,GAAG;AAC3B,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,aAAa,QAAQ,UAAU;AAGrC,UAAM,oBAAoB,YAAY;AAEtC,QAAI,qBAAqB,kBAAkB,SAAS,GAAG;AACrD,iBAAW,MAAM,mBAAmB;AAClC,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,SAAS,GAAG;AAAA,UACZ,MAAM,GAAG;AAAA,UACT,WAAW,GAAG;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC5D,iBAAW,QAAQ,QAAQ,WAAW;AACpC,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,IAAI,MAAM,KAAK,UAAU;AAAA,UACzB,SAAS,KAAK;AAAA,UACd,MAAM,KAAK;AAAA,UACX,WAAW,KAAK,UAAU,KAAK,SAAS;AAAA,QAC1C,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,MAAI,oBAAoB,OAAO,GAAG;AAEhC,WAAO,QAAQ,QAAQ,IAAI,CAAC,YAAY;AAAA,MACtC,MAAM;AAAA,MACN,SAAS,OAAO;AAAA,MAChB,QACE,OAAO,OAAO,WAAW,WACrB,OAAO,SACP,KAAK,UAAU,OAAO,MAAM;AAAA,IACpC,EAAE;AAAA,EACJ;AAEA,SAAO,CAAC;AACV;AAKA,SAAS,qBAAqB,OAAiD;AAC7E,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,EAAE,MAAM,
cAAc,MAAM,MAAM,KAAK;AAAA,IAEhD,KAAK,SAAS;AACZ,YAAM,aAAa;AACnB,UAAI,WAAW,OAAO,SAAS,UAAU;AACvC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,QAAQ,WAAW,QAAQ,WAAW,WAAW,OAAO,IAAI;AAAA,QACzE;AAAA,MACF;AAEA,UAAI,WAAW,OAAO,SAAS,OAAO;AACpC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,WAAW,OAAO;AAAA,QAC/B;AAAA,MACF;AAEA,UAAI,WAAW,OAAO,SAAS,SAAS;AAEtC,cAAM,SAAS;AAAA,UACb,MAAM,KAAK,WAAW,OAAO,IAAI,EAC9B,IAAI,CAAC,MAAM,OAAO,aAAa,CAAC,CAAC,EACjC,KAAK,EAAE;AAAA,QACZ;AACA,eAAO;AAAA,UACL,MAAM;AAAA,UACN,WAAW,QAAQ,WAAW,QAAQ,WAAW,MAAM;AAAA,QACzD;AAAA,MACF;AAEA,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,6BAA6B,MAAM,IAAI,EAAE;AAAA,EAC7D;AACF;AAKA,SAASF,eAAc,MAAiC;AACtD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,KAAK;AAAA,IACX,aAAa,KAAK;AAAA,IAClB,YAAY;AAAA,MACV,MAAM;AAAA,MACN,YAAY,KAAK,WAAW;AAAA,MAC5B,UAAU,KAAK,WAAW;AAAA,MAC1B,GAAI,KAAK,WAAW,yBAAyB,SACzC,EAAE,sBAAsB,KAAK,WAAW,qBAAqB,IAC7D,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAASG,mBAAkB,MAA4C;AAE5E,QAAM,cAA2B,CAAC;AAClC,QAAM,YAAwB,CAAC;AAC/B,QAAM,oBAKD,CAAC;AACN,MAAI,aAAa;AACjB,MAAI;AAEJ,aAAW,QAAQ,KAAK,QAAQ;AAC9B,QAAI,KAAK,SAAS,WAAW;AAC3B,YAAM,cAAc;AACpB,iBAAW,WAAW,YAAY,SAAS;AACzC,YAAI,QAAQ,SAAS,eAAe;AAClC,sBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,QAAQ,KAAK,CAAC;AAGrD,cAAI,mBAAmB,QAAW;AAChC,gBAAI;AACF,+BAAiB,KAAK,MAAM,QAAQ,IAAI;AAAA,YAC1C,QAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF,WAAW,QAAQ,SAAS,WAAW;AACrC,sBAAY,KAAK,EAAE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,CAAC;AACxD,uBAAa;AAAA,QACf;AAAA,MACF;AAAA,IACF,WAAW,KAAK,SAAS,iBAAiB;AACxC,YAAM,eAAe;AACrB,UAAI,OAAgC,CAAC;AACrC,UAAI;AACF,eAAO,KAAK,MAAM,aAAa,SAAS;AAAA,MAC1C,QAAQ;AAAA,MAER;AACA,gBAAU,KAAK;AAAA,QACb,YAAY,aAAa;AAAA,QACzB,UAAU,aAAa;AAAA,QACvB,WAAW;AAAA,MACb,CAAC;AACD,wBAAkB,KAAK;AAAA,QACrB,IAAI,aAAa;AAAA,QACjB,SAAS,aAAa;AAAA,QACtB,MAAM,aAAa;AAAA,QACnB,WAAW,aAAa;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,KAAK;AAAA,MACT,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,KAAK;AAAA,UACZ,QAAQ,KAAK;AAAA;AAAA,UAEb,aAAa,KAAK;AAAA,UAClB,mBACE,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,KAAK,MAAM;AAAA,IACxB,cAAc,KAAK,MAAM;AAAA,IACzB,aAAa,KAAK,MAAM;AAAA,EAC1B;AAGA,MAAI,aAAa;AACjB,MAAI,KAAK,WAAW,aAAa;AAC/B,iBAAa,UAAU,SAAS,IAAI,aAAa;AAAA,EACnD,WAAW,KAAK,WAAW,cAAc;AACvC,iBAAa,KAAK,oBAAoB,WAAW,sBAC7C,eACA;AAAA,EACN,WAAW,KAAK,WAAW,UAAU;AACnC,iBAAa;AAAA,EACf;AACA,MAAI,cAAc,eAAe,SAAS;AACxC,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;AAsBO,SAASC,qBAA0C;AACxD,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,aAAa,oBAAI,IAAI;AAAA,IACrB,WAAW,oBAAI,IAAI;AAAA,IACnB,QAAQ;AAAA,IACR,aAAa;AAAA,IACb,cAAc;AAAA,IACd,YAAY;AAAA,EACd;AACF;AAMO,SAASC,sBACd,OACA,OACe;AACf,QAAM,SAAwB,CAAC;AAE/B,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,YAAM,KAAK,MAAM,SAAS;AAC1B,YAAM,QAAQ,MAAM,SAAS;AAC7B,aAAO,KAAK,EAAE,MAAM,iBAAiB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AAC1D;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf,UAAI,MAAM,SAAS,OAAO;AACxB,cAAM,cAAc,MAAM,SAAS,MAAM;AACzC,cAAM,eAAe,MAAM,SAAS,MAAM;AAAA,MAC5C;AACA,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AACzD;AAAA,IAEF,KAAK;AACH,YAAM,SAAS;AACf,aAAO,KAAK,EAAE,MAAM,gBAAgB,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC;AACzD;AAAA,IAEF,KAAK;AACH,UAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,cAAM,eAAe,MAAM;AAC3B,cAAM,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY,KAAK;AAAA,UAC1D,WAAW;AAAA,QACb;AACA,iBAAS,SAAS,aAAa;AAC/B,iBAAS,SAAS,aAAa;AAC/B,iBAAS,OAAO,aAAa;AAC7B,YAAI,aAAa,WAAW;AAC1B,mBAAS,YAAY,aAAa;AAAA,QACpC;AACA,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,CAAC;AA
AA,MACV,CAAC;AACD;AAAA,IAEF,KAAK;AACH,UAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,cAAM,eAAe,MAAM;AAC3B,cAAM,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY,KAAK;AAAA,UAC1D,WAAW;AAAA,QACb;AACA,iBAAS,SAAS,aAAa;AAC/B,iBAAS,SAAS,aAAa;AAC/B,iBAAS,OAAO,aAAa;AAC7B,YAAI,aAAa,WAAW;AAC1B,mBAAS,YAAY,aAAa;AAAA,QACpC;AACA,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,CAAC;AAAA,MACV,CAAC;AACD;AAAA,IAEF,KAAK;AAEH,YAAM,cAAc,MAAM,YAAY,IAAI,MAAM,YAAY,KAAK;AACjE,YAAM,YAAY,IAAI,MAAM,cAAc,cAAc,MAAM,KAAK;AACnE,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,EAAE,MAAM,MAAM,MAAM;AAAA,MAC7B,CAAC;AACD;AAAA,IAEF,KAAK;AACH,YAAM,YAAY,IAAI,MAAM,cAAc,MAAM,IAAI;AACpD;AAAA,IAEF,KAAK,0BAA0B;AAC7B,YAAM,aAAa;AACnB,YAAM,iBAAiB,MAAM,YAAY,IAAI,MAAM,YAAY,KAAK;AACpE,YAAM,YAAY,IAAI,MAAM,cAAc,iBAAiB,MAAM,KAAK;AACtE,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO,EAAE,MAAM,MAAM,MAAM;AAAA,MAC7B,CAAC;AACD;AAAA,IACF;AAAA,IAEA,KAAK;AACH,YAAM,aAAa;AACnB,YAAM,YAAY,IAAI,MAAM,cAAc,MAAM,OAAO;AACvD;AAAA,IAEF,KAAK,0CAA0C;AAE7C,UAAI,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY;AACrD,UAAI,CAAC,UAAU;AACb,mBAAW,EAAE,WAAW,GAAG;AAC3B,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,UAAI,MAAM,WAAW,CAAC,SAAS,QAAQ;AACrC,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,UAAI,MAAM,WAAW,CAAC,SAAS,QAAQ;AACrC,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,eAAS,aAAa,MAAM;AAC5B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,MAAM;AAAA,QACb,OAAO;AAAA,UACL,YAAY,SAAS,UAAU,SAAS,UAAU;AAAA,UAClD,UAAU,SAAS;AAAA,UACnB,eAAe,MAAM;AAAA,QACvB;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAAA,IAEA,KAAK,yCAAyC;AAE5C,UAAI,WAAW,MAAM,UAAU,IAAI,MAAM,YAAY;AACrD,UAAI,CAAC,UAAU;AACb,mBAAW,EAAE,WAAW,GAAG;AAC3B,cAAM,UAAU,IAAI,MAAM,cAAc,QAAQ;AAAA,MAClD;AACA,UAAI,MAAM,SAAS;AACjB,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,UAAI,MAAM,SAAS;AACjB,iBAAS,SAAS,MAAM;AAAA,MAC1B;AACA,eAAS,OAAO,MAAM;AACtB,eAAS,YAAY,MAAM;AAC3B;AAAA,IACF;AAAA,IAEA,KAAK;AAEH;AAAA,IAEF;AAEE;AAAA,EACJ;AAEA,SAAO;AACT;AAKO,SAASC,wBAAuB,OAA0C;AAC/E,QAAM,cAA2B,CAAC;AAClC,MAAI;AAGJ,aAAW,CAAC,EAAE,IAAI,KAAK,MAAM,aAAa;AACxC,QAAI,MAAM;AACR,kBAAY,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAEvC,UAAI,mBAAmB,QAAW;AAChC,YAAI;AACF,2BAAiB,KAAK,MAAM,IAAI;AAAA,QAClC,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,YAAwB,CAAC;AAC/B,QAAM,oBAKD,CAAC;AACN,aAAW,CAAC,EAAE,QAAQ,KAAK,MAAM,WAAW;AAC1C,QAAI,OAAgC,CAAC;AACrC,QAAI,SAAS,WAAW;AACtB,UAAI;AACF,eAAO,KAAK,MAAM,SAAS,SAAS;AAAA,MACtC,QAAQ;AAAA,MAER;AAAA,IACF;AACA,UAAM,SAAS,SAAS,UAAU;AAClC,UAAM,SAAS,SAAS,UAAU,SAAS,UAAU;AACrD,UAAM,OAAO,SAAS,QAAQ;AAC9B,cAAU,KAAK;AAAA,MACb,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,WAAW;AAAA,IACb,CAAC;AAED,QAAI,UAAU,UAAU,MAAM;AAC5B,wBAAkB,KAAK;AAAA,QACrB,IAAI;AAAA,QACJ,SAAS;AAAA,QACT;AAAA,QACA,WAAW,SAAS;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,UAAU,IAAI;AAAA,IAClB;AAAA,IACA,UAAU,SAAS,IAAI,YAAY;AAAA,IACnC;AAAA,MACE,IAAI,MAAM;AAAA,MACV,UAAU;AAAA,QACR,QAAQ;AAAA,UACN,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA;AAAA,UAEd,aAAa,MAAM;AAAA,UACnB,mBACE,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAoB;AAAA,IACxB,aAAa,MAAM;AAAA,IACnB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM,cAAc,MAAM;AAAA,EACzC;AAGA,MAAI,aAAa;AACjB,MAAI,MAAM,WAAW,aAAa;AAChC,iBAAa,UAAU,SAAS,IAAI,aAAa;AAAA,EACnD,WAAW,MAAM,WAAW,UAAU;AACpC,iBAAa;AAAA,EACf;AACA,MAAI,MAAM,cAAc,eAAe,SAAS;AAC9C,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EACR;AACF;;;AClpBA,IAAM,2BAA2B;AAKjC,IAAMC,uBAAuC;AAAA,EAC3C,WAAW;AAAA,EACX,OAAO;AAAA,EACP,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,SAAS,4BAA+D;AAE7E,MAAI,cAAyD;AAE7D,SAAO;AAAA,IACL,aAAa,UAA8C;AACzD,oBAAc;AAAA,IAChB;AAAA,IAEA,KAAK,SAAuD;AAE1D,UAAI,CAAC,aAAa
;AAChB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,QAA8C;AAAA,QAClD;AAAA,QACA,cAAcA;AAAA,QAEd,IAAI,WAA+C;AACjD,iBAAO;AAAA,QACT;AAAA,QAEA,MAAM,SAAS,SAAkE;AAC/E,gBAAM,SAAS,MAAM;AAAA,YACnB,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,gBAAM,OAAOC,kBAAiB,SAAS,OAAO;AAE9C,gBAAM,WAAW,MAAM;AAAA,YACrB;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,SAAS;AAAA,gBACP,gBAAgB;AAAA,gBAChB,eAAe,UAAU,MAAM;AAAA,cACjC;AAAA,cACA,MAAM,KAAK,UAAU,IAAI;AAAA,cACzB,QAAQ,QAAQ;AAAA,YAClB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,OAAQ,MAAM,SAAS,KAAK;AAGlC,cAAI,KAAK,WAAW,YAAY,KAAK,OAAO;AAC1C,kBAAM,IAAI;AAAA,cACR,KAAK,MAAM;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAEA,iBAAOC,mBAAkB,IAAI;AAAA,QAC/B;AAAA,QAEA,OAAO,SAA6D;AAClE,gBAAM,QAAQC,mBAAkB;AAChC,cAAI;AACJ,cAAI;AAEJ,gBAAM,kBAAkB,IAAI,QAAqB,CAAC,SAAS,WAAW;AACpE,8BAAkB;AAClB,6BAAiB;AAAA,UACnB,CAAC;AAED,0BAAgB,iBAA6D;AAC3E,gBAAI;AACF,oBAAM,SAAS,MAAM;AAAA,gBACnB,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAEA,oBAAM,UAAU,QAAQ,OAAO,WAAW;AAC1C,oBAAM,OAAOF,kBAAiB,SAAS,OAAO;AAC9C,mBAAK,SAAS;AAEd,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,kBACE,QAAQ;AAAA,kBACR,SAAS;AAAA,oBACP,gBAAgB;AAAA,oBAChB,eAAe,UAAU,MAAM;AAAA,kBACjC;AAAA,kBACA,MAAM,KAAK,UAAU,IAAI;AAAA,kBACzB,QAAQ,QAAQ;AAAA,gBAClB;AAAA,gBACA,QAAQ;AAAA,gBACR;AAAA,gBACA;AAAA,cACF;AAEA,kBAAI,CAAC,SAAS,IAAI;AAChB,sBAAM,QAAQ,MAAM,mBAAmB,UAAU,UAAU,KAAK;AAChE,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,kBAAI,CAAC,SAAS,MAAM;AAClB,sBAAM,QAAQ,IAAI;AAAA,kBAChB;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,+BAAe,KAAK;AACpB,sBAAM;AAAA,cACR;AAEA,+BAAiB,QAAQ,eAAe,SAAS,IAAI,GAAG;AAEtD,oBAAI,SAAS,UAAU;AACrB;AAAA,gBACF;AAGA,oBAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,wBAAM,QAAQ;AAGd,sBAAI,MAAM,SAAS,SAAS;AAC1B,0BAAM,aAAa;AACnB,0BAAM,QAAQ,IAAI;AAAA,sBAChB,WAAW,MAAM;AAAA,sBACjB;AAAA,sBACA;AAAA,sBACA;AAAA,oBACF;AACA,mCAAe,KAAK;AACpB,0BAAM;AAAA,kBACR;AAEA,wBAAM,YAAYG,sBAAqB,OAAO,KAAK;AACnD,6BAAW,YAAY,WAAW;AAChC,0BAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAGA,8BAAgBC,wBAAuB,KAAK,CAAC;AAAA,YAC/C,SAAS,OAAO;AACd,6BAAe,KAAc;AAC7B,oBAAM;AAAA,YACR;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,CAAC,OAAO,aAAa,IAAI;AACvB,qBAAO,eAAe;AAAA,YACxB;AAAA,YACA,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;ACnJA,SAAS,uBAAuC;AAE9C,MAAI,iBAA8C;AAGlD,QAAM,mBAAmB,0BAA0B;AACnD,QAAM,qBAAqB,4BAA4B;AAEvD,QAAM,KAAK,SACT,SACA,SACuC;AACvC,UAAM,UAAU,SAAS,OAAO;AAChC,qBAAiB;AACjB,WAAO,EAAE,SAAS,SAAS;AAAA,EAC7B;AAGA,QAAM,aAAa;AAAA,IACjB,IAAI,MAAwC;AAC1C,aAAO,mBAAmB,gBACrB,qBACA;AAAA,IACP;AAAA,EACF;AAGA,SAAO,iBAAiB,IAAI;AAAA,IAC1B,MAAM;AAAA,MACJ,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,IACA,YAAY;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,cAAc;AAAA,IAChB;AAAA,EACF,CAAC;AAED,QAAM,WAAW;AAGjB,mBAAiB,eAAe,QAAyD;AACzF,qBAAmB,eAAe,QAA2D;AAE7F,SAAO;AACT;AA6BO,IAAM,SAAS,qBAAqB;","names":["transformRequest","transformTool","transformMessage","filterValidContent","transformResponse","createStreamState","transformStreamEvent","buildResponseFromState","OPENAI_CAPABILITIES","transformRequest","transformResponse","createStreamState","transformStreamEvent","buildResponseFromState"]}
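The sourcesContent embedded in dist/openai/index.js.map above carries the readable provider source, including the JSDoc that documents switching between the Responses API (default) and the legacy Chat Completions API. A minimal usage sketch adapted from that JSDoc follows; the subpath import locations are an assumption based on the dist/ layout, not something this diff confirms.

```ts
// Sketch adapted from the JSDoc embedded in the source map above.
// The '@providerprotocol/ai' subpath imports are assumed from the dist/ layout.
import { openai } from '@providerprotocol/ai/openai';
import { llm } from '@providerprotocol/ai';

// Responses API is the default mode for openai(...)
const model = llm({
  model: openai('gpt-4o'),
  params: { max_tokens: 1000 },
});

// The legacy Chat Completions API is opted into per model reference
const legacyModel = llm({
  model: openai('gpt-4o', { api: 'completions' }),
  params: { max_tokens: 1000 },
});

const turn = await model.generate('Hello!');
console.log(turn.response.text);
```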
package/dist/openrouter/index.d.ts
CHANGED
@@ -1,14 +1,12 @@
 import { b as Provider, M as ModelReference, a as LLMHandler } from '../provider-CUJWjgNl.js';
 
 /**
-* OpenRouter
-* These are passed through to the
+* OpenRouter Chat Completions API parameters
+* These are passed through to the /api/v1/chat/completions endpoint
 */
-interface OpenRouterLLMParams {
+interface OpenRouterCompletionsParams {
 /** Maximum number of tokens to generate */
 max_tokens?: number;
-/** Maximum output tokens (Responses API) */
-max_output_tokens?: number;
 /** Temperature for randomness (0.0 - 2.0) */
 temperature?: number;
 /** Top-p (nucleus) sampling (0.0 - 1.0) */
@@ -39,7 +37,7 @@ interface OpenRouterLLMParams {
 top_a?: number;
 /** Whether to enable parallel tool calls */
 parallel_tool_calls?: boolean;
-/** Response format for structured output
+/** Response format for structured output */
 response_format?: OpenRouterResponseFormat;
 /**
 * Prompt transforms to apply
@@ -62,7 +60,6 @@ interface OpenRouterLLMParams {
 provider?: OpenRouterProviderPreferences;
 /**
 * Predicted output for latency optimization
-* https://platform.openai.com/docs/guides/latency-optimization#use-predicted-outputs
 */
 prediction?: {
 type: 'content';
@@ -75,8 +72,22 @@ interface OpenRouterLLMParams {
 /** If true, returns the transformed request body sent to the provider */
 echo_upstream_body?: boolean;
 };
+}
+/**
+* OpenRouter Responses API parameters (Beta)
+* These are passed through to the /api/v1/responses endpoint
+*/
+interface OpenRouterResponsesParams {
+/** Maximum output tokens */
+max_output_tokens?: number;
+/** Temperature for randomness (0.0 - 2.0) */
+temperature?: number;
+/** Top-p (nucleus) sampling (0.0 - 1.0) */
+top_p?: number;
+/** Whether to enable parallel tool calls */
+parallel_tool_calls?: boolean;
 /**
-* Reasoning configuration
+* Reasoning configuration
 */
 reasoning?: {
 effort?: 'low' | 'medium' | 'high';
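The hunk above splits the former single parameter interface into OpenRouterCompletionsParams and OpenRouterResponsesParams. A small sketch of the two shapes, using only fields visible in this diff; the subpath import path is an assumption.

```ts
// Illustrative only: field names come from the interfaces added in this diff;
// the '@providerprotocol/ai/openrouter' import path is an assumption.
import type {
  OpenRouterCompletionsParams,
  OpenRouterResponsesParams,
} from '@providerprotocol/ai/openrouter';

// Chat Completions keeps max_tokens
const completionsParams: OpenRouterCompletionsParams = {
  max_tokens: 1024,
  temperature: 0.7,
  parallel_tool_calls: false,
};

// The Responses API (beta) uses max_output_tokens and carries the reasoning block
const responsesParams: OpenRouterResponsesParams = {
  max_output_tokens: 1024,
  temperature: 0.7,
  reasoning: { effort: 'medium' },
};
```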
@@ -141,6 +152,8 @@ type OpenRouterResponseFormat = {
 };
 };
 
+/** Union type for modalities interface */
+type OpenRouterLLMParamsUnion = OpenRouterCompletionsParams | OpenRouterResponsesParams;
 /**
 * OpenRouter provider options
 */
@@ -180,7 +193,7 @@ interface OpenRouterProvider extends Provider<OpenRouterProviderOptions> {
 readonly version: string;
 /** Supported modalities */
 readonly modalities: {
-llm: LLMHandler<OpenRouterLLMParams>;
+llm: LLMHandler<OpenRouterLLMParamsUnion>;
 };
 }
 /**
@@ -232,4 +245,4 @@ interface OpenRouterProvider extends Provider<OpenRouterProviderOptions> {
 */
 declare const openrouter: OpenRouterProvider;
 
-export { type OpenRouterAPIMode, type
+export { type OpenRouterAPIMode, type OpenRouterCompletionsParams, type OpenRouterConfig, type OpenRouterModelOptions, type OpenRouterModelReference, type OpenRouterProviderPreferences, type OpenRouterResponsesParams, openrouter };
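With the modalities handler widened to LLMHandler<OpenRouterLLMParamsUnion> and OpenRouterAPIMode now exported, the provider appears to support the same per-model API selection as the OpenAI provider shown earlier. A hedged sketch follows; the { api: 'responses' } option name and the model id are assumptions by analogy, not confirmed by this diff.

```ts
// Assumed usage, mirroring the OpenAI provider's documented { api } option;
// the option name for openrouter(...) and the model id are illustrative only.
import { openrouter } from '@providerprotocol/ai/openrouter';
import { llm } from '@providerprotocol/ai';

// Default mode: Chat Completions params (max_tokens et al.)
const chat = llm({
  model: openrouter('openai/gpt-4o'),
  params: { max_tokens: 1024 },
});

// Beta Responses API mode: Responses params (max_output_tokens et al.)
const responses = llm({
  model: openrouter('openai/gpt-4o', { api: 'responses' }),
  params: { max_output_tokens: 1024 },
});

const turn = await chat.generate('Hello!');
console.log(turn.response.text);
```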