@mariozechner/pi-ai 0.23.2 → 0.23.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AAAA,OAAO,EASN,KAAK,aAAa,EAClB,MAAM,eAAe,CAAC;AAEvB,OAAO,KAAK,EAMX,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAMrB,MAAM,WAAW,aAAc,SAAQ,aAAa;IACnD,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC;IACrC,QAAQ,CAAC,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,KAAK,CAAC,EAAE,aAAa,CAAC;KACtB,CAAC;CACF;AAKD,eAAO,MAAM,YAAY,EAAE,cAAc,CAAC,sBAAsB,CA+M/D,CAAC","sourcesContent":["import {\n\ttype Content,\n\tFinishReason,\n\tFunctionCallingConfigMode,\n\ttype GenerateContentConfig,\n\ttype GenerateContentParameters,\n\tGoogleGenAI,\n\ttype Part,\n\ttype ThinkingConfig,\n\ttype ThinkingLevel,\n} from \"@google/genai\";\nimport { calculateCost } from \"../models.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\n\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface GoogleOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"any\";\n\tthinking?: {\n\t\tenabled: boolean;\n\t\tbudgetTokens?: number; // -1 for dynamic, 0 to disable\n\t\tlevel?: ThinkingLevel;\n\t};\n}\n\n// Counter for generating unique tool call IDs\nlet toolCallCounter = 0;\n\nexport const streamGoogle: StreamFunction<\"google-generative-ai\"> = (\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions?: GoogleOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"google-generative-ai\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst client = createClient(model, options?.apiKey);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\tconst googleStream = await client.models.generateContentStream(params);\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tlet currentBlock: TextContent | ThinkingContent | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tfor await (const chunk of googleStream) {\n\t\t\t\tconst candidate = chunk.candidates?.[0];\n\t\t\t\tif (candidate?.content?.parts) {\n\t\t\t\t\tfor (const part of candidate.content.parts) {\n\t\t\t\t\t\tif (part.text !== undefined) {\n\t\t\t\t\t\t\tconst isThinking = part.thought === true;\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\t(isThinking && currentBlock.type !== \"thinking\") ||\n\t\t\t\t\t\t\t\t(!isThinking && currentBlock.type !== \"text\")\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blocks.length - 1,\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\t\tpartial: 
output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif (isThinking) {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\", thinkingSignature: undefined };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\t\tcurrentBlock.thinking += part.text;\n\t\t\t\t\t\t\t\tcurrentBlock.thinkingSignature = part.thoughtSignature;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcurrentBlock.text += part.text;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (part.functionCall) {\n\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t// Generate unique ID if not provided or if it's a duplicate\n\t\t\t\t\t\t\tconst providedId = part.functionCall.id;\n\t\t\t\t\t\t\tconst needsNewId =\n\t\t\t\t\t\t\t\t!providedId || output.content.some((b) => b.type === \"toolCall\" && b.id === providedId);\n\t\t\t\t\t\t\tconst toolCallId = needsNewId\n\t\t\t\t\t\t\t\t? 
`${part.functionCall.name}_${Date.now()}_${++toolCallCounter}`\n\t\t\t\t\t\t\t\t: providedId;\n\n\t\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\tid: toolCallId,\n\t\t\t\t\t\t\t\tname: part.functionCall.name || \"\",\n\t\t\t\t\t\t\t\targuments: part.functionCall.args as Record<string, any>,\n\t\t\t\t\t\t\t\t...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\toutput.content.push(toolCall);\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (candidate?.finishReason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(candidate.finishReason);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\")) {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (chunk.usageMetadata) {\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput: chunk.usageMetadata.promptTokenCount || 0,\n\t\t\t\t\t\toutput:\n\t\t\t\t\t\t\t(chunk.usageMetadata.candidatesTokenCount || 0) + (chunk.usageMetadata.thoughtsTokenCount || 0),\n\t\t\t\t\t\tcacheRead: chunk.usageMetadata.cachedContentTokenCount || 0,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: chunk.usageMetadata.totalTokenCount || 0,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (currentBlock) {\n\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unkown error ocurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createClient(model: Model<\"google-generative-ai\">, apiKey?: string): GoogleGenAI {\n\tif (!apiKey) {\n\t\tif (!process.env.GEMINI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Gemini API key is required. 
Set GEMINI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.GEMINI_API_KEY;\n\t}\n\treturn new GoogleGenAI({\n\t\tapiKey,\n\t\thttpOptions: model.headers ? { headers: model.headers } : undefined,\n\t});\n}\n\nfunction buildParams(\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions: GoogleOptions = {},\n): GenerateContentParameters {\n\tconst contents = convertMessages(model, context);\n\n\tconst generationConfig: GenerateContentConfig = {};\n\tif (options.temperature !== undefined) {\n\t\tgenerationConfig.temperature = options.temperature;\n\t}\n\tif (options.maxTokens !== undefined) {\n\t\tgenerationConfig.maxOutputTokens = options.maxTokens;\n\t}\n\n\tconst config: GenerateContentConfig = {\n\t\t...(Object.keys(generationConfig).length > 0 && generationConfig),\n\t\t...(context.systemPrompt && { systemInstruction: sanitizeSurrogates(context.systemPrompt) }),\n\t\t...(context.tools && context.tools.length > 0 && { tools: convertTools(context.tools) }),\n\t};\n\n\tif (context.tools && context.tools.length > 0 && options.toolChoice) {\n\t\tconfig.toolConfig = {\n\t\t\tfunctionCallingConfig: {\n\t\t\t\tmode: mapToolChoice(options.toolChoice),\n\t\t\t},\n\t\t};\n\t} else {\n\t\tconfig.toolConfig = undefined;\n\t}\n\n\tif (options.thinking?.enabled && model.reasoning) {\n\t\tconst thinkingConfig: ThinkingConfig = { includeThoughts: true };\n\t\tif (options.thinking.level !== undefined) {\n\t\t\tthinkingConfig.thinkingLevel = options.thinking.level;\n\t\t} else if (options.thinking.budgetTokens !== undefined) {\n\t\t\tthinkingConfig.thinkingBudget = options.thinking.budgetTokens;\n\t\t}\n\t\tconfig.thinkingConfig = thinkingConfig;\n\t}\n\n\tif (options.signal) {\n\t\tif (options.signal.aborted) {\n\t\t\tthrow new Error(\"Request aborted\");\n\t\t}\n\t\tconfig.abortSignal = options.signal;\n\t}\n\n\tconst params: GenerateContentParameters = {\n\t\tmodel: model.id,\n\t\tcontents,\n\t\tconfig,\n\t};\n\n\treturn params;\n}\nfunction convertMessages(model: Model<\"google-generative-ai\">, context: Context): Content[] {\n\tconst contents: Content[] = [];\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: [{ text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst parts: Part[] = msg.content.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn { text: sanitizeSurrogates(item.text) };\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tinlineData: {\n\t\t\t\t\t\t\t\tmimeType: item.mimeType,\n\t\t\t\t\t\t\t\tdata: item.data,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredParts = !model.input.includes(\"image\") ? 
parts.filter((p) => p.text !== undefined) : parts;\n\t\t\t\tif (filteredParts.length === 0) continue;\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: filteredParts,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst parts: Part[] = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tparts.push({ text: sanitizeSurrogates(block.text) });\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tconst thinkingPart: Part = {\n\t\t\t\t\t\tthought: true,\n\t\t\t\t\t\tthoughtSignature: block.thinkingSignature,\n\t\t\t\t\t\ttext: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t};\n\t\t\t\t\tparts.push(thinkingPart);\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tconst part: Part = {\n\t\t\t\t\t\tfunctionCall: {\n\t\t\t\t\t\t\tid: block.id,\n\t\t\t\t\t\t\tname: block.name,\n\t\t\t\t\t\t\targs: block.arguments,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tif (block.thoughtSignature) {\n\t\t\t\t\t\tpart.thoughtSignature = block.thoughtSignature;\n\t\t\t\t\t}\n\t\t\t\t\tparts.push(part);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (parts.length === 0) continue;\n\t\t\tcontents.push({\n\t\t\t\trole: \"model\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Build parts array with functionResponse and/or images\n\t\t\tconst parts: Part[] = [];\n\n\t\t\t// Extract text and image content\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t.map((c) => (c as any).text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst imageBlocks = model.input.includes(\"image\") ? msg.content.filter((c) => c.type === \"image\") : [];\n\n\t\t\t// Always add functionResponse with text result (or placeholder if only images)\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tconst hasImages = imageBlocks.length > 0;\n\n\t\t\tparts.push({\n\t\t\t\tfunctionResponse: {\n\t\t\t\t\tid: msg.toolCallId,\n\t\t\t\t\tname: msg.toolName,\n\t\t\t\t\tresponse: {\n\t\t\t\t\t\tresult: hasText ? sanitizeSurrogates(textResult) : hasImages ? 
\"(see attached image)\" : \"\",\n\t\t\t\t\t\tisError: msg.isError,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t});\n\n\t\t\t// Add any images as inlineData parts\n\t\t\tfor (const imageBlock of imageBlocks) {\n\t\t\t\tparts.push({\n\t\t\t\t\tinlineData: {\n\t\t\t\t\t\tmimeType: (imageBlock as any).mimeType,\n\t\t\t\t\t\tdata: (imageBlock as any).data,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tcontents.push({\n\t\t\t\trole: \"user\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t}\n\t}\n\n\treturn contents;\n}\n\nfunction convertTools(tools: Tool[]): any[] | undefined {\n\tif (tools.length === 0) return undefined;\n\treturn [\n\t\t{\n\t\t\tfunctionDeclarations: tools.map((tool) => ({\n\t\t\t\tname: tool.name,\n\t\t\t\tdescription: tool.description,\n\t\t\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\t\t})),\n\t\t},\n\t];\n}\n\nfunction mapToolChoice(choice: string): FunctionCallingConfigMode {\n\tswitch (choice) {\n\t\tcase \"auto\":\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t\tcase \"none\":\n\t\t\treturn FunctionCallingConfigMode.NONE;\n\t\tcase \"any\":\n\t\t\treturn FunctionCallingConfigMode.ANY;\n\t\tdefault:\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t}\n}\n\nfunction mapStopReason(reason: FinishReason): StopReason {\n\tswitch (reason) {\n\t\tcase FinishReason.STOP:\n\t\t\treturn \"stop\";\n\t\tcase FinishReason.MAX_TOKENS:\n\t\t\treturn \"length\";\n\t\tcase FinishReason.BLOCKLIST:\n\t\tcase FinishReason.PROHIBITED_CONTENT:\n\t\tcase FinishReason.SPII:\n\t\tcase FinishReason.SAFETY:\n\t\tcase FinishReason.IMAGE_SAFETY:\n\t\tcase FinishReason.IMAGE_PROHIBITED_CONTENT:\n\t\tcase FinishReason.RECITATION:\n\t\tcase FinishReason.FINISH_REASON_UNSPECIFIED:\n\t\tcase FinishReason.OTHER:\n\t\tcase FinishReason.LANGUAGE:\n\t\tcase FinishReason.MALFORMED_FUNCTION_CALL:\n\t\tcase FinishReason.UNEXPECTED_TOOL_CALL:\n\t\tcase FinishReason.NO_IMAGE:\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
+ {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AAAA,OAAO,EAUN,KAAK,aAAa,EAClB,MAAM,eAAe,CAAC;AAEvB,OAAO,KAAK,EAOX,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAMrB,MAAM,WAAW,aAAc,SAAQ,aAAa;IACnD,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC;IACrC,QAAQ,CAAC,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,KAAK,CAAC,EAAE,aAAa,CAAC;KACtB,CAAC;CACF;AAKD,eAAO,MAAM,YAAY,EAAE,cAAc,CAAC,sBAAsB,CAoN/D,CAAC","sourcesContent":["import {\n\ttype Content,\n\tFinishReason,\n\tFunctionCallingConfigMode,\n\ttype GenerateContentConfig,\n\ttype GenerateContentParameters,\n\tGoogleGenAI,\n\ttype Part,\n\ttype Schema,\n\ttype ThinkingConfig,\n\ttype ThinkingLevel,\n} from \"@google/genai\";\nimport { calculateCost } from \"../models.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tImageContent,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\n\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface GoogleOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"any\";\n\tthinking?: {\n\t\tenabled: boolean;\n\t\tbudgetTokens?: number; // -1 for dynamic, 0 to disable\n\t\tlevel?: ThinkingLevel;\n\t};\n}\n\n// Counter for generating unique tool call IDs\nlet toolCallCounter = 0;\n\nexport const streamGoogle: StreamFunction<\"google-generative-ai\"> = (\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions?: GoogleOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"google-generative-ai\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst client = createClient(model, options?.apiKey);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\tconst googleStream = await client.models.generateContentStream(params);\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tlet currentBlock: TextContent | ThinkingContent | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tfor await (const chunk of googleStream) {\n\t\t\t\tconst candidate = chunk.candidates?.[0];\n\t\t\t\tif (candidate?.content?.parts) {\n\t\t\t\t\tfor (const part of candidate.content.parts) {\n\t\t\t\t\t\tif (part.text !== undefined) {\n\t\t\t\t\t\t\tconst isThinking = part.thought === true;\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\t(isThinking && currentBlock.type !== \"thinking\") ||\n\t\t\t\t\t\t\t\t(!isThinking && currentBlock.type !== \"text\")\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blocks.length - 1,\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\t\tpartial: 
output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif (isThinking) {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\", thinkingSignature: undefined };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\t\tcurrentBlock.thinking += part.text;\n\t\t\t\t\t\t\t\tcurrentBlock.thinkingSignature = part.thoughtSignature;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcurrentBlock.text += part.text;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (part.functionCall) {\n\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t// Generate unique ID if not provided or if it's a duplicate\n\t\t\t\t\t\t\tconst providedId = part.functionCall.id;\n\t\t\t\t\t\t\tconst needsNewId =\n\t\t\t\t\t\t\t\t!providedId || output.content.some((b) => b.type === \"toolCall\" && b.id === providedId);\n\t\t\t\t\t\t\tconst toolCallId = needsNewId\n\t\t\t\t\t\t\t\t? 
`${part.functionCall.name}_${Date.now()}_${++toolCallCounter}`\n\t\t\t\t\t\t\t\t: providedId;\n\n\t\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\tid: toolCallId,\n\t\t\t\t\t\t\t\tname: part.functionCall.name || \"\",\n\t\t\t\t\t\t\t\targuments: part.functionCall.args as Record<string, any>,\n\t\t\t\t\t\t\t\t...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\toutput.content.push(toolCall);\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (candidate?.finishReason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(candidate.finishReason);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\")) {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (chunk.usageMetadata) {\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput: chunk.usageMetadata.promptTokenCount || 0,\n\t\t\t\t\t\toutput:\n\t\t\t\t\t\t\t(chunk.usageMetadata.candidatesTokenCount || 0) + (chunk.usageMetadata.thoughtsTokenCount || 0),\n\t\t\t\t\t\tcacheRead: chunk.usageMetadata.cachedContentTokenCount || 0,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: chunk.usageMetadata.totalTokenCount || 0,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (currentBlock) {\n\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unkown error ocurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\t// Remove internal index property used during streaming\n\t\t\tfor (const block of output.content) {\n\t\t\t\tif (\"index\" in block) {\n\t\t\t\t\tdelete (block as { index?: number }).index;\n\t\t\t\t}\n\t\t\t}\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createClient(model: Model<\"google-generative-ai\">, apiKey?: string): GoogleGenAI {\n\tif (!apiKey) {\n\t\tif (!process.env.GEMINI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Gemini API key is required. 
Set GEMINI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.GEMINI_API_KEY;\n\t}\n\n\tconst httpOptions: { baseUrl?: string; headers?: Record<string, string> } = {};\n\tif (model.baseUrl) {\n\t\thttpOptions.baseUrl = model.baseUrl;\n\t}\n\tif (model.headers) {\n\t\thttpOptions.headers = model.headers;\n\t}\n\n\treturn new GoogleGenAI({\n\t\tapiKey,\n\t\thttpOptions: Object.keys(httpOptions).length > 0 ? httpOptions : undefined,\n\t});\n}\n\nfunction buildParams(\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions: GoogleOptions = {},\n): GenerateContentParameters {\n\tconst contents = convertMessages(model, context);\n\n\tconst generationConfig: GenerateContentConfig = {};\n\tif (options.temperature !== undefined) {\n\t\tgenerationConfig.temperature = options.temperature;\n\t}\n\tif (options.maxTokens !== undefined) {\n\t\tgenerationConfig.maxOutputTokens = options.maxTokens;\n\t}\n\n\tconst config: GenerateContentConfig = {\n\t\t...(Object.keys(generationConfig).length > 0 && generationConfig),\n\t\t...(context.systemPrompt && { systemInstruction: sanitizeSurrogates(context.systemPrompt) }),\n\t\t...(context.tools && context.tools.length > 0 && { tools: convertTools(context.tools) }),\n\t};\n\n\tif (context.tools && context.tools.length > 0 && options.toolChoice) {\n\t\tconfig.toolConfig = {\n\t\t\tfunctionCallingConfig: {\n\t\t\t\tmode: mapToolChoice(options.toolChoice),\n\t\t\t},\n\t\t};\n\t} else {\n\t\tconfig.toolConfig = undefined;\n\t}\n\n\tif (options.thinking?.enabled && model.reasoning) {\n\t\tconst thinkingConfig: ThinkingConfig = { includeThoughts: true };\n\t\tif (options.thinking.level !== undefined) {\n\t\t\tthinkingConfig.thinkingLevel = options.thinking.level;\n\t\t} else if (options.thinking.budgetTokens !== undefined) {\n\t\t\tthinkingConfig.thinkingBudget = options.thinking.budgetTokens;\n\t\t}\n\t\tconfig.thinkingConfig = thinkingConfig;\n\t}\n\n\tif (options.signal) {\n\t\tif (options.signal.aborted) {\n\t\t\tthrow new Error(\"Request aborted\");\n\t\t}\n\t\tconfig.abortSignal = options.signal;\n\t}\n\n\tconst params: GenerateContentParameters = {\n\t\tmodel: model.id,\n\t\tcontents,\n\t\tconfig,\n\t};\n\n\treturn params;\n}\nfunction convertMessages(model: Model<\"google-generative-ai\">, context: Context): Content[] {\n\tconst contents: Content[] = [];\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: [{ text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst parts: Part[] = msg.content.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn { text: sanitizeSurrogates(item.text) };\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tinlineData: {\n\t\t\t\t\t\t\t\tmimeType: item.mimeType,\n\t\t\t\t\t\t\t\tdata: item.data,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredParts = !model.input.includes(\"image\") ? 
parts.filter((p) => p.text !== undefined) : parts;\n\t\t\t\tif (filteredParts.length === 0) continue;\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: filteredParts,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst parts: Part[] = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tparts.push({ text: sanitizeSurrogates(block.text) });\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tconst thinkingPart: Part = {\n\t\t\t\t\t\tthought: true,\n\t\t\t\t\t\tthoughtSignature: block.thinkingSignature,\n\t\t\t\t\t\ttext: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t};\n\t\t\t\t\tparts.push(thinkingPart);\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tconst part: Part = {\n\t\t\t\t\t\tfunctionCall: {\n\t\t\t\t\t\t\tid: block.id,\n\t\t\t\t\t\t\tname: block.name,\n\t\t\t\t\t\t\targs: block.arguments,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tif (block.thoughtSignature) {\n\t\t\t\t\t\tpart.thoughtSignature = block.thoughtSignature;\n\t\t\t\t\t}\n\t\t\t\t\tparts.push(part);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (parts.length === 0) continue;\n\t\t\tcontents.push({\n\t\t\t\trole: \"model\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Build parts array with functionResponse and/or images\n\t\t\tconst parts: Part[] = [];\n\n\t\t\t// Extract text and image content\n\t\t\tconst textContent = msg.content.filter((c): c is TextContent => c.type === \"text\");\n\t\t\tconst textResult = textContent.map((c) => c.text).join(\"\\n\");\n\t\t\tconst imageContent = model.input.includes(\"image\")\n\t\t\t\t? msg.content.filter((c): c is ImageContent => c.type === \"image\")\n\t\t\t\t: [];\n\n\t\t\t// Always add functionResponse with text result (or placeholder if only images)\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tconst hasImages = imageContent.length > 0;\n\n\t\t\t// Use \"output\" key for success, \"error\" key for errors as per SDK documentation\n\t\t\tconst responseValue = hasText ? sanitizeSurrogates(textResult) : hasImages ? \"(see attached image)\" : \"\";\n\n\t\t\tparts.push({\n\t\t\t\tfunctionResponse: {\n\t\t\t\t\tid: msg.toolCallId,\n\t\t\t\t\tname: msg.toolName,\n\t\t\t\t\tresponse: msg.isError ? 
{ error: responseValue } : { output: responseValue },\n\t\t\t\t},\n\t\t\t});\n\n\t\t\t// Add any images as inlineData parts\n\t\t\tfor (const imageBlock of imageContent) {\n\t\t\t\tparts.push({\n\t\t\t\t\tinlineData: {\n\t\t\t\t\t\tmimeType: imageBlock.mimeType,\n\t\t\t\t\t\tdata: imageBlock.data,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tcontents.push({\n\t\t\t\trole: \"user\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t}\n\t}\n\n\treturn contents;\n}\n\nfunction convertTools(\n\ttools: Tool[],\n): { functionDeclarations: { name: string; description?: string; parameters: Schema }[] }[] | undefined {\n\tif (tools.length === 0) return undefined;\n\treturn [\n\t\t{\n\t\t\tfunctionDeclarations: tools.map((tool) => ({\n\t\t\t\tname: tool.name,\n\t\t\t\tdescription: tool.description,\n\t\t\t\tparameters: tool.parameters as Schema, // TypeBox generates JSON Schema compatible with SDK Schema type\n\t\t\t})),\n\t\t},\n\t];\n}\n\nfunction mapToolChoice(choice: string): FunctionCallingConfigMode {\n\tswitch (choice) {\n\t\tcase \"auto\":\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t\tcase \"none\":\n\t\t\treturn FunctionCallingConfigMode.NONE;\n\t\tcase \"any\":\n\t\t\treturn FunctionCallingConfigMode.ANY;\n\t\tdefault:\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t}\n}\n\nfunction mapStopReason(reason: FinishReason): StopReason {\n\tswitch (reason) {\n\t\tcase FinishReason.STOP:\n\t\t\treturn \"stop\";\n\t\tcase FinishReason.MAX_TOKENS:\n\t\t\treturn \"length\";\n\t\tcase FinishReason.BLOCKLIST:\n\t\tcase FinishReason.PROHIBITED_CONTENT:\n\t\tcase FinishReason.SPII:\n\t\tcase FinishReason.SAFETY:\n\t\tcase FinishReason.IMAGE_SAFETY:\n\t\tcase FinishReason.IMAGE_PROHIBITED_CONTENT:\n\t\tcase FinishReason.IMAGE_RECITATION:\n\t\tcase FinishReason.IMAGE_OTHER:\n\t\tcase FinishReason.RECITATION:\n\t\tcase FinishReason.FINISH_REASON_UNSPECIFIED:\n\t\tcase FinishReason.OTHER:\n\t\tcase FinishReason.LANGUAGE:\n\t\tcase FinishReason.MALFORMED_FUNCTION_CALL:\n\t\tcase FinishReason.UNEXPECTED_TOOL_CALL:\n\t\tcase FinishReason.NO_IMAGE:\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
@@ -188,8 +188,12 @@ export const streamGoogle = (model, context, options) => {
  stream.end();
  }
  catch (error) {
- for (const block of output.content)
- delete block.index;
+ // Remove internal index property used during streaming
+ for (const block of output.content) {
+ if ("index" in block) {
+ delete block.index;
+ }
+ }
  output.stopReason = options?.signal?.aborted ? "aborted" : "error";
  output.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);
  stream.push({ type: "error", reason: output.stopReason, error: output });
@@ -205,9 +209,16 @@ function createClient(model, apiKey) {
  }
  apiKey = process.env.GEMINI_API_KEY;
  }
+ const httpOptions = {};
+ if (model.baseUrl) {
+ httpOptions.baseUrl = model.baseUrl;
+ }
+ if (model.headers) {
+ httpOptions.headers = model.headers;
+ }
  return new GoogleGenAI({
  apiKey,
- httpOptions: model.headers ? { headers: model.headers } : undefined,
+ httpOptions: Object.keys(httpOptions).length > 0 ? httpOptions : undefined,
  });
  }
  function buildParams(model, context, options = {}) {
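
Note on the hunk above: 0.23.4 forwards an optional per-model baseUrl (in addition to custom headers) into the GoogleGenAI client's httpOptions. A minimal standalone sketch of that behavior, assuming a model object with optional baseUrl/headers fields as implied by the diff (the package's Model type is not reproduced here, so ModelLike is a hypothetical stand-in):

import { GoogleGenAI } from "@google/genai";

// Hypothetical shape for illustration; only baseUrl/headers matter for this sketch.
interface ModelLike {
	baseUrl?: string;
	headers?: Record<string, string>;
}

function makeClient(model: ModelLike, apiKey: string): GoogleGenAI {
	// Collect only the options that are actually set, mirroring the new createClient logic.
	const httpOptions: { baseUrl?: string; headers?: Record<string, string> } = {};
	if (model.baseUrl) httpOptions.baseUrl = model.baseUrl;
	if (model.headers) httpOptions.headers = model.headers;
	// Pass httpOptions only when non-empty; otherwise leave it undefined as before.
	return new GoogleGenAI({
		apiKey,
		httpOptions: Object.keys(httpOptions).length > 0 ? httpOptions : undefined,
	});
}
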
@@ -330,26 +341,25 @@ function convertMessages(model, context) {
  // Build parts array with functionResponse and/or images
  const parts = [];
  // Extract text and image content
- const textResult = msg.content
- .filter((c) => c.type === "text")
- .map((c) => c.text)
- .join("\n");
- const imageBlocks = model.input.includes("image") ? msg.content.filter((c) => c.type === "image") : [];
+ const textContent = msg.content.filter((c) => c.type === "text");
+ const textResult = textContent.map((c) => c.text).join("\n");
+ const imageContent = model.input.includes("image")
+ ? msg.content.filter((c) => c.type === "image")
+ : [];
  // Always add functionResponse with text result (or placeholder if only images)
  const hasText = textResult.length > 0;
- const hasImages = imageBlocks.length > 0;
+ const hasImages = imageContent.length > 0;
+ // Use "output" key for success, "error" key for errors as per SDK documentation
+ const responseValue = hasText ? sanitizeSurrogates(textResult) : hasImages ? "(see attached image)" : "";
  parts.push({
  functionResponse: {
  id: msg.toolCallId,
  name: msg.toolName,
- response: {
- result: hasText ? sanitizeSurrogates(textResult) : hasImages ? "(see attached image)" : "",
- isError: msg.isError,
- },
+ response: msg.isError ? { error: responseValue } : { output: responseValue },
  },
  });
  // Add any images as inlineData parts
- for (const imageBlock of imageBlocks) {
+ for (const imageBlock of imageContent) {
  parts.push({
  inlineData: {
  mimeType: imageBlock.mimeType,
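
Note on the hunk above: it changes the tool-result wire format. Instead of always sending response: { result, isError }, 0.23.4 sends { output: ... } for successful results and { error: ... } for failed ones. A rough sketch of the functionResponse parts this now produces, using only names visible in the diff; the ID and result values are invented for illustration:

import type { Part } from "@google/genai";

// Example part as 0.23.4 would emit it for a failed tool call.
const failedResult: Part = {
	functionResponse: {
		id: "get_weather_1712345678901_1", // generated tool call ID (illustrative)
		name: "get_weather",
		response: { error: "city not found" },
	},
};

// The same call succeeding carries an "output" key instead.
const okResult: Part = {
	functionResponse: {
		id: "get_weather_1712345678901_1",
		name: "get_weather",
		response: { output: "22°C and sunny" },
	},
};
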
@@ -373,7 +383,7 @@ function convertTools(tools) {
  functionDeclarations: tools.map((tool) => ({
  name: tool.name,
  description: tool.description,
- parameters: tool.parameters, // TypeBox already generates JSON Schema
+ parameters: tool.parameters, // TypeBox generates JSON Schema compatible with SDK Schema type
  })),
  },
  ];
@@ -402,6 +412,8 @@ function mapStopReason(reason) {
  case FinishReason.SAFETY:
  case FinishReason.IMAGE_SAFETY:
  case FinishReason.IMAGE_PROHIBITED_CONTENT:
+ case FinishReason.IMAGE_RECITATION:
+ case FinishReason.IMAGE_OTHER:
  case FinishReason.RECITATION:
  case FinishReason.FINISH_REASON_UNSPECIFIED:
  case FinishReason.OTHER:
@@ -1 +1 @@
- {"version":3,"file":"google.js","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AAAA,OAAO,EAEN,YAAY,EACZ,yBAAyB,EAGzB,WAAW,GAIX,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAc7C,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAElE,OAAO,EAAE,iBAAiB,EAAE,MAAM,wBAAwB,CAAC;AAW3D,8CAA8C;AAC9C,IAAI,eAAe,GAAG,CAAC,CAAC;AAExB,MAAM,CAAC,MAAM,YAAY,GAA2C,CACnE,KAAoC,EACpC,OAAgB,EAChB,OAAuB,EACO,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,2BAA2B,EAAE,CAAC;IAEjD,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,MAAM,GAAqB;YAChC,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,sBAA6B;YAClC,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,UAAU,EAAE,MAAM;YAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,CAAC;YACJ,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;YACpD,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YACpD,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC;YAEvE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAChD,IAAI,YAAY,GAAyC,IAAI,CAAC;YAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;YAC9B,MAAM,UAAU,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;YAC3C,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;gBACxC,MAAM,SAAS,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC;gBACxC,IAAI,SAAS,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC;oBAC/B,KAAK,MAAM,IAAI,IAAI,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;wBAC5C,IAAI,IAAI,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;4BAC7B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,KAAK,IAAI,CAAC;4BACzC,IACC,CAAC,YAAY;gCACb,CAAC,UAAU,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,CAAC;gCAChD,CAAC,CAAC,UAAU,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,CAAC,EAC5C,CAAC;gCACF,IAAI,YAAY,EAAE,CAAC;oCAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wCAClC,MAAM,CAAC,IAAI,CAAC;4CACX,IAAI,EAAE,UAAU;4CAChB,YAAY,EAAE,MAAM,CAAC,MAAM,GAAG,CAAC;4CAC/B,OAAO,EAAE,YAAY,CAAC,IAAI;4CAC1B,OAAO,EAAE,MAAM;yCACf,CAAC,CAAC;oCACJ,CAAC;yCAAM,CAAC;wCACP,MAAM,CAAC,IAAI,CAAC;4CACX,IAAI,EAAE,cAAc;4CACpB,YAAY,EAAE,UAAU,EAAE;4CAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;4CAC9B,OAAO,EAAE,MAAM;yCACf,CAAC,CAAC;oCACJ,CAAC;gCACF,CAAC;gCACD,IAAI,UAAU,EAAE,CAAC;oCAChB,YAAY,GAAG,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,iBAAiB,EAAE,SAAS,EAAE,CAAC;oCAChF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;gCACtF,CAAC;qCAAM,CAAC;oCACP,YAAY,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;oCAC1C,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;gCAClF,CAAC;4BACF,CAAC;4BACD,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;gCACtC,YAAY,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;gCACnC,YAAY,CAAC,iBAAiB,GAAG,IAAI,CAAC,gBAAgB,CAAC;gCACvD,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,gBAAgB;oCACtB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK,EAAE,IAAI,CAAC,IAAI;oCAChB,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;iCAAM,CAAC;gCACP,YAAY,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC;gCAC/B,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,YAAY;oCAClB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK,EAAE,IAAI,CAAC,IAAI;oCAChB,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;wBACF,CAAC;wBAED,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;4BACvB,IAAI,YAAY,EAAE,CAAC;gCAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC;wCACX
,IAAI,EAAE,UAAU;wCAChB,YAAY,EAAE,UAAU,EAAE;wCAC1B,OAAO,EAAE,YAAY,CAAC,IAAI;wCAC1B,OAAO,EAAE,MAAM;qCACf,CAAC,CAAC;gCACJ,CAAC;qCAAM,CAAC;oCACP,MAAM,CAAC,IAAI,CAAC;wCACX,IAAI,EAAE,cAAc;wCACpB,YAAY,EAAE,UAAU,EAAE;wCAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;wCAC9B,OAAO,EAAE,MAAM;qCACf,CAAC,CAAC;gCACJ,CAAC;gCACD,YAAY,GAAG,IAAI,CAAC;4BACrB,CAAC;4BAED,4DAA4D;4BAC5D,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,EAAE,CAAC;4BACxC,MAAM,UAAU,GACf,CAAC,UAAU,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,UAAU,CAAC,CAAC;4BACzF,MAAM,UAAU,GAAG,UAAU;gCAC5B,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,eAAe,EAAE;gCAChE,CAAC,CAAC,UAAU,CAAC;4BAEd,MAAM,QAAQ,GAAa;gCAC1B,IAAI,EAAE,UAAU;gCAChB,EAAE,EAAE,UAAU;gCACd,IAAI,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,EAAE;gCAClC,SAAS,EAAE,IAAI,CAAC,YAAY,CAAC,IAA2B;gCACxD,GAAG,CAAC,IAAI,CAAC,gBAAgB,IAAI,EAAE,gBAAgB,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC;6BACzE,CAAC;4BAEF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;4BAC9B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;4BACrF,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,CAAC;gCACzC,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;4BACH,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBAC9F,CAAC;oBACF,CAAC;gBACF,CAAC;gBAED,IAAI,SAAS,EAAE,YAAY,EAAE,CAAC;oBAC7B,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,EAAE,CAAC;wBACvD,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;oBAC/B,CAAC;gBACF,CAAC;gBAED,IAAI,KAAK,CAAC,aAAa,EAAE,CAAC;oBACzB,MAAM,CAAC,KAAK,GAAG;wBACd,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,gBAAgB,IAAI,CAAC;wBAChD,MAAM,EACL,CAAC,KAAK,CAAC,aAAa,CAAC,oBAAoB,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,aAAa,CAAC,kBAAkB,IAAI,CAAC,CAAC;wBAChG,SAAS,EAAE,KAAK,CAAC,aAAa,CAAC,uBAAuB,IAAI,CAAC;wBAC3D,UAAU,EAAE,CAAC;wBACb,WAAW,EAAE,KAAK,CAAC,aAAa,CAAC,eAAe,IAAI,CAAC;wBACrD,IAAI,EAAE;4BACL,KAAK,EAAE,CAAC;4BACR,MAAM,EAAE,CAAC;4BACT,SAAS,EAAE,CAAC;4BACZ,UAAU,EAAE,CAAC;4BACb,KAAK,EAAE,CAAC;yBACR;qBACD,CAAC;oBACF,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;YACF,CAAC;YAED,IAAI,YAAY,EAAE,CAAC;gBAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAClC,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,UAAU;wBAChB,YAAY,EAAE,UAAU,EAAE;wBAC1B,OAAO,EAAE,YAAY,CAAC,IAAI;wBAC1B,OAAO,EAAE,MAAM;qBACf,CAAC,CAAC;gBACJ,CAAC;qBAAM,CAAC;oBACP,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,cAAc;wBACpB,YAAY,EAAE,UAAU,EAAE;wBAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;wBAC9B,OAAO,EAAE,MAAM;qBACf,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YAED,IAAI,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC9B,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,IAAI,MAAM,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC5C,CAAC;YAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC1E,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO;gBAAE,OAAQ,KAAa,CAAC,KAAK,CAAC;YAChE,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACrF,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;YACzE,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,M
AAM,CAAC;AAAA,CACd,CAAC;AAEF,SAAS,YAAY,CAAC,KAAoC,EAAE,MAAe,EAAe;IACzF,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CACd,gGAAgG,CAChG,CAAC;QACH,CAAC;QACD,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IACrC,CAAC;IACD,OAAO,IAAI,WAAW,CAAC;QACtB,MAAM;QACN,WAAW,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,SAAS;KACnE,CAAC,CAAC;AAAA,CACH;AAED,SAAS,WAAW,CACnB,KAAoC,EACpC,OAAgB,EAChB,OAAO,GAAkB,EAAE,EACC;IAC5B,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAEjD,MAAM,gBAAgB,GAA0B,EAAE,CAAC;IACnD,IAAI,OAAO,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;QACvC,gBAAgB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IACpD,CAAC;IACD,IAAI,OAAO,CAAC,SAAS,KAAK,SAAS,EAAE,CAAC;QACrC,gBAAgB,CAAC,eAAe,GAAG,OAAO,CAAC,SAAS,CAAC;IACtD,CAAC;IAED,MAAM,MAAM,GAA0B;QACrC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,gBAAgB,CAAC;QACjE,GAAG,CAAC,OAAO,CAAC,YAAY,IAAI,EAAE,iBAAiB,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE,CAAC;QAC5F,GAAG,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;KACxF,CAAC;IAEF,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC;QACrE,MAAM,CAAC,UAAU,GAAG;YACnB,qBAAqB,EAAE;gBACtB,IAAI,EAAE,aAAa,CAAC,OAAO,CAAC,UAAU,CAAC;aACvC;SACD,CAAC;IACH,CAAC;SAAM,CAAC;QACP,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;IAC/B,CAAC;IAED,IAAI,OAAO,CAAC,QAAQ,EAAE,OAAO,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QAClD,MAAM,cAAc,GAAmB,EAAE,eAAe,EAAE,IAAI,EAAE,CAAC;QACjE,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC1C,cAAc,CAAC,aAAa,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;QACvD,CAAC;aAAM,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,KAAK,SAAS,EAAE,CAAC;YACxD,cAAc,CAAC,cAAc,GAAG,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;QAC/D,CAAC;QACD,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC;IACxC,CAAC;IAED,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QACpB,IAAI,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YAC5B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;QACpC,CAAC;QACD,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,MAAM,CAAC;IACrC,CAAC;IAED,MAAM,MAAM,GAA8B;QACzC,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ;QACR,MAAM;KACN,CAAC;IAEF,OAAO,MAAM,CAAC;AAAA,CACd;AACD,SAAS,eAAe,CAAC,KAAoC,EAAE,OAAgB,EAAa;IAC3F,MAAM,QAAQ,GAAc,EAAE,CAAC;IAC/B,MAAM,mBAAmB,GAAG,iBAAiB,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAEvE,KAAK,MAAM,GAAG,IAAI,mBAAmB,EAAE,CAAC;QACvC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACzB,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACrC,QAAQ,CAAC,IAAI,CAAC;oBACb,IAAI,EAAE,MAAM;oBACZ,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,kBAAkB,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;iBAClD,CAAC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACP,MAAM,KAAK,GAAW,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;oBAC/C,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC1B,OAAO,EAAE,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;oBAChD,CAAC;yBAAM,CAAC;wBACP,OAAO;4BACN,UAAU,EAAE;gCACX,QAAQ,EAAE,IAAI,CAAC,QAAQ;gCACvB,IAAI,EAAE,IAAI,CAAC,IAAI;6BACf;yBACD,CAAC;oBACH,CAAC;gBAAA,CACD,CAAC,CAAC;gBACH,MAAM,aAAa,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;gBACzG,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACzC,QAAQ,CAAC,IAAI,CAAC;oBACb,IAAI,EAAE,MAAM;oBACZ,KAAK,EAAE,aAAa;iBACpB,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrC,MAAM,KAAK,GAAW,EAAE,CAAC;YAEzB,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,OAAO,EAAE,CAAC;gBACjC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC3B,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;gBACtD,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAA
E,CAAC;oBACtC,MAAM,YAAY,GAAS;wBAC1B,OAAO,EAAE,IAAI;wBACb,gBAAgB,EAAE,KAAK,CAAC,iBAAiB;wBACzC,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC;qBACxC,CAAC;oBACF,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC1B,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,MAAM,IAAI,GAAS;wBAClB,YAAY,EAAE;4BACb,EAAE,EAAE,KAAK,CAAC,EAAE;4BACZ,IAAI,EAAE,KAAK,CAAC,IAAI;4BAChB,IAAI,EAAE,KAAK,CAAC,SAAS;yBACrB;qBACD,CAAC;oBACF,IAAI,KAAK,CAAC,gBAAgB,EAAE,CAAC;wBAC5B,IAAI,CAAC,gBAAgB,GAAG,KAAK,CAAC,gBAAgB,CAAC;oBAChD,CAAC;oBACD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBAClB,CAAC;YACF,CAAC;YAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YACjC,QAAQ,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,OAAO;gBACb,KAAK;aACL,CAAC,CAAC;QACJ,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,wDAAwD;YACxD,MAAM,KAAK,GAAW,EAAE,CAAC;YAEzB,iCAAiC;YACjC,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO;iBAC5B,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;iBAChC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAAS,CAAC,IAAI,CAAC;iBAC3B,IAAI,CAAC,IAAI,CAAC,CAAC;YACb,MAAM,WAAW,GAAG,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAEvG,+EAA+E;YAC/E,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;YACtC,MAAM,SAAS,GAAG,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC;YAEzC,KAAK,CAAC,IAAI,CAAC;gBACV,gBAAgB,EAAE;oBACjB,EAAE,EAAE,GAAG,CAAC,UAAU;oBAClB,IAAI,EAAE,GAAG,CAAC,QAAQ;oBAClB,QAAQ,EAAE;wBACT,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,EAAE;wBAC1F,OAAO,EAAE,GAAG,CAAC,OAAO;qBACpB;iBACD;aACD,CAAC,CAAC;YAEH,qCAAqC;YACrC,KAAK,MAAM,UAAU,IAAI,WAAW,EAAE,CAAC;gBACtC,KAAK,CAAC,IAAI,CAAC;oBACV,UAAU,EAAE;wBACX,QAAQ,EAAG,UAAkB,CAAC,QAAQ;wBACtC,IAAI,EAAG,UAAkB,CAAC,IAAI;qBAC9B;iBACD,CAAC,CAAC;YACJ,CAAC;YAED,QAAQ,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,MAAM;gBACZ,KAAK;aACL,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;IAED,OAAO,QAAQ,CAAC;AAAA,CAChB;AAED,SAAS,YAAY,CAAC,KAAa,EAAqB;IACvD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,SAAS,CAAC;IACzC,OAAO;QACN;YACC,oBAAoB,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;gBAC1C,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,UAAU,EAAE,IAAI,CAAC,UAAiB,EAAE,wCAAwC;aAC5E,CAAC,CAAC;SACH;KACD,CAAC;AAAA,CACF;AAED,SAAS,aAAa,CAAC,MAAc,EAA6B;IACjE,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,MAAM;YACV,OAAO,yBAAyB,CAAC,IAAI,CAAC;QACvC,KAAK,MAAM;YACV,OAAO,yBAAyB,CAAC,IAAI,CAAC;QACvC,KAAK,KAAK;YACT,OAAO,yBAAyB,CAAC,GAAG,CAAC;QACtC;YACC,OAAO,yBAAyB,CAAC,IAAI,CAAC;IACxC,CAAC;AAAA,CACD;AAED,SAAS,aAAa,CAAC,MAAoB,EAAc;IACxD,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,YAAY,CAAC,IAAI;YACrB,OAAO,MAAM,CAAC;QACf,KAAK,YAAY,CAAC,UAAU;YAC3B,OAAO,QAAQ,CAAC;QACjB,KAAK,YAAY,CAAC,SAAS,CAAC;QAC5B,KAAK,YAAY,CAAC,kBAAkB,CAAC;QACrC,KAAK,YAAY,CAAC,IAAI,CAAC;QACvB,KAAK,YAAY,CAAC,MAAM,CAAC;QACzB,KAAK,YAAY,CAAC,YAAY,CAAC;QAC/B,KAAK,YAAY,CAAC,wBAAwB,CAAC;QAC3C,KAAK,YAAY,CAAC,UAAU,CAAC;QAC7B,KAAK,YAAY,CAAC,yBAAyB,CAAC;QAC5C,KAAK,YAAY,CAAC,KAAK,CAAC;QACxB,KAAK,YAAY,CAAC,QAAQ,CAAC;QAC3B,KAAK,YAAY,CAAC,uBAAuB,CAAC;QAC1C,KAAK,YAAY,CAAC,oBAAoB,CAAC;QACvC,KAAK,YAAY,CAAC,QAAQ;YACzB,OAAO,OAAO,CAAC;QAChB,SAAS,CAAC;YACT,MAAM,WAAW,GAAU,MAAM,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;IACF,CAAC;AAAA,CACD","sourcesContent":["import {\n\ttype Content,\n\tFinishReason,\n\tFunctionCallingConfigMode,\n\ttype GenerateContentConfig,\n\ttype GenerateContentParameters,\n\tGoogleGenAI,\n\ttype Part,\n\ttype ThinkingConfig,\n\ttype ThinkingLevel,\n} from \"@google/genai\";\nimport { calculateCost } from \"../models.js\";\nimport type 
{\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\n\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface GoogleOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"any\";\n\tthinking?: {\n\t\tenabled: boolean;\n\t\tbudgetTokens?: number; // -1 for dynamic, 0 to disable\n\t\tlevel?: ThinkingLevel;\n\t};\n}\n\n// Counter for generating unique tool call IDs\nlet toolCallCounter = 0;\n\nexport const streamGoogle: StreamFunction<\"google-generative-ai\"> = (\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions?: GoogleOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"google-generative-ai\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst client = createClient(model, options?.apiKey);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\tconst googleStream = await client.models.generateContentStream(params);\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tlet currentBlock: TextContent | ThinkingContent | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tfor await (const chunk of googleStream) {\n\t\t\t\tconst candidate = chunk.candidates?.[0];\n\t\t\t\tif (candidate?.content?.parts) {\n\t\t\t\t\tfor (const part of candidate.content.parts) {\n\t\t\t\t\t\tif (part.text !== undefined) {\n\t\t\t\t\t\t\tconst isThinking = part.thought === true;\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\t(isThinking && currentBlock.type !== \"thinking\") ||\n\t\t\t\t\t\t\t\t(!isThinking && currentBlock.type !== \"text\")\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blocks.length - 1,\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif (isThinking) {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\", thinkingSignature: undefined };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: 
\"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\t\tcurrentBlock.thinking += part.text;\n\t\t\t\t\t\t\t\tcurrentBlock.thinkingSignature = part.thoughtSignature;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcurrentBlock.text += part.text;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (part.functionCall) {\n\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t// Generate unique ID if not provided or if it's a duplicate\n\t\t\t\t\t\t\tconst providedId = part.functionCall.id;\n\t\t\t\t\t\t\tconst needsNewId =\n\t\t\t\t\t\t\t\t!providedId || output.content.some((b) => b.type === \"toolCall\" && b.id === providedId);\n\t\t\t\t\t\t\tconst toolCallId = needsNewId\n\t\t\t\t\t\t\t\t? 
`${part.functionCall.name}_${Date.now()}_${++toolCallCounter}`\n\t\t\t\t\t\t\t\t: providedId;\n\n\t\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\tid: toolCallId,\n\t\t\t\t\t\t\t\tname: part.functionCall.name || \"\",\n\t\t\t\t\t\t\t\targuments: part.functionCall.args as Record<string, any>,\n\t\t\t\t\t\t\t\t...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\toutput.content.push(toolCall);\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (candidate?.finishReason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(candidate.finishReason);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\")) {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (chunk.usageMetadata) {\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput: chunk.usageMetadata.promptTokenCount || 0,\n\t\t\t\t\t\toutput:\n\t\t\t\t\t\t\t(chunk.usageMetadata.candidatesTokenCount || 0) + (chunk.usageMetadata.thoughtsTokenCount || 0),\n\t\t\t\t\t\tcacheRead: chunk.usageMetadata.cachedContentTokenCount || 0,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: chunk.usageMetadata.totalTokenCount || 0,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (currentBlock) {\n\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unkown error ocurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createClient(model: Model<\"google-generative-ai\">, apiKey?: string): GoogleGenAI {\n\tif (!apiKey) {\n\t\tif (!process.env.GEMINI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Gemini API key is required. 
Set GEMINI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.GEMINI_API_KEY;\n\t}\n\treturn new GoogleGenAI({\n\t\tapiKey,\n\t\thttpOptions: model.headers ? { headers: model.headers } : undefined,\n\t});\n}\n\nfunction buildParams(\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions: GoogleOptions = {},\n): GenerateContentParameters {\n\tconst contents = convertMessages(model, context);\n\n\tconst generationConfig: GenerateContentConfig = {};\n\tif (options.temperature !== undefined) {\n\t\tgenerationConfig.temperature = options.temperature;\n\t}\n\tif (options.maxTokens !== undefined) {\n\t\tgenerationConfig.maxOutputTokens = options.maxTokens;\n\t}\n\n\tconst config: GenerateContentConfig = {\n\t\t...(Object.keys(generationConfig).length > 0 && generationConfig),\n\t\t...(context.systemPrompt && { systemInstruction: sanitizeSurrogates(context.systemPrompt) }),\n\t\t...(context.tools && context.tools.length > 0 && { tools: convertTools(context.tools) }),\n\t};\n\n\tif (context.tools && context.tools.length > 0 && options.toolChoice) {\n\t\tconfig.toolConfig = {\n\t\t\tfunctionCallingConfig: {\n\t\t\t\tmode: mapToolChoice(options.toolChoice),\n\t\t\t},\n\t\t};\n\t} else {\n\t\tconfig.toolConfig = undefined;\n\t}\n\n\tif (options.thinking?.enabled && model.reasoning) {\n\t\tconst thinkingConfig: ThinkingConfig = { includeThoughts: true };\n\t\tif (options.thinking.level !== undefined) {\n\t\t\tthinkingConfig.thinkingLevel = options.thinking.level;\n\t\t} else if (options.thinking.budgetTokens !== undefined) {\n\t\t\tthinkingConfig.thinkingBudget = options.thinking.budgetTokens;\n\t\t}\n\t\tconfig.thinkingConfig = thinkingConfig;\n\t}\n\n\tif (options.signal) {\n\t\tif (options.signal.aborted) {\n\t\t\tthrow new Error(\"Request aborted\");\n\t\t}\n\t\tconfig.abortSignal = options.signal;\n\t}\n\n\tconst params: GenerateContentParameters = {\n\t\tmodel: model.id,\n\t\tcontents,\n\t\tconfig,\n\t};\n\n\treturn params;\n}\nfunction convertMessages(model: Model<\"google-generative-ai\">, context: Context): Content[] {\n\tconst contents: Content[] = [];\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: [{ text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst parts: Part[] = msg.content.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn { text: sanitizeSurrogates(item.text) };\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tinlineData: {\n\t\t\t\t\t\t\t\tmimeType: item.mimeType,\n\t\t\t\t\t\t\t\tdata: item.data,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredParts = !model.input.includes(\"image\") ? 
parts.filter((p) => p.text !== undefined) : parts;\n\t\t\t\tif (filteredParts.length === 0) continue;\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: filteredParts,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst parts: Part[] = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tparts.push({ text: sanitizeSurrogates(block.text) });\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tconst thinkingPart: Part = {\n\t\t\t\t\t\tthought: true,\n\t\t\t\t\t\tthoughtSignature: block.thinkingSignature,\n\t\t\t\t\t\ttext: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t};\n\t\t\t\t\tparts.push(thinkingPart);\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tconst part: Part = {\n\t\t\t\t\t\tfunctionCall: {\n\t\t\t\t\t\t\tid: block.id,\n\t\t\t\t\t\t\tname: block.name,\n\t\t\t\t\t\t\targs: block.arguments,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tif (block.thoughtSignature) {\n\t\t\t\t\t\tpart.thoughtSignature = block.thoughtSignature;\n\t\t\t\t\t}\n\t\t\t\t\tparts.push(part);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (parts.length === 0) continue;\n\t\t\tcontents.push({\n\t\t\t\trole: \"model\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Build parts array with functionResponse and/or images\n\t\t\tconst parts: Part[] = [];\n\n\t\t\t// Extract text and image content\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t.map((c) => (c as any).text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst imageBlocks = model.input.includes(\"image\") ? msg.content.filter((c) => c.type === \"image\") : [];\n\n\t\t\t// Always add functionResponse with text result (or placeholder if only images)\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tconst hasImages = imageBlocks.length > 0;\n\n\t\t\tparts.push({\n\t\t\t\tfunctionResponse: {\n\t\t\t\t\tid: msg.toolCallId,\n\t\t\t\t\tname: msg.toolName,\n\t\t\t\t\tresponse: {\n\t\t\t\t\t\tresult: hasText ? sanitizeSurrogates(textResult) : hasImages ? 
\"(see attached image)\" : \"\",\n\t\t\t\t\t\tisError: msg.isError,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t});\n\n\t\t\t// Add any images as inlineData parts\n\t\t\tfor (const imageBlock of imageBlocks) {\n\t\t\t\tparts.push({\n\t\t\t\t\tinlineData: {\n\t\t\t\t\t\tmimeType: (imageBlock as any).mimeType,\n\t\t\t\t\t\tdata: (imageBlock as any).data,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tcontents.push({\n\t\t\t\trole: \"user\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t}\n\t}\n\n\treturn contents;\n}\n\nfunction convertTools(tools: Tool[]): any[] | undefined {\n\tif (tools.length === 0) return undefined;\n\treturn [\n\t\t{\n\t\t\tfunctionDeclarations: tools.map((tool) => ({\n\t\t\t\tname: tool.name,\n\t\t\t\tdescription: tool.description,\n\t\t\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\t\t})),\n\t\t},\n\t];\n}\n\nfunction mapToolChoice(choice: string): FunctionCallingConfigMode {\n\tswitch (choice) {\n\t\tcase \"auto\":\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t\tcase \"none\":\n\t\t\treturn FunctionCallingConfigMode.NONE;\n\t\tcase \"any\":\n\t\t\treturn FunctionCallingConfigMode.ANY;\n\t\tdefault:\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t}\n}\n\nfunction mapStopReason(reason: FinishReason): StopReason {\n\tswitch (reason) {\n\t\tcase FinishReason.STOP:\n\t\t\treturn \"stop\";\n\t\tcase FinishReason.MAX_TOKENS:\n\t\t\treturn \"length\";\n\t\tcase FinishReason.BLOCKLIST:\n\t\tcase FinishReason.PROHIBITED_CONTENT:\n\t\tcase FinishReason.SPII:\n\t\tcase FinishReason.SAFETY:\n\t\tcase FinishReason.IMAGE_SAFETY:\n\t\tcase FinishReason.IMAGE_PROHIBITED_CONTENT:\n\t\tcase FinishReason.RECITATION:\n\t\tcase FinishReason.FINISH_REASON_UNSPECIFIED:\n\t\tcase FinishReason.OTHER:\n\t\tcase FinishReason.LANGUAGE:\n\t\tcase FinishReason.MALFORMED_FUNCTION_CALL:\n\t\tcase FinishReason.UNEXPECTED_TOOL_CALL:\n\t\tcase FinishReason.NO_IMAGE:\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
+ {"version":3,"file":"google.js","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AAAA,OAAO,EAEN,YAAY,EACZ,yBAAyB,EAGzB,WAAW,GAKX,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAe7C,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAElE,OAAO,EAAE,iBAAiB,EAAE,MAAM,wBAAwB,CAAC;AAW3D,8CAA8C;AAC9C,IAAI,eAAe,GAAG,CAAC,CAAC;AAExB,MAAM,CAAC,MAAM,YAAY,GAA2C,CACnE,KAAoC,EACpC,OAAgB,EAChB,OAAuB,EACO,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,2BAA2B,EAAE,CAAC;IAEjD,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,MAAM,GAAqB;YAChC,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,sBAA6B;YAClC,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,UAAU,EAAE,MAAM;YAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,CAAC;YACJ,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;YACpD,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YACpD,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC;YAEvE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAChD,IAAI,YAAY,GAAyC,IAAI,CAAC;YAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;YAC9B,MAAM,UAAU,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;YAC3C,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;gBACxC,MAAM,SAAS,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC;gBACxC,IAAI,SAAS,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC;oBAC/B,KAAK,MAAM,IAAI,IAAI,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;wBAC5C,IAAI,IAAI,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;4BAC7B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,KAAK,IAAI,CAAC;4BACzC,IACC,CAAC,YAAY;gCACb,CAAC,UAAU,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,CAAC;gCAChD,CAAC,CAAC,UAAU,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,CAAC,EAC5C,CAAC;gCACF,IAAI,YAAY,EAAE,CAAC;oCAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wCAClC,MAAM,CAAC,IAAI,CAAC;4CACX,IAAI,EAAE,UAAU;4CAChB,YAAY,EAAE,MAAM,CAAC,MAAM,GAAG,CAAC;4CAC/B,OAAO,EAAE,YAAY,CAAC,IAAI;4CAC1B,OAAO,EAAE,MAAM;yCACf,CAAC,CAAC;oCACJ,CAAC;yCAAM,CAAC;wCACP,MAAM,CAAC,IAAI,CAAC;4CACX,IAAI,EAAE,cAAc;4CACpB,YAAY,EAAE,UAAU,EAAE;4CAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;4CAC9B,OAAO,EAAE,MAAM;yCACf,CAAC,CAAC;oCACJ,CAAC;gCACF,CAAC;gCACD,IAAI,UAAU,EAAE,CAAC;oCAChB,YAAY,GAAG,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,iBAAiB,EAAE,SAAS,EAAE,CAAC;oCAChF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;gCACtF,CAAC;qCAAM,CAAC;oCACP,YAAY,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;oCAC1C,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;gCAClF,CAAC;4BACF,CAAC;4BACD,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;gCACtC,YAAY,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;gCACnC,YAAY,CAAC,iBAAiB,GAAG,IAAI,CAAC,gBAAgB,CAAC;gCACvD,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,gBAAgB;oCACtB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK,EAAE,IAAI,CAAC,IAAI;oCAChB,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;iCAAM,CAAC;gCACP,YAAY,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC;gCAC/B,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,YAAY;oCAClB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK,EAAE,IAAI,CAAC,IAAI;oCAChB,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;wBACF,CAAC;wBAED,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;4BACvB,IAAI,YAAY,EAAE,CAAC;gCAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC;wCACX
,IAAI,EAAE,UAAU;wCAChB,YAAY,EAAE,UAAU,EAAE;wCAC1B,OAAO,EAAE,YAAY,CAAC,IAAI;wCAC1B,OAAO,EAAE,MAAM;qCACf,CAAC,CAAC;gCACJ,CAAC;qCAAM,CAAC;oCACP,MAAM,CAAC,IAAI,CAAC;wCACX,IAAI,EAAE,cAAc;wCACpB,YAAY,EAAE,UAAU,EAAE;wCAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;wCAC9B,OAAO,EAAE,MAAM;qCACf,CAAC,CAAC;gCACJ,CAAC;gCACD,YAAY,GAAG,IAAI,CAAC;4BACrB,CAAC;4BAED,4DAA4D;4BAC5D,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,EAAE,CAAC;4BACxC,MAAM,UAAU,GACf,CAAC,UAAU,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,UAAU,CAAC,CAAC;4BACzF,MAAM,UAAU,GAAG,UAAU;gCAC5B,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,eAAe,EAAE;gCAChE,CAAC,CAAC,UAAU,CAAC;4BAEd,MAAM,QAAQ,GAAa;gCAC1B,IAAI,EAAE,UAAU;gCAChB,EAAE,EAAE,UAAU;gCACd,IAAI,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,EAAE;gCAClC,SAAS,EAAE,IAAI,CAAC,YAAY,CAAC,IAA2B;gCACxD,GAAG,CAAC,IAAI,CAAC,gBAAgB,IAAI,EAAE,gBAAgB,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC;6BACzE,CAAC;4BAEF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;4BAC9B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;4BACrF,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,CAAC;gCACzC,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;4BACH,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBAC9F,CAAC;oBACF,CAAC;gBACF,CAAC;gBAED,IAAI,SAAS,EAAE,YAAY,EAAE,CAAC;oBAC7B,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,EAAE,CAAC;wBACvD,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;oBAC/B,CAAC;gBACF,CAAC;gBAED,IAAI,KAAK,CAAC,aAAa,EAAE,CAAC;oBACzB,MAAM,CAAC,KAAK,GAAG;wBACd,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,gBAAgB,IAAI,CAAC;wBAChD,MAAM,EACL,CAAC,KAAK,CAAC,aAAa,CAAC,oBAAoB,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,aAAa,CAAC,kBAAkB,IAAI,CAAC,CAAC;wBAChG,SAAS,EAAE,KAAK,CAAC,aAAa,CAAC,uBAAuB,IAAI,CAAC;wBAC3D,UAAU,EAAE,CAAC;wBACb,WAAW,EAAE,KAAK,CAAC,aAAa,CAAC,eAAe,IAAI,CAAC;wBACrD,IAAI,EAAE;4BACL,KAAK,EAAE,CAAC;4BACR,MAAM,EAAE,CAAC;4BACT,SAAS,EAAE,CAAC;4BACZ,UAAU,EAAE,CAAC;4BACb,KAAK,EAAE,CAAC;yBACR;qBACD,CAAC;oBACF,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;YACF,CAAC;YAED,IAAI,YAAY,EAAE,CAAC;gBAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAClC,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,UAAU;wBAChB,YAAY,EAAE,UAAU,EAAE;wBAC1B,OAAO,EAAE,YAAY,CAAC,IAAI;wBAC1B,OAAO,EAAE,MAAM;qBACf,CAAC,CAAC;gBACJ,CAAC;qBAAM,CAAC;oBACP,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,cAAc;wBACpB,YAAY,EAAE,UAAU,EAAE;wBAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;wBAC9B,OAAO,EAAE,MAAM;qBACf,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YAED,IAAI,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC9B,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,IAAI,MAAM,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC5C,CAAC;YAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC1E,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,uDAAuD;YACvD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;gBACpC,IAAI,OAAO,IAAI,KAAK,EAAE,CAAC;oBACtB,OAAQ,KAA4B,CAAC,KAAK,CAAC;gBAC5C,CAAC;YACF,CAAC;YACD,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACrF,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC
,CAAC;YACzE,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd,CAAC;AAEF,SAAS,YAAY,CAAC,KAAoC,EAAE,MAAe,EAAe;IACzF,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CACd,gGAAgG,CAChG,CAAC;QACH,CAAC;QACD,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IACrC,CAAC;IAED,MAAM,WAAW,GAA2D,EAAE,CAAC;IAC/E,IAAI,KAAK,CAAC,OAAO,EAAE,CAAC;QACnB,WAAW,CAAC,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,EAAE,CAAC;QACnB,WAAW,CAAC,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IACrC,CAAC;IAED,OAAO,IAAI,WAAW,CAAC;QACtB,MAAM;QACN,WAAW,EAAE,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS;KAC1E,CAAC,CAAC;AAAA,CACH;AAED,SAAS,WAAW,CACnB,KAAoC,EACpC,OAAgB,EAChB,OAAO,GAAkB,EAAE,EACC;IAC5B,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAEjD,MAAM,gBAAgB,GAA0B,EAAE,CAAC;IACnD,IAAI,OAAO,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;QACvC,gBAAgB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IACpD,CAAC;IACD,IAAI,OAAO,CAAC,SAAS,KAAK,SAAS,EAAE,CAAC;QACrC,gBAAgB,CAAC,eAAe,GAAG,OAAO,CAAC,SAAS,CAAC;IACtD,CAAC;IAED,MAAM,MAAM,GAA0B;QACrC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,gBAAgB,CAAC;QACjE,GAAG,CAAC,OAAO,CAAC,YAAY,IAAI,EAAE,iBAAiB,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE,CAAC;QAC5F,GAAG,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;KACxF,CAAC;IAEF,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC;QACrE,MAAM,CAAC,UAAU,GAAG;YACnB,qBAAqB,EAAE;gBACtB,IAAI,EAAE,aAAa,CAAC,OAAO,CAAC,UAAU,CAAC;aACvC;SACD,CAAC;IACH,CAAC;SAAM,CAAC;QACP,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;IAC/B,CAAC;IAED,IAAI,OAAO,CAAC,QAAQ,EAAE,OAAO,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QAClD,MAAM,cAAc,GAAmB,EAAE,eAAe,EAAE,IAAI,EAAE,CAAC;QACjE,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC1C,cAAc,CAAC,aAAa,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;QACvD,CAAC;aAAM,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,KAAK,SAAS,EAAE,CAAC;YACxD,cAAc,CAAC,cAAc,GAAG,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;QAC/D,CAAC;QACD,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC;IACxC,CAAC;IAED,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QACpB,IAAI,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YAC5B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;QACpC,CAAC;QACD,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,MAAM,CAAC;IACrC,CAAC;IAED,MAAM,MAAM,GAA8B;QACzC,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ;QACR,MAAM;KACN,CAAC;IAEF,OAAO,MAAM,CAAC;AAAA,CACd;AACD,SAAS,eAAe,CAAC,KAAoC,EAAE,OAAgB,EAAa;IAC3F,MAAM,QAAQ,GAAc,EAAE,CAAC;IAC/B,MAAM,mBAAmB,GAAG,iBAAiB,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAEvE,KAAK,MAAM,GAAG,IAAI,mBAAmB,EAAE,CAAC;QACvC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACzB,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACrC,QAAQ,CAAC,IAAI,CAAC;oBACb,IAAI,EAAE,MAAM;oBACZ,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,kBAAkB,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;iBAClD,CAAC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACP,MAAM,KAAK,GAAW,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;oBAC/C,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC1B,OAAO,EAAE,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;oBAChD,CAAC;yBAAM,CAAC;wBACP,OAAO;4BACN,UAAU,EAAE;gCACX,QAAQ,EAAE,IAAI,CAAC,QAAQ;gCACvB,IAAI,EAAE,IAAI,CAAC,IAAI;6BACf;yBACD,CAAC;oBACH,CAAC;gBAAA,CACD,CAAC,CAAC;gBACH,MAAM,aAAa,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;gBACzG,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACzC,QAAQ,CAAC,IAAI,CAAC;oBACb,IAAI,EAAE,MAAM;oBACZ,KAAK,EAAE,aAAa;iBACpB,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;aAAM,IAAI,
GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrC,MAAM,KAAK,GAAW,EAAE,CAAC;YAEzB,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,OAAO,EAAE,CAAC;gBACjC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC3B,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;gBACtD,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,MAAM,YAAY,GAAS;wBAC1B,OAAO,EAAE,IAAI;wBACb,gBAAgB,EAAE,KAAK,CAAC,iBAAiB;wBACzC,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC;qBACxC,CAAC;oBACF,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC1B,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,MAAM,IAAI,GAAS;wBAClB,YAAY,EAAE;4BACb,EAAE,EAAE,KAAK,CAAC,EAAE;4BACZ,IAAI,EAAE,KAAK,CAAC,IAAI;4BAChB,IAAI,EAAE,KAAK,CAAC,SAAS;yBACrB;qBACD,CAAC;oBACF,IAAI,KAAK,CAAC,gBAAgB,EAAE,CAAC;wBAC5B,IAAI,CAAC,gBAAgB,GAAG,KAAK,CAAC,gBAAgB,CAAC;oBAChD,CAAC;oBACD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBAClB,CAAC;YACF,CAAC;YAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YACjC,QAAQ,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,OAAO;gBACb,KAAK;aACL,CAAC,CAAC;QACJ,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,wDAAwD;YACxD,MAAM,KAAK,GAAW,EAAE,CAAC;YAEzB,iCAAiC;YACjC,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAoB,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC;YACnF,MAAM,UAAU,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAC7D,MAAM,YAAY,GAAG,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC;gBACjD,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAqB,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC;gBAClE,CAAC,CAAC,EAAE,CAAC;YAEN,+EAA+E;YAC/E,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;YACtC,MAAM,SAAS,GAAG,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;YAE1C,gFAAgF;YAChF,MAAM,aAAa,GAAG,OAAO,CAAC,CAAC,CAAC,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,EAAE,CAAC;YAEzG,KAAK,CAAC,IAAI,CAAC;gBACV,gBAAgB,EAAE;oBACjB,EAAE,EAAE,GAAG,CAAC,UAAU;oBAClB,IAAI,EAAE,GAAG,CAAC,QAAQ;oBAClB,QAAQ,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,aAAa,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,aAAa,EAAE;iBAC5E;aACD,CAAC,CAAC;YAEH,qCAAqC;YACrC,KAAK,MAAM,UAAU,IAAI,YAAY,EAAE,CAAC;gBACvC,KAAK,CAAC,IAAI,CAAC;oBACV,UAAU,EAAE;wBACX,QAAQ,EAAE,UAAU,CAAC,QAAQ;wBAC7B,IAAI,EAAE,UAAU,CAAC,IAAI;qBACrB;iBACD,CAAC,CAAC;YACJ,CAAC;YAED,QAAQ,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,MAAM;gBACZ,KAAK;aACL,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;IAED,OAAO,QAAQ,CAAC;AAAA,CAChB;AAED,SAAS,YAAY,CACpB,KAAa,EAC0F;IACvG,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,SAAS,CAAC;IACzC,OAAO;QACN;YACC,oBAAoB,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;gBAC1C,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,UAAU,EAAE,IAAI,CAAC,UAAoB,EAAE,gEAAgE;aACvG,CAAC,CAAC;SACH;KACD,CAAC;AAAA,CACF;AAED,SAAS,aAAa,CAAC,MAAc,EAA6B;IACjE,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,MAAM;YACV,OAAO,yBAAyB,CAAC,IAAI,CAAC;QACvC,KAAK,MAAM;YACV,OAAO,yBAAyB,CAAC,IAAI,CAAC;QACvC,KAAK,KAAK;YACT,OAAO,yBAAyB,CAAC,GAAG,CAAC;QACtC;YACC,OAAO,yBAAyB,CAAC,IAAI,CAAC;IACxC,CAAC;AAAA,CACD;AAED,SAAS,aAAa,CAAC,MAAoB,EAAc;IACxD,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,YAAY,CAAC,IAAI;YACrB,OAAO,MAAM,CAAC;QACf,KAAK,YAAY,CAAC,UAAU;YAC3B,OAAO,QAAQ,CAAC;QACjB,KAAK,YAAY,CAAC,SAAS,CAAC;QAC5B,KAAK,YAAY,CAAC,kBAAkB,CAAC;QACrC,KAAK,YAAY,CAAC,IAAI,CAAC;QACvB,KAAK,YAAY,CAAC,MAAM,CAAC;QACzB,KAAK,YAAY,CAAC,YAAY,CAAC;QAC/B,KAAK,YAAY,CAAC,wBAAwB,CAAC;QAC3C,KAAK,YAAY,CAAC,gBAAgB,CAAC;QACnC,KAAK,YAAY,CAAC,WAAW,CAAC;QAC9B,KAAK,YAAY,CAAC,UAAU,CAAC;QAC7B,KAAK,YAAY,CAAC,yBAAyB,CAAC;QAC5C,KAAK,YAAY,CAAC,KAAK,CAAC;QACxB,KAAK,YAAY,CAAC,QAAQ,CAAC;QAC3B,KAAK,YAAY,CAAC,uBAAuB,CAAC;QAC1C,KAAK,YAAY,CAAC,oBAAoB,CAAC;QACvC,KAAK,YAAY,CAAC,QAAQ;YACzB,OAAO,OAAO,CAAC;QAC
hB,SAAS,CAAC;YACT,MAAM,WAAW,GAAU,MAAM,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;IACF,CAAC;AAAA,CACD","sourcesContent":["import {\n\ttype Content,\n\tFinishReason,\n\tFunctionCallingConfigMode,\n\ttype GenerateContentConfig,\n\ttype GenerateContentParameters,\n\tGoogleGenAI,\n\ttype Part,\n\ttype Schema,\n\ttype ThinkingConfig,\n\ttype ThinkingLevel,\n} from \"@google/genai\";\nimport { calculateCost } from \"../models.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tImageContent,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\n\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface GoogleOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"any\";\n\tthinking?: {\n\t\tenabled: boolean;\n\t\tbudgetTokens?: number; // -1 for dynamic, 0 to disable\n\t\tlevel?: ThinkingLevel;\n\t};\n}\n\n// Counter for generating unique tool call IDs\nlet toolCallCounter = 0;\n\nexport const streamGoogle: StreamFunction<\"google-generative-ai\"> = (\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions?: GoogleOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"google-generative-ai\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst client = createClient(model, options?.apiKey);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\tconst googleStream = await client.models.generateContentStream(params);\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tlet currentBlock: TextContent | ThinkingContent | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tfor await (const chunk of googleStream) {\n\t\t\t\tconst candidate = chunk.candidates?.[0];\n\t\t\t\tif (candidate?.content?.parts) {\n\t\t\t\t\tfor (const part of candidate.content.parts) {\n\t\t\t\t\t\tif (part.text !== undefined) {\n\t\t\t\t\t\t\tconst isThinking = part.thought === true;\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\t(isThinking && currentBlock.type !== \"thinking\") ||\n\t\t\t\t\t\t\t\t(!isThinking && currentBlock.type !== \"text\")\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blocks.length - 1,\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\t\tpartial: 
output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif (isThinking) {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\", thinkingSignature: undefined };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\t\tcurrentBlock.thinking += part.text;\n\t\t\t\t\t\t\t\tcurrentBlock.thinkingSignature = part.thoughtSignature;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcurrentBlock.text += part.text;\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (part.functionCall) {\n\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t// Generate unique ID if not provided or if it's a duplicate\n\t\t\t\t\t\t\tconst providedId = part.functionCall.id;\n\t\t\t\t\t\t\tconst needsNewId =\n\t\t\t\t\t\t\t\t!providedId || output.content.some((b) => b.type === \"toolCall\" && b.id === providedId);\n\t\t\t\t\t\t\tconst toolCallId = needsNewId\n\t\t\t\t\t\t\t\t? 
`${part.functionCall.name}_${Date.now()}_${++toolCallCounter}`\n\t\t\t\t\t\t\t\t: providedId;\n\n\t\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\tid: toolCallId,\n\t\t\t\t\t\t\t\tname: part.functionCall.name || \"\",\n\t\t\t\t\t\t\t\targuments: part.functionCall.args as Record<string, any>,\n\t\t\t\t\t\t\t\t...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\toutput.content.push(toolCall);\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (candidate?.finishReason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(candidate.finishReason);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\")) {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (chunk.usageMetadata) {\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput: chunk.usageMetadata.promptTokenCount || 0,\n\t\t\t\t\t\toutput:\n\t\t\t\t\t\t\t(chunk.usageMetadata.candidatesTokenCount || 0) + (chunk.usageMetadata.thoughtsTokenCount || 0),\n\t\t\t\t\t\tcacheRead: chunk.usageMetadata.cachedContentTokenCount || 0,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: chunk.usageMetadata.totalTokenCount || 0,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (currentBlock) {\n\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unkown error ocurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\t// Remove internal index property used during streaming\n\t\t\tfor (const block of output.content) {\n\t\t\t\tif (\"index\" in block) {\n\t\t\t\t\tdelete (block as { index?: number }).index;\n\t\t\t\t}\n\t\t\t}\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createClient(model: Model<\"google-generative-ai\">, apiKey?: string): GoogleGenAI {\n\tif (!apiKey) {\n\t\tif (!process.env.GEMINI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"Gemini API key is required. 
Set GEMINI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.GEMINI_API_KEY;\n\t}\n\n\tconst httpOptions: { baseUrl?: string; headers?: Record<string, string> } = {};\n\tif (model.baseUrl) {\n\t\thttpOptions.baseUrl = model.baseUrl;\n\t}\n\tif (model.headers) {\n\t\thttpOptions.headers = model.headers;\n\t}\n\n\treturn new GoogleGenAI({\n\t\tapiKey,\n\t\thttpOptions: Object.keys(httpOptions).length > 0 ? httpOptions : undefined,\n\t});\n}\n\nfunction buildParams(\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions: GoogleOptions = {},\n): GenerateContentParameters {\n\tconst contents = convertMessages(model, context);\n\n\tconst generationConfig: GenerateContentConfig = {};\n\tif (options.temperature !== undefined) {\n\t\tgenerationConfig.temperature = options.temperature;\n\t}\n\tif (options.maxTokens !== undefined) {\n\t\tgenerationConfig.maxOutputTokens = options.maxTokens;\n\t}\n\n\tconst config: GenerateContentConfig = {\n\t\t...(Object.keys(generationConfig).length > 0 && generationConfig),\n\t\t...(context.systemPrompt && { systemInstruction: sanitizeSurrogates(context.systemPrompt) }),\n\t\t...(context.tools && context.tools.length > 0 && { tools: convertTools(context.tools) }),\n\t};\n\n\tif (context.tools && context.tools.length > 0 && options.toolChoice) {\n\t\tconfig.toolConfig = {\n\t\t\tfunctionCallingConfig: {\n\t\t\t\tmode: mapToolChoice(options.toolChoice),\n\t\t\t},\n\t\t};\n\t} else {\n\t\tconfig.toolConfig = undefined;\n\t}\n\n\tif (options.thinking?.enabled && model.reasoning) {\n\t\tconst thinkingConfig: ThinkingConfig = { includeThoughts: true };\n\t\tif (options.thinking.level !== undefined) {\n\t\t\tthinkingConfig.thinkingLevel = options.thinking.level;\n\t\t} else if (options.thinking.budgetTokens !== undefined) {\n\t\t\tthinkingConfig.thinkingBudget = options.thinking.budgetTokens;\n\t\t}\n\t\tconfig.thinkingConfig = thinkingConfig;\n\t}\n\n\tif (options.signal) {\n\t\tif (options.signal.aborted) {\n\t\t\tthrow new Error(\"Request aborted\");\n\t\t}\n\t\tconfig.abortSignal = options.signal;\n\t}\n\n\tconst params: GenerateContentParameters = {\n\t\tmodel: model.id,\n\t\tcontents,\n\t\tconfig,\n\t};\n\n\treturn params;\n}\nfunction convertMessages(model: Model<\"google-generative-ai\">, context: Context): Content[] {\n\tconst contents: Content[] = [];\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: [{ text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst parts: Part[] = msg.content.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn { text: sanitizeSurrogates(item.text) };\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tinlineData: {\n\t\t\t\t\t\t\t\tmimeType: item.mimeType,\n\t\t\t\t\t\t\t\tdata: item.data,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredParts = !model.input.includes(\"image\") ? 
parts.filter((p) => p.text !== undefined) : parts;\n\t\t\t\tif (filteredParts.length === 0) continue;\n\t\t\t\tcontents.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tparts: filteredParts,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst parts: Part[] = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tparts.push({ text: sanitizeSurrogates(block.text) });\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tconst thinkingPart: Part = {\n\t\t\t\t\t\tthought: true,\n\t\t\t\t\t\tthoughtSignature: block.thinkingSignature,\n\t\t\t\t\t\ttext: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t};\n\t\t\t\t\tparts.push(thinkingPart);\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tconst part: Part = {\n\t\t\t\t\t\tfunctionCall: {\n\t\t\t\t\t\t\tid: block.id,\n\t\t\t\t\t\t\tname: block.name,\n\t\t\t\t\t\t\targs: block.arguments,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tif (block.thoughtSignature) {\n\t\t\t\t\t\tpart.thoughtSignature = block.thoughtSignature;\n\t\t\t\t\t}\n\t\t\t\t\tparts.push(part);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (parts.length === 0) continue;\n\t\t\tcontents.push({\n\t\t\t\trole: \"model\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Build parts array with functionResponse and/or images\n\t\t\tconst parts: Part[] = [];\n\n\t\t\t// Extract text and image content\n\t\t\tconst textContent = msg.content.filter((c): c is TextContent => c.type === \"text\");\n\t\t\tconst textResult = textContent.map((c) => c.text).join(\"\\n\");\n\t\t\tconst imageContent = model.input.includes(\"image\")\n\t\t\t\t? msg.content.filter((c): c is ImageContent => c.type === \"image\")\n\t\t\t\t: [];\n\n\t\t\t// Always add functionResponse with text result (or placeholder if only images)\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tconst hasImages = imageContent.length > 0;\n\n\t\t\t// Use \"output\" key for success, \"error\" key for errors as per SDK documentation\n\t\t\tconst responseValue = hasText ? sanitizeSurrogates(textResult) : hasImages ? \"(see attached image)\" : \"\";\n\n\t\t\tparts.push({\n\t\t\t\tfunctionResponse: {\n\t\t\t\t\tid: msg.toolCallId,\n\t\t\t\t\tname: msg.toolName,\n\t\t\t\t\tresponse: msg.isError ? 
{ error: responseValue } : { output: responseValue },\n\t\t\t\t},\n\t\t\t});\n\n\t\t\t// Add any images as inlineData parts\n\t\t\tfor (const imageBlock of imageContent) {\n\t\t\t\tparts.push({\n\t\t\t\t\tinlineData: {\n\t\t\t\t\t\tmimeType: imageBlock.mimeType,\n\t\t\t\t\t\tdata: imageBlock.data,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tcontents.push({\n\t\t\t\trole: \"user\",\n\t\t\t\tparts,\n\t\t\t});\n\t\t}\n\t}\n\n\treturn contents;\n}\n\nfunction convertTools(\n\ttools: Tool[],\n): { functionDeclarations: { name: string; description?: string; parameters: Schema }[] }[] | undefined {\n\tif (tools.length === 0) return undefined;\n\treturn [\n\t\t{\n\t\t\tfunctionDeclarations: tools.map((tool) => ({\n\t\t\t\tname: tool.name,\n\t\t\t\tdescription: tool.description,\n\t\t\t\tparameters: tool.parameters as Schema, // TypeBox generates JSON Schema compatible with SDK Schema type\n\t\t\t})),\n\t\t},\n\t];\n}\n\nfunction mapToolChoice(choice: string): FunctionCallingConfigMode {\n\tswitch (choice) {\n\t\tcase \"auto\":\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t\tcase \"none\":\n\t\t\treturn FunctionCallingConfigMode.NONE;\n\t\tcase \"any\":\n\t\t\treturn FunctionCallingConfigMode.ANY;\n\t\tdefault:\n\t\t\treturn FunctionCallingConfigMode.AUTO;\n\t}\n}\n\nfunction mapStopReason(reason: FinishReason): StopReason {\n\tswitch (reason) {\n\t\tcase FinishReason.STOP:\n\t\t\treturn \"stop\";\n\t\tcase FinishReason.MAX_TOKENS:\n\t\t\treturn \"length\";\n\t\tcase FinishReason.BLOCKLIST:\n\t\tcase FinishReason.PROHIBITED_CONTENT:\n\t\tcase FinishReason.SPII:\n\t\tcase FinishReason.SAFETY:\n\t\tcase FinishReason.IMAGE_SAFETY:\n\t\tcase FinishReason.IMAGE_PROHIBITED_CONTENT:\n\t\tcase FinishReason.IMAGE_RECITATION:\n\t\tcase FinishReason.IMAGE_OTHER:\n\t\tcase FinishReason.RECITATION:\n\t\tcase FinishReason.FINISH_REASON_UNSPECIFIED:\n\t\tcase FinishReason.OTHER:\n\t\tcase FinishReason.LANGUAGE:\n\t\tcase FinishReason.MALFORMED_FUNCTION_CALL:\n\t\tcase FinishReason.UNEXPECTED_TOOL_CALL:\n\t\tcase FinishReason.NO_IMAGE:\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
@@ -1 +1 @@
- {"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../src/stream.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EACX,GAAG,EACH,gBAAgB,EAChB,2BAA2B,EAC3B,OAAO,EACP,aAAa,EACb,KAAK,EACL,aAAa,EAEb,mBAAmB,EACnB,MAAM,YAAY,CAAC;AAIpB,wBAAgB,SAAS,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;AACtE,wBAAgB,SAAS,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;AAK/D,wBAAgB,SAAS,CAAC,QAAQ,EAAE,aAAa,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,wBAAgB,SAAS,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;AA2BhE,wBAAgB,MAAM,CAAC,IAAI,SAAS,GAAG,EACtC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,aAAa,CAAC,IAAI,CAAC,GAC3B,2BAA2B,CA2B7B;AAED,wBAAsB,QAAQ,CAAC,IAAI,SAAS,GAAG,EAC9C,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,aAAa,CAAC,IAAI,CAAC,GAC3B,OAAO,CAAC,gBAAgB,CAAC,CAG3B;AAED,wBAAgB,YAAY,CAAC,IAAI,SAAS,GAAG,EAC5C,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,mBAAmB,GAC3B,2BAA2B,CAQ7B;AAED,wBAAsB,cAAc,CAAC,IAAI,SAAS,GAAG,EACpD,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,mBAAmB,GAC3B,OAAO,CAAC,gBAAgB,CAAC,CAG3B","sourcesContent":["import { ThinkingLevel } from \"@google/genai\";\nimport { type AnthropicOptions, streamAnthropic } from \"./providers/anthropic.js\";\nimport { type GoogleOptions, streamGoogle } from \"./providers/google.js\";\nimport { type OpenAICompletionsOptions, streamOpenAICompletions } from \"./providers/openai-completions.js\";\nimport { type OpenAIResponsesOptions, streamOpenAIResponses } from \"./providers/openai-responses.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tKnownProvider,\n\tModel,\n\tOptionsForApi,\n\tReasoningEffort,\n\tSimpleStreamOptions,\n} from \"./types.js\";\n\nconst apiKeys: Map<string, string> = new Map();\n\nexport function setApiKey(provider: KnownProvider, key: string): void;\nexport function setApiKey(provider: string, key: string): void;\nexport function setApiKey(provider: any, key: string): void {\n\tapiKeys.set(provider, key);\n}\n\nexport function getApiKey(provider: KnownProvider): string | undefined;\nexport function getApiKey(provider: string): string | undefined;\nexport function getApiKey(provider: any): string | undefined {\n\t// Check explicit keys first\n\tconst key = apiKeys.get(provider);\n\tif (key) return key;\n\n\t// Fall back to environment variables\n\tif (provider === \"github-copilot\") {\n\t\treturn process.env.COPILOT_GITHUB_TOKEN || process.env.GH_TOKEN || process.env.GITHUB_TOKEN;\n\t}\n\n\tconst envMap: Record<string, string> = {\n\t\topenai: \"OPENAI_API_KEY\",\n\t\tanthropic: \"ANTHROPIC_API_KEY\",\n\t\tgoogle: \"GEMINI_API_KEY\",\n\t\tgroq: \"GROQ_API_KEY\",\n\t\tcerebras: \"CEREBRAS_API_KEY\",\n\t\txai: \"XAI_API_KEY\",\n\t\topenrouter: \"OPENROUTER_API_KEY\",\n\t\tzai: \"ZAI_API_KEY\",\n\t\tmistral: \"MISTRAL_API_KEY\",\n\t};\n\n\tconst envVar = envMap[provider];\n\treturn envVar ? 
process.env[envVar] : undefined;\n}\n\nexport function stream<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\tconst providerOptions = { ...options, apiKey };\n\n\tconst api: Api = model.api;\n\tswitch (api) {\n\t\tcase \"anthropic-messages\":\n\t\t\treturn streamAnthropic(model as Model<\"anthropic-messages\">, context, providerOptions);\n\n\t\tcase \"openai-completions\":\n\t\t\treturn streamOpenAICompletions(model as Model<\"openai-completions\">, context, providerOptions as any);\n\n\t\tcase \"openai-responses\":\n\t\t\treturn streamOpenAIResponses(model as Model<\"openai-responses\">, context, providerOptions as any);\n\n\t\tcase \"google-generative-ai\":\n\t\t\treturn streamGoogle(model as Model<\"google-generative-ai\">, context, providerOptions);\n\n\t\tdefault: {\n\t\t\t// This should never be reached if all Api cases are handled\n\t\t\tconst _exhaustive: never = api;\n\t\t\tthrow new Error(`Unhandled API: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\nexport async function complete<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): Promise<AssistantMessage> {\n\tconst s = stream(model, context, options);\n\treturn s.result();\n}\n\nexport function streamSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst providerOptions = mapOptionsForApi(model, options, apiKey);\n\treturn stream(model, context, providerOptions);\n}\n\nexport async function completeSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): Promise<AssistantMessage> {\n\tconst s = streamSimple(model, context, options);\n\treturn s.result();\n}\n\nfunction mapOptionsForApi<TApi extends Api>(\n\tmodel: Model<TApi>,\n\toptions?: SimpleStreamOptions,\n\tapiKey?: string,\n): OptionsForApi<TApi> {\n\tconst base = {\n\t\ttemperature: options?.temperature,\n\t\tmaxTokens: options?.maxTokens || Math.min(model.maxTokens, 32000),\n\t\tsignal: options?.signal,\n\t\tapiKey: apiKey || options?.apiKey,\n\t};\n\n\t// Helper to clamp xhigh to high for providers that don't support it\n\tconst clampReasoning = (effort: ReasoningEffort | undefined) => (effort === \"xhigh\" ? 
\"high\" : effort);\n\n\tswitch (model.api) {\n\t\tcase \"anthropic-messages\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinkingEnabled: false } satisfies AnthropicOptions;\n\t\t\t}\n\n\t\t\tconst anthropicBudgets = {\n\t\t\t\tminimal: 1024,\n\t\t\t\tlow: 2048,\n\t\t\t\tmedium: 8192,\n\t\t\t\thigh: 16384,\n\t\t\t};\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinkingEnabled: true,\n\t\t\t\tthinkingBudgetTokens: anthropicBudgets[clampReasoning(options.reasoning)!],\n\t\t\t} satisfies AnthropicOptions;\n\t\t}\n\n\t\tcase \"openai-completions\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAICompletionsOptions;\n\n\t\tcase \"openai-responses\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAIResponsesOptions;\n\n\t\tcase \"google-generative-ai\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\t// This is needed because Gemini has \"dynamic thinking\" enabled by default\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinking: { enabled: false } } satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\tconst googleModel = model as Model<\"google-generative-ai\">;\n\t\t\tconst effort = clampReasoning(options.reasoning)!;\n\n\t\t\t// Gemini 3 Pro models use thinkingLevel exclusively instead of thinkingBudget.\n\t\t\t// https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\t\t\tif (isGemini3ProModel(googleModel)) {\n\t\t\t\treturn {\n\t\t\t\t\t...base,\n\t\t\t\t\tthinking: {\n\t\t\t\t\t\tenabled: true,\n\t\t\t\t\t\tlevel: getGoogleThinkingLevel(effort),\n\t\t\t\t\t},\n\t\t\t\t} satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinking: {\n\t\t\t\t\tenabled: true,\n\t\t\t\t\tbudgetTokens: getGoogleBudget(googleModel, effort),\n\t\t\t\t},\n\t\t\t} satisfies GoogleOptions;\n\t\t}\n\n\t\tdefault: {\n\t\t\t// Exhaustiveness check\n\t\t\tconst _exhaustive: never = model.api;\n\t\t\tthrow new Error(`Unhandled API in mapOptionsForApi: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\ntype ClampedReasoningEffort = Exclude<ReasoningEffort, \"xhigh\">;\n\nfunction isGemini3ProModel(model: Model<\"google-generative-ai\">): boolean {\n\t// Covers gemini-3-pro, gemini-3-pro-preview, and possible other prefixed ids in the future\n\treturn model.id.includes(\"3-pro\");\n}\n\nfunction getGoogleThinkingLevel(effort: ClampedReasoningEffort): ThinkingLevel {\n\t// Gemini 3 Pro only supports LOW/HIGH (for now)\n\tswitch (effort) {\n\t\tcase \"minimal\":\n\t\tcase \"low\":\n\t\t\treturn ThinkingLevel.LOW;\n\t\tcase \"medium\":\n\t\tcase \"high\":\n\t\t\treturn ThinkingLevel.HIGH;\n\t}\n}\n\nfunction getGoogleBudget(model: Model<\"google-generative-ai\">, effort: ClampedReasoningEffort): number {\n\t// See https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\tif (model.id.includes(\"2.5-pro\")) {\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 32768,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\tif (model.id.includes(\"2.5-flash\")) {\n\t\t// Covers 2.5-flash-lite as well\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 24576,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\t// Unknown model - use dynamic\n\treturn -1;\n}\n"]}
+ {"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../src/stream.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EACX,GAAG,EACH,gBAAgB,EAChB,2BAA2B,EAC3B,OAAO,EACP,aAAa,EACb,KAAK,EACL,aAAa,EAEb,mBAAmB,EACnB,MAAM,YAAY,CAAC;AAIpB,wBAAgB,SAAS,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;AACtE,wBAAgB,SAAS,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;AAK/D,wBAAgB,SAAS,CAAC,QAAQ,EAAE,aAAa,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,wBAAgB,SAAS,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;AA2BhE,wBAAgB,MAAM,CAAC,IAAI,SAAS,GAAG,EACtC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,aAAa,CAAC,IAAI,CAAC,GAC3B,2BAA2B,CA2B7B;AAED,wBAAsB,QAAQ,CAAC,IAAI,SAAS,GAAG,EAC9C,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,aAAa,CAAC,IAAI,CAAC,GAC3B,OAAO,CAAC,gBAAgB,CAAC,CAG3B;AAED,wBAAgB,YAAY,CAAC,IAAI,SAAS,GAAG,EAC5C,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,mBAAmB,GAC3B,2BAA2B,CAQ7B;AAED,wBAAsB,cAAc,CAAC,IAAI,SAAS,GAAG,EACpD,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,mBAAmB,GAC3B,OAAO,CAAC,gBAAgB,CAAC,CAG3B","sourcesContent":["import { ThinkingLevel } from \"@google/genai\";\nimport { type AnthropicOptions, streamAnthropic } from \"./providers/anthropic.js\";\nimport { type GoogleOptions, streamGoogle } from \"./providers/google.js\";\nimport { type OpenAICompletionsOptions, streamOpenAICompletions } from \"./providers/openai-completions.js\";\nimport { type OpenAIResponsesOptions, streamOpenAIResponses } from \"./providers/openai-responses.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tKnownProvider,\n\tModel,\n\tOptionsForApi,\n\tReasoningEffort,\n\tSimpleStreamOptions,\n} from \"./types.js\";\n\nconst apiKeys: Map<string, string> = new Map();\n\nexport function setApiKey(provider: KnownProvider, key: string): void;\nexport function setApiKey(provider: string, key: string): void;\nexport function setApiKey(provider: any, key: string): void {\n\tapiKeys.set(provider, key);\n}\n\nexport function getApiKey(provider: KnownProvider): string | undefined;\nexport function getApiKey(provider: string): string | undefined;\nexport function getApiKey(provider: any): string | undefined {\n\t// Check explicit keys first\n\tconst key = apiKeys.get(provider);\n\tif (key) return key;\n\n\t// Fall back to environment variables\n\tif (provider === \"github-copilot\") {\n\t\treturn process.env.COPILOT_GITHUB_TOKEN || process.env.GH_TOKEN || process.env.GITHUB_TOKEN;\n\t}\n\n\tconst envMap: Record<string, string> = {\n\t\topenai: \"OPENAI_API_KEY\",\n\t\tanthropic: \"ANTHROPIC_API_KEY\",\n\t\tgoogle: \"GEMINI_API_KEY\",\n\t\tgroq: \"GROQ_API_KEY\",\n\t\tcerebras: \"CEREBRAS_API_KEY\",\n\t\txai: \"XAI_API_KEY\",\n\t\topenrouter: \"OPENROUTER_API_KEY\",\n\t\tzai: \"ZAI_API_KEY\",\n\t\tmistral: \"MISTRAL_API_KEY\",\n\t};\n\n\tconst envVar = envMap[provider];\n\treturn envVar ? 
process.env[envVar] : undefined;\n}\n\nexport function stream<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\tconst providerOptions = { ...options, apiKey };\n\n\tconst api: Api = model.api;\n\tswitch (api) {\n\t\tcase \"anthropic-messages\":\n\t\t\treturn streamAnthropic(model as Model<\"anthropic-messages\">, context, providerOptions);\n\n\t\tcase \"openai-completions\":\n\t\t\treturn streamOpenAICompletions(model as Model<\"openai-completions\">, context, providerOptions as any);\n\n\t\tcase \"openai-responses\":\n\t\t\treturn streamOpenAIResponses(model as Model<\"openai-responses\">, context, providerOptions as any);\n\n\t\tcase \"google-generative-ai\":\n\t\t\treturn streamGoogle(model as Model<\"google-generative-ai\">, context, providerOptions);\n\n\t\tdefault: {\n\t\t\t// This should never be reached if all Api cases are handled\n\t\t\tconst _exhaustive: never = api;\n\t\t\tthrow new Error(`Unhandled API: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\nexport async function complete<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): Promise<AssistantMessage> {\n\tconst s = stream(model, context, options);\n\treturn s.result();\n}\n\nexport function streamSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst providerOptions = mapOptionsForApi(model, options, apiKey);\n\treturn stream(model, context, providerOptions);\n}\n\nexport async function completeSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): Promise<AssistantMessage> {\n\tconst s = streamSimple(model, context, options);\n\treturn s.result();\n}\n\nfunction mapOptionsForApi<TApi extends Api>(\n\tmodel: Model<TApi>,\n\toptions?: SimpleStreamOptions,\n\tapiKey?: string,\n): OptionsForApi<TApi> {\n\tconst base = {\n\t\ttemperature: options?.temperature,\n\t\tmaxTokens: options?.maxTokens || Math.min(model.maxTokens, 32000),\n\t\tsignal: options?.signal,\n\t\tapiKey: apiKey || options?.apiKey,\n\t};\n\n\t// Helper to clamp xhigh to high for providers that don't support it\n\tconst clampReasoning = (effort: ReasoningEffort | undefined) => (effort === \"xhigh\" ? 
\"high\" : effort);\n\n\tswitch (model.api) {\n\t\tcase \"anthropic-messages\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinkingEnabled: false } satisfies AnthropicOptions;\n\t\t\t}\n\n\t\t\tconst anthropicBudgets = {\n\t\t\t\tminimal: 1024,\n\t\t\t\tlow: 2048,\n\t\t\t\tmedium: 8192,\n\t\t\t\thigh: 16384,\n\t\t\t};\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinkingEnabled: true,\n\t\t\t\tthinkingBudgetTokens: anthropicBudgets[clampReasoning(options.reasoning)!],\n\t\t\t} satisfies AnthropicOptions;\n\t\t}\n\n\t\tcase \"openai-completions\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAICompletionsOptions;\n\n\t\tcase \"openai-responses\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAIResponsesOptions;\n\n\t\tcase \"google-generative-ai\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\t// This is needed because Gemini has \"dynamic thinking\" enabled by default\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinking: { enabled: false } } satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\tconst googleModel = model as Model<\"google-generative-ai\">;\n\t\t\tconst effort = clampReasoning(options.reasoning)!;\n\n\t\t\t// Gemini 3 models use thinkingLevel exclusively instead of thinkingBudget.\n\t\t\t// https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\t\t\tif (isGemini3ProModel(googleModel) || isGemini3FlashModel(googleModel)) {\n\t\t\t\treturn {\n\t\t\t\t\t...base,\n\t\t\t\t\tthinking: {\n\t\t\t\t\t\tenabled: true,\n\t\t\t\t\t\tlevel: getGemini3ThinkingLevel(effort, googleModel),\n\t\t\t\t\t},\n\t\t\t\t} satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinking: {\n\t\t\t\t\tenabled: true,\n\t\t\t\t\tbudgetTokens: getGoogleBudget(googleModel, effort),\n\t\t\t\t},\n\t\t\t} satisfies GoogleOptions;\n\t\t}\n\n\t\tdefault: {\n\t\t\t// Exhaustiveness check\n\t\t\tconst _exhaustive: never = model.api;\n\t\t\tthrow new Error(`Unhandled API in mapOptionsForApi: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\ntype ClampedReasoningEffort = Exclude<ReasoningEffort, \"xhigh\">;\n\nfunction isGemini3ProModel(model: Model<\"google-generative-ai\">): boolean {\n\t// Covers gemini-3-pro, gemini-3-pro-preview, and possible other prefixed ids in the future\n\treturn model.id.includes(\"3-pro\");\n}\n\nfunction isGemini3FlashModel(model: Model<\"google-generative-ai\">): boolean {\n\t// Covers gemini-3-flash, gemini-3-flash-preview, and possible other prefixed ids in the future\n\treturn model.id.includes(\"3-flash\");\n}\n\nfunction getGemini3ThinkingLevel(effort: ClampedReasoningEffort, model: Model<\"google-generative-ai\">): ThinkingLevel {\n\tif (isGemini3ProModel(model)) {\n\t\t// Gemini 3 Pro only supports LOW/HIGH (for now)\n\t\tswitch (effort) {\n\t\t\tcase \"minimal\":\n\t\t\tcase \"low\":\n\t\t\t\treturn ThinkingLevel.LOW;\n\t\t\tcase \"medium\":\n\t\t\tcase \"high\":\n\t\t\t\treturn ThinkingLevel.HIGH;\n\t\t}\n\t}\n\t// Gemini 3 Flash supports all four levels\n\tswitch (effort) {\n\t\tcase \"minimal\":\n\t\t\treturn ThinkingLevel.MINIMAL;\n\t\tcase \"low\":\n\t\t\treturn ThinkingLevel.LOW;\n\t\tcase \"medium\":\n\t\t\treturn ThinkingLevel.MEDIUM;\n\t\tcase \"high\":\n\t\t\treturn ThinkingLevel.HIGH;\n\t}\n}\n\nfunction getGoogleBudget(model: Model<\"google-generative-ai\">, effort: ClampedReasoningEffort): number {\n\t// See 
https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\tif (model.id.includes(\"2.5-pro\")) {\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 32768,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\tif (model.id.includes(\"2.5-flash\")) {\n\t\t// Covers 2.5-flash-lite as well\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 24576,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\t// Unknown model - use dynamic\n\treturn -1;\n}\n"]}
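The embedded stream.ts source above wires a provider-agnostic reasoning effort into Google thinking settings, but the single-line source map makes the decision tree hard to follow. The sketch below restates it as a standalone function for readability; it is illustrative only, the describeThinking name is invented here, and the shipped logic is the getGemini3ThinkingLevel/getGoogleBudget pair visible in the source.

import { ThinkingLevel } from "@google/genai";

type Effort = "minimal" | "low" | "medium" | "high"; // ReasoningEffort with "xhigh" already clamped to "high"

// Restates the shipped mapping: Gemini 3 ids get a thinkingLevel, Gemini 2.5 ids get a
// token budget, and anything else falls back to dynamic thinking (budget -1).
function describeThinking(modelId: string, effort: Effort): { level?: ThinkingLevel; budgetTokens?: number } {
	if (modelId.includes("3-pro")) {
		// Gemini 3 Pro currently supports only LOW/HIGH.
		return { level: effort === "minimal" || effort === "low" ? ThinkingLevel.LOW : ThinkingLevel.HIGH };
	}
	if (modelId.includes("3-flash")) {
		// Gemini 3 Flash supports all four levels.
		const levels: Record<Effort, ThinkingLevel> = {
			minimal: ThinkingLevel.MINIMAL,
			low: ThinkingLevel.LOW,
			medium: ThinkingLevel.MEDIUM,
			high: ThinkingLevel.HIGH,
		};
		return { level: levels[effort] };
	}
	if (modelId.includes("2.5-pro")) {
		return { budgetTokens: { minimal: 128, low: 2048, medium: 8192, high: 32768 }[effort] };
	}
	if (modelId.includes("2.5-flash")) {
		// Covers 2.5-flash-lite as well.
		return { budgetTokens: { minimal: 128, low: 2048, medium: 8192, high: 24576 }[effort] };
	}
	return { budgetTokens: -1 }; // unknown model: let the API choose dynamically
}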
package/dist/stream.js CHANGED
@@ -114,14 +114,14 @@ function mapOptionsForApi(model, options, apiKey) {
  }
  const googleModel = model;
  const effort = clampReasoning(options.reasoning);
- // Gemini 3 Pro models use thinkingLevel exclusively instead of thinkingBudget.
+ // Gemini 3 models use thinkingLevel exclusively instead of thinkingBudget.
  // https://ai.google.dev/gemini-api/docs/thinking#set-budget
- if (isGemini3ProModel(googleModel)) {
+ if (isGemini3ProModel(googleModel) || isGemini3FlashModel(googleModel)) {
  return {
  ...base,
  thinking: {
  enabled: true,
- level: getGoogleThinkingLevel(effort),
+ level: getGemini3ThinkingLevel(effort, googleModel),
  },
  };
  }
@@ -144,13 +144,30 @@ function isGemini3ProModel(model) {
  // Covers gemini-3-pro, gemini-3-pro-preview, and possible other prefixed ids in the future
  return model.id.includes("3-pro");
  }
- function getGoogleThinkingLevel(effort) {
- // Gemini 3 Pro only supports LOW/HIGH (for now)
+ function isGemini3FlashModel(model) {
+ // Covers gemini-3-flash, gemini-3-flash-preview, and possible other prefixed ids in the future
+ return model.id.includes("3-flash");
+ }
+ function getGemini3ThinkingLevel(effort, model) {
+ if (isGemini3ProModel(model)) {
+ // Gemini 3 Pro only supports LOW/HIGH (for now)
+ switch (effort) {
+ case "minimal":
+ case "low":
+ return ThinkingLevel.LOW;
+ case "medium":
+ case "high":
+ return ThinkingLevel.HIGH;
+ }
+ }
+ // Gemini 3 Flash supports all four levels
  switch (effort) {
  case "minimal":
+ return ThinkingLevel.MINIMAL;
  case "low":
  return ThinkingLevel.LOW;
  case "medium":
+ return ThinkingLevel.MEDIUM;
  case "high":
  return ThinkingLevel.HIGH;
  }
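In practical terms, the hunk above changes what a Gemini 3 Flash id receives. A rough before/after for reasoning "medium", with values read off the functions in this file (the id below is just an example):

// Effect for model.id = "gemini-3-flash-preview" with reasoning "medium":
//
//   0.23.2: not a 3-pro id → getGoogleBudget() → no "2.5-*" match
//           → thinking: { enabled: true, budgetTokens: -1 }   (dynamic thinking)
//   0.23.4: isGemini3FlashModel() matches
//           → thinking: { enabled: true, level: ThinkingLevel.MEDIUM }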
@@ -1 +1 @@
- {"version":3,"file":"stream.js","sourceRoot":"","sources":["../src/stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAC;AAC9C,OAAO,EAAyB,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAClF,OAAO,EAAsB,YAAY,EAAE,MAAM,uBAAuB,CAAC;AACzE,OAAO,EAAiC,uBAAuB,EAAE,MAAM,mCAAmC,CAAC;AAC3G,OAAO,EAA+B,qBAAqB,EAAE,MAAM,iCAAiC,CAAC;AAarG,MAAM,OAAO,GAAwB,IAAI,GAAG,EAAE,CAAC;AAI/C,MAAM,UAAU,SAAS,CAAC,QAAa,EAAE,GAAW,EAAQ;IAC3D,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;AAAA,CAC3B;AAID,MAAM,UAAU,SAAS,CAAC,QAAa,EAAsB;IAC5D,4BAA4B;IAC5B,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAClC,IAAI,GAAG;QAAE,OAAO,GAAG,CAAC;IAEpB,qCAAqC;IACrC,IAAI,QAAQ,KAAK,gBAAgB,EAAE,CAAC;QACnC,OAAO,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC;IAC7F,CAAC;IAED,MAAM,MAAM,GAA2B;QACtC,MAAM,EAAE,gBAAgB;QACxB,SAAS,EAAE,mBAAmB;QAC9B,MAAM,EAAE,gBAAgB;QACxB,IAAI,EAAE,cAAc;QACpB,QAAQ,EAAE,kBAAkB;QAC5B,GAAG,EAAE,aAAa;QAClB,UAAU,EAAE,oBAAoB;QAChC,GAAG,EAAE,aAAa;QAClB,OAAO,EAAE,iBAAiB;KAC1B,CAAC;IAEF,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;IAChC,OAAO,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAAA,CAChD;AAED,MAAM,UAAU,MAAM,CACrB,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACC;IAC9B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IACD,MAAM,eAAe,GAAG,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,CAAC;IAE/C,MAAM,GAAG,GAAQ,KAAK,CAAC,GAAG,CAAC;IAC3B,QAAQ,GAAG,EAAE,CAAC;QACb,KAAK,oBAAoB;YACxB,OAAO,eAAe,CAAC,KAAoC,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC;QAExF,KAAK,oBAAoB;YACxB,OAAO,uBAAuB,CAAC,KAAoC,EAAE,OAAO,EAAE,eAAsB,CAAC,CAAC;QAEvG,KAAK,kBAAkB;YACtB,OAAO,qBAAqB,CAAC,KAAkC,EAAE,OAAO,EAAE,eAAsB,CAAC,CAAC;QAEnG,KAAK,sBAAsB;YAC1B,OAAO,YAAY,CAAC,KAAsC,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC;QAEvF,SAAS,CAAC;YACT,4DAA4D;YAC5D,MAAM,WAAW,GAAU,GAAG,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,kBAAkB,WAAW,EAAE,CAAC,CAAC;QAClD,CAAC;IACF,CAAC;AAAA,CACD;AAED,MAAM,CAAC,KAAK,UAAU,QAAQ,CAC7B,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACD;IAC5B,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC,MAAM,EAAE,CAAC;AAAA,CAClB;AAED,MAAM,UAAU,YAAY,CAC3B,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACC;IAC9B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,MAAM,eAAe,GAAG,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACjE,OAAO,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC;AAAA,CAC/C;AAED,MAAM,CAAC,KAAK,UAAU,cAAc,CACnC,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACD;IAC5B,MAAM,CAAC,GAAG,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAChD,OAAO,CAAC,CAAC,MAAM,EAAE,CAAC;AAAA,CAClB;AAED,SAAS,gBAAgB,CACxB,KAAkB,EAClB,OAA6B,EAC7B,MAAe,EACO;IACtB,MAAM,IAAI,GAAG;QACZ,WAAW,EAAE,OAAO,EAAE,WAAW;QACjC,SAAS,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,CAAC;QACjE,MAAM,EAAE,OAAO,EAAE,MAAM;QACvB,MAAM,EAAE,MAAM,IAAI,OAAO,EAAE,MAAM;KACjC,CAAC;IAEF,oEAAoE;IACpE,MAAM,cAAc,GAAG,CAAC,MAAmC,EAAE,EAAE,CAAC,CAAC,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;IAEvG,QAAQ,KAAK,CAAC,GAAG,EAAE,CAAC;QACnB,KAAK,oBAAoB,EAAE,CAAC;YAC3B,8DAA8D;YAC9D,IAAI,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC;gBACzB,OAAO,EAAE,GAAG,IAAI,EAAE,eAAe,EAAE,KAAK,EAA6B,CAAC;YACvE,CAAC;YAED,MAAM,gBAAgB,GAAG;gBACxB,OAAO,EAAE,IAAI;gBACb,GAAG,EAAE,IAAI;gBACT,MAAM,EAAE,IAAI;gBACZ,IAAI,EAAE,KAAK;aACX,CAAC;YAEF,OAAO;gBACN,GAAG,IAAI;gBACP,eAAe,EAAE,IAAI;gBACrB,oBAAoB,EAAE,gBAAgB,CAAC,cAAc,CAAC,OAAO,CAAC,SAAS,CAAE,CAAC;aAC/C,CAAC;QAC9B,CAAC;Q
AED,KAAK,oBAAoB;YACxB,OAAO;gBACN,GAAG,IAAI;gBACP,eAAe,EAAE,OAAO,EAAE,SAAS;aACA,CAAC;QAEtC,KAAK,kBAAkB;YACtB,OAAO;gBACN,GAAG,IAAI;gBACP,eAAe,EAAE,OAAO,EAAE,SAAS;aACF,CAAC;QAEpC,KAAK,sBAAsB,EAAE,CAAC;YAC7B,8DAA8D;YAC9D,0EAA0E;YAC1E,IAAI,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC;gBACzB,OAAO,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,EAA0B,CAAC;YAC1E,CAAC;YAED,MAAM,WAAW,GAAG,KAAsC,CAAC;YAC3D,MAAM,MAAM,GAAG,cAAc,CAAC,OAAO,CAAC,SAAS,CAAE,CAAC;YAElD,+EAA+E;YAC/E,4DAA4D;YAC5D,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE,CAAC;gBACpC,OAAO;oBACN,GAAG,IAAI;oBACP,QAAQ,EAAE;wBACT,OAAO,EAAE,IAAI;wBACb,KAAK,EAAE,sBAAsB,CAAC,MAAM,CAAC;qBACrC;iBACuB,CAAC;YAC3B,CAAC;YAED,OAAO;gBACN,GAAG,IAAI;gBACP,QAAQ,EAAE;oBACT,OAAO,EAAE,IAAI;oBACb,YAAY,EAAE,eAAe,CAAC,WAAW,EAAE,MAAM,CAAC;iBAClD;aACuB,CAAC;QAC3B,CAAC;QAED,SAAS,CAAC;YACT,uBAAuB;YACvB,MAAM,WAAW,GAAU,KAAK,CAAC,GAAG,CAAC;YACrC,MAAM,IAAI,KAAK,CAAC,sCAAsC,WAAW,EAAE,CAAC,CAAC;QACtE,CAAC;IACF,CAAC;AAAA,CACD;AAID,SAAS,iBAAiB,CAAC,KAAoC,EAAW;IACzE,2FAA2F;IAC3F,OAAO,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AAAA,CAClC;AAED,SAAS,sBAAsB,CAAC,MAA8B,EAAiB;IAC9E,gDAAgD;IAChD,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,SAAS,CAAC;QACf,KAAK,KAAK;YACT,OAAO,aAAa,CAAC,GAAG,CAAC;QAC1B,KAAK,QAAQ,CAAC;QACd,KAAK,MAAM;YACV,OAAO,aAAa,CAAC,IAAI,CAAC;IAC5B,CAAC;AAAA,CACD;AAED,SAAS,eAAe,CAAC,KAAoC,EAAE,MAA8B,EAAU;IACtG,gEAAgE;IAChE,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAClC,MAAM,OAAO,GAA2C;YACvD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;QACpC,gCAAgC;QAChC,MAAM,OAAO,GAA2C;YACvD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,8BAA8B;IAC9B,OAAO,CAAC,CAAC,CAAC;AAAA,CACV","sourcesContent":["import { ThinkingLevel } from \"@google/genai\";\nimport { type AnthropicOptions, streamAnthropic } from \"./providers/anthropic.js\";\nimport { type GoogleOptions, streamGoogle } from \"./providers/google.js\";\nimport { type OpenAICompletionsOptions, streamOpenAICompletions } from \"./providers/openai-completions.js\";\nimport { type OpenAIResponsesOptions, streamOpenAIResponses } from \"./providers/openai-responses.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tKnownProvider,\n\tModel,\n\tOptionsForApi,\n\tReasoningEffort,\n\tSimpleStreamOptions,\n} from \"./types.js\";\n\nconst apiKeys: Map<string, string> = new Map();\n\nexport function setApiKey(provider: KnownProvider, key: string): void;\nexport function setApiKey(provider: string, key: string): void;\nexport function setApiKey(provider: any, key: string): void {\n\tapiKeys.set(provider, key);\n}\n\nexport function getApiKey(provider: KnownProvider): string | undefined;\nexport function getApiKey(provider: string): string | undefined;\nexport function getApiKey(provider: any): string | undefined {\n\t// Check explicit keys first\n\tconst key = apiKeys.get(provider);\n\tif (key) return key;\n\n\t// Fall back to environment variables\n\tif (provider === \"github-copilot\") {\n\t\treturn process.env.COPILOT_GITHUB_TOKEN || process.env.GH_TOKEN || process.env.GITHUB_TOKEN;\n\t}\n\n\tconst envMap: Record<string, string> = {\n\t\topenai: \"OPENAI_API_KEY\",\n\t\tanthropic: \"ANTHROPIC_API_KEY\",\n\t\tgoogle: \"GEMINI_API_KEY\",\n\t\tgroq: \"GROQ_API_KEY\",\n\t\tcerebras: \"CEREBRAS_API_KEY\",\n\t\txai: \"XAI_API_KEY\",\n\t\topenrouter: \"OPENROUTER_API_KEY\",\n\t\tzai: 
\"ZAI_API_KEY\",\n\t\tmistral: \"MISTRAL_API_KEY\",\n\t};\n\n\tconst envVar = envMap[provider];\n\treturn envVar ? process.env[envVar] : undefined;\n}\n\nexport function stream<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\tconst providerOptions = { ...options, apiKey };\n\n\tconst api: Api = model.api;\n\tswitch (api) {\n\t\tcase \"anthropic-messages\":\n\t\t\treturn streamAnthropic(model as Model<\"anthropic-messages\">, context, providerOptions);\n\n\t\tcase \"openai-completions\":\n\t\t\treturn streamOpenAICompletions(model as Model<\"openai-completions\">, context, providerOptions as any);\n\n\t\tcase \"openai-responses\":\n\t\t\treturn streamOpenAIResponses(model as Model<\"openai-responses\">, context, providerOptions as any);\n\n\t\tcase \"google-generative-ai\":\n\t\t\treturn streamGoogle(model as Model<\"google-generative-ai\">, context, providerOptions);\n\n\t\tdefault: {\n\t\t\t// This should never be reached if all Api cases are handled\n\t\t\tconst _exhaustive: never = api;\n\t\t\tthrow new Error(`Unhandled API: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\nexport async function complete<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): Promise<AssistantMessage> {\n\tconst s = stream(model, context, options);\n\treturn s.result();\n}\n\nexport function streamSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst providerOptions = mapOptionsForApi(model, options, apiKey);\n\treturn stream(model, context, providerOptions);\n}\n\nexport async function completeSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): Promise<AssistantMessage> {\n\tconst s = streamSimple(model, context, options);\n\treturn s.result();\n}\n\nfunction mapOptionsForApi<TApi extends Api>(\n\tmodel: Model<TApi>,\n\toptions?: SimpleStreamOptions,\n\tapiKey?: string,\n): OptionsForApi<TApi> {\n\tconst base = {\n\t\ttemperature: options?.temperature,\n\t\tmaxTokens: options?.maxTokens || Math.min(model.maxTokens, 32000),\n\t\tsignal: options?.signal,\n\t\tapiKey: apiKey || options?.apiKey,\n\t};\n\n\t// Helper to clamp xhigh to high for providers that don't support it\n\tconst clampReasoning = (effort: ReasoningEffort | undefined) => (effort === \"xhigh\" ? 
\"high\" : effort);\n\n\tswitch (model.api) {\n\t\tcase \"anthropic-messages\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinkingEnabled: false } satisfies AnthropicOptions;\n\t\t\t}\n\n\t\t\tconst anthropicBudgets = {\n\t\t\t\tminimal: 1024,\n\t\t\t\tlow: 2048,\n\t\t\t\tmedium: 8192,\n\t\t\t\thigh: 16384,\n\t\t\t};\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinkingEnabled: true,\n\t\t\t\tthinkingBudgetTokens: anthropicBudgets[clampReasoning(options.reasoning)!],\n\t\t\t} satisfies AnthropicOptions;\n\t\t}\n\n\t\tcase \"openai-completions\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAICompletionsOptions;\n\n\t\tcase \"openai-responses\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAIResponsesOptions;\n\n\t\tcase \"google-generative-ai\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\t// This is needed because Gemini has \"dynamic thinking\" enabled by default\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinking: { enabled: false } } satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\tconst googleModel = model as Model<\"google-generative-ai\">;\n\t\t\tconst effort = clampReasoning(options.reasoning)!;\n\n\t\t\t// Gemini 3 Pro models use thinkingLevel exclusively instead of thinkingBudget.\n\t\t\t// https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\t\t\tif (isGemini3ProModel(googleModel)) {\n\t\t\t\treturn {\n\t\t\t\t\t...base,\n\t\t\t\t\tthinking: {\n\t\t\t\t\t\tenabled: true,\n\t\t\t\t\t\tlevel: getGoogleThinkingLevel(effort),\n\t\t\t\t\t},\n\t\t\t\t} satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinking: {\n\t\t\t\t\tenabled: true,\n\t\t\t\t\tbudgetTokens: getGoogleBudget(googleModel, effort),\n\t\t\t\t},\n\t\t\t} satisfies GoogleOptions;\n\t\t}\n\n\t\tdefault: {\n\t\t\t// Exhaustiveness check\n\t\t\tconst _exhaustive: never = model.api;\n\t\t\tthrow new Error(`Unhandled API in mapOptionsForApi: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\ntype ClampedReasoningEffort = Exclude<ReasoningEffort, \"xhigh\">;\n\nfunction isGemini3ProModel(model: Model<\"google-generative-ai\">): boolean {\n\t// Covers gemini-3-pro, gemini-3-pro-preview, and possible other prefixed ids in the future\n\treturn model.id.includes(\"3-pro\");\n}\n\nfunction getGoogleThinkingLevel(effort: ClampedReasoningEffort): ThinkingLevel {\n\t// Gemini 3 Pro only supports LOW/HIGH (for now)\n\tswitch (effort) {\n\t\tcase \"minimal\":\n\t\tcase \"low\":\n\t\t\treturn ThinkingLevel.LOW;\n\t\tcase \"medium\":\n\t\tcase \"high\":\n\t\t\treturn ThinkingLevel.HIGH;\n\t}\n}\n\nfunction getGoogleBudget(model: Model<\"google-generative-ai\">, effort: ClampedReasoningEffort): number {\n\t// See https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\tif (model.id.includes(\"2.5-pro\")) {\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 32768,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\tif (model.id.includes(\"2.5-flash\")) {\n\t\t// Covers 2.5-flash-lite as well\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 24576,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\t// Unknown model - use dynamic\n\treturn -1;\n}\n"]}
+ {"version":3,"file":"stream.js","sourceRoot":"","sources":["../src/stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAC;AAC9C,OAAO,EAAyB,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAClF,OAAO,EAAsB,YAAY,EAAE,MAAM,uBAAuB,CAAC;AACzE,OAAO,EAAiC,uBAAuB,EAAE,MAAM,mCAAmC,CAAC;AAC3G,OAAO,EAA+B,qBAAqB,EAAE,MAAM,iCAAiC,CAAC;AAarG,MAAM,OAAO,GAAwB,IAAI,GAAG,EAAE,CAAC;AAI/C,MAAM,UAAU,SAAS,CAAC,QAAa,EAAE,GAAW,EAAQ;IAC3D,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;AAAA,CAC3B;AAID,MAAM,UAAU,SAAS,CAAC,QAAa,EAAsB;IAC5D,4BAA4B;IAC5B,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAClC,IAAI,GAAG;QAAE,OAAO,GAAG,CAAC;IAEpB,qCAAqC;IACrC,IAAI,QAAQ,KAAK,gBAAgB,EAAE,CAAC;QACnC,OAAO,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC;IAC7F,CAAC;IAED,MAAM,MAAM,GAA2B;QACtC,MAAM,EAAE,gBAAgB;QACxB,SAAS,EAAE,mBAAmB;QAC9B,MAAM,EAAE,gBAAgB;QACxB,IAAI,EAAE,cAAc;QACpB,QAAQ,EAAE,kBAAkB;QAC5B,GAAG,EAAE,aAAa;QAClB,UAAU,EAAE,oBAAoB;QAChC,GAAG,EAAE,aAAa;QAClB,OAAO,EAAE,iBAAiB;KAC1B,CAAC;IAEF,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;IAChC,OAAO,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAAA,CAChD;AAED,MAAM,UAAU,MAAM,CACrB,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACC;IAC9B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IACD,MAAM,eAAe,GAAG,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,CAAC;IAE/C,MAAM,GAAG,GAAQ,KAAK,CAAC,GAAG,CAAC;IAC3B,QAAQ,GAAG,EAAE,CAAC;QACb,KAAK,oBAAoB;YACxB,OAAO,eAAe,CAAC,KAAoC,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC;QAExF,KAAK,oBAAoB;YACxB,OAAO,uBAAuB,CAAC,KAAoC,EAAE,OAAO,EAAE,eAAsB,CAAC,CAAC;QAEvG,KAAK,kBAAkB;YACtB,OAAO,qBAAqB,CAAC,KAAkC,EAAE,OAAO,EAAE,eAAsB,CAAC,CAAC;QAEnG,KAAK,sBAAsB;YAC1B,OAAO,YAAY,CAAC,KAAsC,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC;QAEvF,SAAS,CAAC;YACT,4DAA4D;YAC5D,MAAM,WAAW,GAAU,GAAG,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,kBAAkB,WAAW,EAAE,CAAC,CAAC;QAClD,CAAC;IACF,CAAC;AAAA,CACD;AAED,MAAM,CAAC,KAAK,UAAU,QAAQ,CAC7B,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACD;IAC5B,MAAM,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC,MAAM,EAAE,CAAC;AAAA,CAClB;AAED,MAAM,UAAU,YAAY,CAC3B,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACC;IAC9B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,MAAM,eAAe,GAAG,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACjE,OAAO,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC;AAAA,CAC/C;AAED,MAAM,CAAC,KAAK,UAAU,cAAc,CACnC,KAAkB,EAClB,OAAgB,EAChB,OAA6B,EACD;IAC5B,MAAM,CAAC,GAAG,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAChD,OAAO,CAAC,CAAC,MAAM,EAAE,CAAC;AAAA,CAClB;AAED,SAAS,gBAAgB,CACxB,KAAkB,EAClB,OAA6B,EAC7B,MAAe,EACO;IACtB,MAAM,IAAI,GAAG;QACZ,WAAW,EAAE,OAAO,EAAE,WAAW;QACjC,SAAS,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,CAAC;QACjE,MAAM,EAAE,OAAO,EAAE,MAAM;QACvB,MAAM,EAAE,MAAM,IAAI,OAAO,EAAE,MAAM;KACjC,CAAC;IAEF,oEAAoE;IACpE,MAAM,cAAc,GAAG,CAAC,MAAmC,EAAE,EAAE,CAAC,CAAC,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;IAEvG,QAAQ,KAAK,CAAC,GAAG,EAAE,CAAC;QACnB,KAAK,oBAAoB,EAAE,CAAC;YAC3B,8DAA8D;YAC9D,IAAI,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC;gBACzB,OAAO,EAAE,GAAG,IAAI,EAAE,eAAe,EAAE,KAAK,EAA6B,CAAC;YACvE,CAAC;YAED,MAAM,gBAAgB,GAAG;gBACxB,OAAO,EAAE,IAAI;gBACb,GAAG,EAAE,IAAI;gBACT,MAAM,EAAE,IAAI;gBACZ,IAAI,EAAE,KAAK;aACX,CAAC;YAEF,OAAO;gBACN,GAAG,IAAI;gBACP,eAAe,EAAE,IAAI;gBACrB,oBAAoB,EAAE,gBAAgB,CAAC,cAAc,CAAC,OAAO,CAAC,SAAS,CAAE,CAAC;aAC/C,CAAC;QAC9B,CAAC;Q
AED,KAAK,oBAAoB;YACxB,OAAO;gBACN,GAAG,IAAI;gBACP,eAAe,EAAE,OAAO,EAAE,SAAS;aACA,CAAC;QAEtC,KAAK,kBAAkB;YACtB,OAAO;gBACN,GAAG,IAAI;gBACP,eAAe,EAAE,OAAO,EAAE,SAAS;aACF,CAAC;QAEpC,KAAK,sBAAsB,EAAE,CAAC;YAC7B,8DAA8D;YAC9D,0EAA0E;YAC1E,IAAI,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC;gBACzB,OAAO,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,EAA0B,CAAC;YAC1E,CAAC;YAED,MAAM,WAAW,GAAG,KAAsC,CAAC;YAC3D,MAAM,MAAM,GAAG,cAAc,CAAC,OAAO,CAAC,SAAS,CAAE,CAAC;YAElD,2EAA2E;YAC3E,4DAA4D;YAC5D,IAAI,iBAAiB,CAAC,WAAW,CAAC,IAAI,mBAAmB,CAAC,WAAW,CAAC,EAAE,CAAC;gBACxE,OAAO;oBACN,GAAG,IAAI;oBACP,QAAQ,EAAE;wBACT,OAAO,EAAE,IAAI;wBACb,KAAK,EAAE,uBAAuB,CAAC,MAAM,EAAE,WAAW,CAAC;qBACnD;iBACuB,CAAC;YAC3B,CAAC;YAED,OAAO;gBACN,GAAG,IAAI;gBACP,QAAQ,EAAE;oBACT,OAAO,EAAE,IAAI;oBACb,YAAY,EAAE,eAAe,CAAC,WAAW,EAAE,MAAM,CAAC;iBAClD;aACuB,CAAC;QAC3B,CAAC;QAED,SAAS,CAAC;YACT,uBAAuB;YACvB,MAAM,WAAW,GAAU,KAAK,CAAC,GAAG,CAAC;YACrC,MAAM,IAAI,KAAK,CAAC,sCAAsC,WAAW,EAAE,CAAC,CAAC;QACtE,CAAC;IACF,CAAC;AAAA,CACD;AAID,SAAS,iBAAiB,CAAC,KAAoC,EAAW;IACzE,2FAA2F;IAC3F,OAAO,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AAAA,CAClC;AAED,SAAS,mBAAmB,CAAC,KAAoC,EAAW;IAC3E,+FAA+F;IAC/F,OAAO,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;AAAA,CACpC;AAED,SAAS,uBAAuB,CAAC,MAA8B,EAAE,KAAoC,EAAiB;IACrH,IAAI,iBAAiB,CAAC,KAAK,CAAC,EAAE,CAAC;QAC9B,gDAAgD;QAChD,QAAQ,MAAM,EAAE,CAAC;YAChB,KAAK,SAAS,CAAC;YACf,KAAK,KAAK;gBACT,OAAO,aAAa,CAAC,GAAG,CAAC;YAC1B,KAAK,QAAQ,CAAC;YACd,KAAK,MAAM;gBACV,OAAO,aAAa,CAAC,IAAI,CAAC;QAC5B,CAAC;IACF,CAAC;IACD,0CAA0C;IAC1C,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,SAAS;YACb,OAAO,aAAa,CAAC,OAAO,CAAC;QAC9B,KAAK,KAAK;YACT,OAAO,aAAa,CAAC,GAAG,CAAC;QAC1B,KAAK,QAAQ;YACZ,OAAO,aAAa,CAAC,MAAM,CAAC;QAC7B,KAAK,MAAM;YACV,OAAO,aAAa,CAAC,IAAI,CAAC;IAC5B,CAAC;AAAA,CACD;AAED,SAAS,eAAe,CAAC,KAAoC,EAAE,MAA8B,EAAU;IACtG,gEAAgE;IAChE,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAClC,MAAM,OAAO,GAA2C;YACvD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;QACpC,gCAAgC;QAChC,MAAM,OAAO,GAA2C;YACvD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,8BAA8B;IAC9B,OAAO,CAAC,CAAC,CAAC;AAAA,CACV","sourcesContent":["import { ThinkingLevel } from \"@google/genai\";\nimport { type AnthropicOptions, streamAnthropic } from \"./providers/anthropic.js\";\nimport { type GoogleOptions, streamGoogle } from \"./providers/google.js\";\nimport { type OpenAICompletionsOptions, streamOpenAICompletions } from \"./providers/openai-completions.js\";\nimport { type OpenAIResponsesOptions, streamOpenAIResponses } from \"./providers/openai-responses.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tKnownProvider,\n\tModel,\n\tOptionsForApi,\n\tReasoningEffort,\n\tSimpleStreamOptions,\n} from \"./types.js\";\n\nconst apiKeys: Map<string, string> = new Map();\n\nexport function setApiKey(provider: KnownProvider, key: string): void;\nexport function setApiKey(provider: string, key: string): void;\nexport function setApiKey(provider: any, key: string): void {\n\tapiKeys.set(provider, key);\n}\n\nexport function getApiKey(provider: KnownProvider): string | undefined;\nexport function getApiKey(provider: string): string | undefined;\nexport function getApiKey(provider: any): string | undefined {\n\t// Check explicit keys first\n\tconst key = apiKeys.get(provider);\n\tif (key) return key;\n\n\t// Fall back to environment 
variables\n\tif (provider === \"github-copilot\") {\n\t\treturn process.env.COPILOT_GITHUB_TOKEN || process.env.GH_TOKEN || process.env.GITHUB_TOKEN;\n\t}\n\n\tconst envMap: Record<string, string> = {\n\t\topenai: \"OPENAI_API_KEY\",\n\t\tanthropic: \"ANTHROPIC_API_KEY\",\n\t\tgoogle: \"GEMINI_API_KEY\",\n\t\tgroq: \"GROQ_API_KEY\",\n\t\tcerebras: \"CEREBRAS_API_KEY\",\n\t\txai: \"XAI_API_KEY\",\n\t\topenrouter: \"OPENROUTER_API_KEY\",\n\t\tzai: \"ZAI_API_KEY\",\n\t\tmistral: \"MISTRAL_API_KEY\",\n\t};\n\n\tconst envVar = envMap[provider];\n\treturn envVar ? process.env[envVar] : undefined;\n}\n\nexport function stream<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\tconst providerOptions = { ...options, apiKey };\n\n\tconst api: Api = model.api;\n\tswitch (api) {\n\t\tcase \"anthropic-messages\":\n\t\t\treturn streamAnthropic(model as Model<\"anthropic-messages\">, context, providerOptions);\n\n\t\tcase \"openai-completions\":\n\t\t\treturn streamOpenAICompletions(model as Model<\"openai-completions\">, context, providerOptions as any);\n\n\t\tcase \"openai-responses\":\n\t\t\treturn streamOpenAIResponses(model as Model<\"openai-responses\">, context, providerOptions as any);\n\n\t\tcase \"google-generative-ai\":\n\t\t\treturn streamGoogle(model as Model<\"google-generative-ai\">, context, providerOptions);\n\n\t\tdefault: {\n\t\t\t// This should never be reached if all Api cases are handled\n\t\t\tconst _exhaustive: never = api;\n\t\t\tthrow new Error(`Unhandled API: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\nexport async function complete<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: OptionsForApi<TApi>,\n): Promise<AssistantMessage> {\n\tconst s = stream(model, context, options);\n\treturn s.result();\n}\n\nexport function streamSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream {\n\tconst apiKey = options?.apiKey || getApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst providerOptions = mapOptionsForApi(model, options, apiKey);\n\treturn stream(model, context, providerOptions);\n}\n\nexport async function completeSimple<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): Promise<AssistantMessage> {\n\tconst s = streamSimple(model, context, options);\n\treturn s.result();\n}\n\nfunction mapOptionsForApi<TApi extends Api>(\n\tmodel: Model<TApi>,\n\toptions?: SimpleStreamOptions,\n\tapiKey?: string,\n): OptionsForApi<TApi> {\n\tconst base = {\n\t\ttemperature: options?.temperature,\n\t\tmaxTokens: options?.maxTokens || Math.min(model.maxTokens, 32000),\n\t\tsignal: options?.signal,\n\t\tapiKey: apiKey || options?.apiKey,\n\t};\n\n\t// Helper to clamp xhigh to high for providers that don't support it\n\tconst clampReasoning = (effort: ReasoningEffort | undefined) => (effort === \"xhigh\" ? 
\"high\" : effort);\n\n\tswitch (model.api) {\n\t\tcase \"anthropic-messages\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinkingEnabled: false } satisfies AnthropicOptions;\n\t\t\t}\n\n\t\t\tconst anthropicBudgets = {\n\t\t\t\tminimal: 1024,\n\t\t\t\tlow: 2048,\n\t\t\t\tmedium: 8192,\n\t\t\t\thigh: 16384,\n\t\t\t};\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinkingEnabled: true,\n\t\t\t\tthinkingBudgetTokens: anthropicBudgets[clampReasoning(options.reasoning)!],\n\t\t\t} satisfies AnthropicOptions;\n\t\t}\n\n\t\tcase \"openai-completions\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAICompletionsOptions;\n\n\t\tcase \"openai-responses\":\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\treasoningEffort: options?.reasoning,\n\t\t\t} satisfies OpenAIResponsesOptions;\n\n\t\tcase \"google-generative-ai\": {\n\t\t\t// Explicitly disable thinking when reasoning is not specified\n\t\t\t// This is needed because Gemini has \"dynamic thinking\" enabled by default\n\t\t\tif (!options?.reasoning) {\n\t\t\t\treturn { ...base, thinking: { enabled: false } } satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\tconst googleModel = model as Model<\"google-generative-ai\">;\n\t\t\tconst effort = clampReasoning(options.reasoning)!;\n\n\t\t\t// Gemini 3 models use thinkingLevel exclusively instead of thinkingBudget.\n\t\t\t// https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\t\t\tif (isGemini3ProModel(googleModel) || isGemini3FlashModel(googleModel)) {\n\t\t\t\treturn {\n\t\t\t\t\t...base,\n\t\t\t\t\tthinking: {\n\t\t\t\t\t\tenabled: true,\n\t\t\t\t\t\tlevel: getGemini3ThinkingLevel(effort, googleModel),\n\t\t\t\t\t},\n\t\t\t\t} satisfies GoogleOptions;\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\t...base,\n\t\t\t\tthinking: {\n\t\t\t\t\tenabled: true,\n\t\t\t\t\tbudgetTokens: getGoogleBudget(googleModel, effort),\n\t\t\t\t},\n\t\t\t} satisfies GoogleOptions;\n\t\t}\n\n\t\tdefault: {\n\t\t\t// Exhaustiveness check\n\t\t\tconst _exhaustive: never = model.api;\n\t\t\tthrow new Error(`Unhandled API in mapOptionsForApi: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\ntype ClampedReasoningEffort = Exclude<ReasoningEffort, \"xhigh\">;\n\nfunction isGemini3ProModel(model: Model<\"google-generative-ai\">): boolean {\n\t// Covers gemini-3-pro, gemini-3-pro-preview, and possible other prefixed ids in the future\n\treturn model.id.includes(\"3-pro\");\n}\n\nfunction isGemini3FlashModel(model: Model<\"google-generative-ai\">): boolean {\n\t// Covers gemini-3-flash, gemini-3-flash-preview, and possible other prefixed ids in the future\n\treturn model.id.includes(\"3-flash\");\n}\n\nfunction getGemini3ThinkingLevel(effort: ClampedReasoningEffort, model: Model<\"google-generative-ai\">): ThinkingLevel {\n\tif (isGemini3ProModel(model)) {\n\t\t// Gemini 3 Pro only supports LOW/HIGH (for now)\n\t\tswitch (effort) {\n\t\t\tcase \"minimal\":\n\t\t\tcase \"low\":\n\t\t\t\treturn ThinkingLevel.LOW;\n\t\t\tcase \"medium\":\n\t\t\tcase \"high\":\n\t\t\t\treturn ThinkingLevel.HIGH;\n\t\t}\n\t}\n\t// Gemini 3 Flash supports all four levels\n\tswitch (effort) {\n\t\tcase \"minimal\":\n\t\t\treturn ThinkingLevel.MINIMAL;\n\t\tcase \"low\":\n\t\t\treturn ThinkingLevel.LOW;\n\t\tcase \"medium\":\n\t\t\treturn ThinkingLevel.MEDIUM;\n\t\tcase \"high\":\n\t\t\treturn ThinkingLevel.HIGH;\n\t}\n}\n\nfunction getGoogleBudget(model: Model<\"google-generative-ai\">, effort: ClampedReasoningEffort): number {\n\t// See 
https://ai.google.dev/gemini-api/docs/thinking#set-budget\n\tif (model.id.includes(\"2.5-pro\")) {\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 32768,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\tif (model.id.includes(\"2.5-flash\")) {\n\t\t// Covers 2.5-flash-lite as well\n\t\tconst budgets: Record<ClampedReasoningEffort, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 24576,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\t// Unknown model - use dynamic\n\treturn -1;\n}\n"]}
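For callers, nothing changes at the call site: the simple API still takes a provider-agnostic reasoning effort, and mapOptionsForApi now resolves it to thinking.level for Gemini 3 Pro and Flash ids (and still to thinking.budgetTokens for 2.5-era ids). A minimal usage sketch, assuming completeSimple and setApiKey are re-exported from the package root and that the Model and Context values come from the package's model catalog and message helpers, which this diff does not show:

import { completeSimple, setApiKey, type Context, type Model } from "@mariozechner/pi-ai";

// Hypothetical values: in practice the model object comes from the package's model
// discovery and the context holds the conversation so far.
declare const gemini3Flash: Model<"google-generative-ai">; // e.g. id "gemini-3-flash-preview"
declare const context: Context;

setApiKey("google", process.env.GEMINI_API_KEY!);

// With this release, reasoning "medium" maps to thinking: { enabled: true, level: ThinkingLevel.MEDIUM }
// for a 3-flash id; a 2.5-flash id would instead get thinking: { enabled: true, budgetTokens: 8192 }.
const message = await completeSimple(gemini3Flash, context, { reasoning: "medium" });
console.log(message.content);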
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@mariozechner/pi-ai",
- "version": "0.23.2",
+ "version": "0.23.4",
  "description": "Unified LLM API with automatic model discovery and provider configuration",
  "type": "module",
  "main": "./dist/index.js",
@@ -21,7 +21,7 @@
  },
  "dependencies": {
  "@anthropic-ai/sdk": "0.71.2",
- "@google/genai": "1.31.0",
+ "@google/genai": "1.34.0",
  "@mistralai/mistralai": "1.10.0",
  "@sinclair/typebox": "^0.34.41",
  "ajv": "^8.17.1",