@eminent337/aery-ai 0.1.119

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (210):
  1. package/README.md +1383 -0
  2. package/dist/api-registry.d.ts +20 -0
  3. package/dist/api-registry.d.ts.map +1 -0
  4. package/dist/api-registry.js +44 -0
  5. package/dist/api-registry.js.map +1 -0
  6. package/dist/bedrock-provider.d.ts +5 -0
  7. package/dist/bedrock-provider.d.ts.map +1 -0
  8. package/dist/bedrock-provider.js +6 -0
  9. package/dist/bedrock-provider.js.map +1 -0
  10. package/dist/cli.d.ts +3 -0
  11. package/dist/cli.d.ts.map +1 -0
  12. package/dist/cli.js +116 -0
  13. package/dist/cli.js.map +1 -0
  14. package/dist/env-api-keys.d.ts +18 -0
  15. package/dist/env-api-keys.d.ts.map +1 -0
  16. package/dist/env-api-keys.js +170 -0
  17. package/dist/env-api-keys.js.map +1 -0
  18. package/dist/image-models.d.ts +10 -0
  19. package/dist/image-models.d.ts.map +1 -0
  20. package/dist/image-models.generated.d.ts +260 -0
  21. package/dist/image-models.generated.d.ts.map +1 -0
  22. package/dist/image-models.generated.js +262 -0
  23. package/dist/image-models.generated.js.map +1 -0
  24. package/dist/image-models.js +23 -0
  25. package/dist/image-models.js.map +1 -0
  26. package/dist/images-api-registry.d.ts +14 -0
  27. package/dist/images-api-registry.d.ts.map +1 -0
  28. package/dist/images-api-registry.js +22 -0
  29. package/dist/images-api-registry.js.map +1 -0
  30. package/dist/images.d.ts +4 -0
  31. package/dist/images.d.ts.map +1 -0
  32. package/dist/images.js +14 -0
  33. package/dist/images.js.map +1 -0
  34. package/dist/index.d.ts +32 -0
  35. package/dist/index.d.ts.map +1 -0
  36. package/dist/index.js +20 -0
  37. package/dist/index.js.map +1 -0
  38. package/dist/models.d.ts +18 -0
  39. package/dist/models.d.ts.map +1 -0
  40. package/dist/models.generated.d.ts +18237 -0
  41. package/dist/models.generated.d.ts.map +1 -0
  42. package/dist/models.generated.js +17251 -0
  43. package/dist/models.generated.js.map +1 -0
  44. package/dist/models.js +71 -0
  45. package/dist/models.js.map +1 -0
  46. package/dist/oauth.d.ts +2 -0
  47. package/dist/oauth.d.ts.map +1 -0
  48. package/dist/oauth.js +2 -0
  49. package/dist/oauth.js.map +1 -0
  50. package/dist/providers/aery-error-formatting.d.ts +13 -0
  51. package/dist/providers/aery-error-formatting.d.ts.map +1 -0
  52. package/dist/providers/aery-error-formatting.js +112 -0
  53. package/dist/providers/aery-error-formatting.js.map +1 -0
  54. package/dist/providers/amazon-bedrock.d.ts +38 -0
  55. package/dist/providers/amazon-bedrock.d.ts.map +1 -0
  56. package/dist/providers/amazon-bedrock.js +750 -0
  57. package/dist/providers/amazon-bedrock.js.map +1 -0
  58. package/dist/providers/anthropic.d.ts +54 -0
  59. package/dist/providers/anthropic.d.ts.map +1 -0
  60. package/dist/providers/anthropic.js +960 -0
  61. package/dist/providers/anthropic.js.map +1 -0
  62. package/dist/providers/azure-openai-responses.d.ts +15 -0
  63. package/dist/providers/azure-openai-responses.d.ts.map +1 -0
  64. package/dist/providers/azure-openai-responses.js +208 -0
  65. package/dist/providers/azure-openai-responses.js.map +1 -0
  66. package/dist/providers/cloudflare.d.ts +13 -0
  67. package/dist/providers/cloudflare.d.ts.map +1 -0
  68. package/dist/providers/cloudflare.js +26 -0
  69. package/dist/providers/cloudflare.js.map +1 -0
  70. package/dist/providers/faux.d.ts +56 -0
  71. package/dist/providers/faux.d.ts.map +1 -0
  72. package/dist/providers/faux.js +368 -0
  73. package/dist/providers/faux.js.map +1 -0
  74. package/dist/providers/github-copilot-headers.d.ts +8 -0
  75. package/dist/providers/github-copilot-headers.d.ts.map +1 -0
  76. package/dist/providers/github-copilot-headers.js +29 -0
  77. package/dist/providers/github-copilot-headers.js.map +1 -0
  78. package/dist/providers/google-shared.d.ts +70 -0
  79. package/dist/providers/google-shared.d.ts.map +1 -0
  80. package/dist/providers/google-shared.js +329 -0
  81. package/dist/providers/google-shared.js.map +1 -0
  82. package/dist/providers/google-vertex.d.ts +15 -0
  83. package/dist/providers/google-vertex.d.ts.map +1 -0
  84. package/dist/providers/google-vertex.js +442 -0
  85. package/dist/providers/google-vertex.js.map +1 -0
  86. package/dist/providers/google.d.ts +13 -0
  87. package/dist/providers/google.d.ts.map +1 -0
  88. package/dist/providers/google.js +400 -0
  89. package/dist/providers/google.js.map +1 -0
  90. package/dist/providers/images/openrouter.d.ts +3 -0
  91. package/dist/providers/images/openrouter.d.ts.map +1 -0
  92. package/dist/providers/images/openrouter.js +129 -0
  93. package/dist/providers/images/openrouter.js.map +1 -0
  94. package/dist/providers/images/register-builtins.d.ts +4 -0
  95. package/dist/providers/images/register-builtins.d.ts.map +1 -0
  96. package/dist/providers/images/register-builtins.js +34 -0
  97. package/dist/providers/images/register-builtins.js.map +1 -0
  98. package/dist/providers/mistral.d.ts +25 -0
  99. package/dist/providers/mistral.d.ts.map +1 -0
  100. package/dist/providers/mistral.js +535 -0
  101. package/dist/providers/mistral.js.map +1 -0
  102. package/dist/providers/openai-codex-responses.d.ts +30 -0
  103. package/dist/providers/openai-codex-responses.d.ts.map +1 -0
  104. package/dist/providers/openai-codex-responses.js +1080 -0
  105. package/dist/providers/openai-codex-responses.js.map +1 -0
  106. package/dist/providers/openai-completions.d.ts +19 -0
  107. package/dist/providers/openai-completions.d.ts.map +1 -0
  108. package/dist/providers/openai-completions.js +936 -0
  109. package/dist/providers/openai-completions.js.map +1 -0
  110. package/dist/providers/openai-responses-shared.d.ts +18 -0
  111. package/dist/providers/openai-responses-shared.d.ts.map +1 -0
  112. package/dist/providers/openai-responses-shared.js +492 -0
  113. package/dist/providers/openai-responses-shared.js.map +1 -0
  114. package/dist/providers/openai-responses.d.ts +13 -0
  115. package/dist/providers/openai-responses.d.ts.map +1 -0
  116. package/dist/providers/openai-responses.js +220 -0
  117. package/dist/providers/openai-responses.js.map +1 -0
  118. package/dist/providers/register-builtins.d.ts +35 -0
  119. package/dist/providers/register-builtins.d.ts.map +1 -0
  120. package/dist/providers/register-builtins.js +243 -0
  121. package/dist/providers/register-builtins.js.map +1 -0
  122. package/dist/providers/simple-options.d.ts +8 -0
  123. package/dist/providers/simple-options.d.ts.map +1 -0
  124. package/dist/providers/simple-options.js +39 -0
  125. package/dist/providers/simple-options.js.map +1 -0
  126. package/dist/providers/transform-messages.d.ts +8 -0
  127. package/dist/providers/transform-messages.d.ts.map +1 -0
  128. package/dist/providers/transform-messages.js +184 -0
  129. package/dist/providers/transform-messages.js.map +1 -0
  130. package/dist/session-resources.d.ts +4 -0
  131. package/dist/session-resources.d.ts.map +1 -0
  132. package/dist/session-resources.js +22 -0
  133. package/dist/session-resources.js.map +1 -0
  134. package/dist/stream.d.ts +8 -0
  135. package/dist/stream.d.ts.map +1 -0
  136. package/dist/stream.js +27 -0
  137. package/dist/stream.js.map +1 -0
  138. package/dist/types.d.ts +488 -0
  139. package/dist/types.d.ts.map +1 -0
  140. package/dist/types.js +2 -0
  141. package/dist/types.js.map +1 -0
  142. package/dist/utils/diagnostics.d.ts +19 -0
  143. package/dist/utils/diagnostics.d.ts.map +1 -0
  144. package/dist/utils/diagnostics.js +25 -0
  145. package/dist/utils/diagnostics.js.map +1 -0
  146. package/dist/utils/event-stream.d.ts +21 -0
  147. package/dist/utils/event-stream.d.ts.map +1 -0
  148. package/dist/utils/event-stream.js +81 -0
  149. package/dist/utils/event-stream.js.map +1 -0
  150. package/dist/utils/hash.d.ts +3 -0
  151. package/dist/utils/hash.d.ts.map +1 -0
  152. package/dist/utils/hash.js +14 -0
  153. package/dist/utils/hash.js.map +1 -0
  154. package/dist/utils/headers.d.ts +2 -0
  155. package/dist/utils/headers.d.ts.map +1 -0
  156. package/dist/utils/headers.js +8 -0
  157. package/dist/utils/headers.js.map +1 -0
  158. package/dist/utils/json-parse.d.ts +16 -0
  159. package/dist/utils/json-parse.d.ts.map +1 -0
  160. package/dist/utils/json-parse.js +113 -0
  161. package/dist/utils/json-parse.js.map +1 -0
  162. package/dist/utils/node-http-proxy.d.ts +10 -0
  163. package/dist/utils/node-http-proxy.d.ts.map +1 -0
  164. package/dist/utils/node-http-proxy.js +34 -0
  165. package/dist/utils/node-http-proxy.js.map +1 -0
  166. package/dist/utils/oauth/anthropic.d.ts +25 -0
  167. package/dist/utils/oauth/anthropic.d.ts.map +1 -0
  168. package/dist/utils/oauth/anthropic.js +335 -0
  169. package/dist/utils/oauth/anthropic.js.map +1 -0
  170. package/dist/utils/oauth/github-copilot.d.ts +30 -0
  171. package/dist/utils/oauth/github-copilot.d.ts.map +1 -0
  172. package/dist/utils/oauth/github-copilot.js +292 -0
  173. package/dist/utils/oauth/github-copilot.js.map +1 -0
  174. package/dist/utils/oauth/index.d.ts +57 -0
  175. package/dist/utils/oauth/index.d.ts.map +1 -0
  176. package/dist/utils/oauth/index.js +121 -0
  177. package/dist/utils/oauth/index.js.map +1 -0
  178. package/dist/utils/oauth/oauth-page.d.ts +3 -0
  179. package/dist/utils/oauth/oauth-page.d.ts.map +1 -0
  180. package/dist/utils/oauth/oauth-page.js +105 -0
  181. package/dist/utils/oauth/oauth-page.js.map +1 -0
  182. package/dist/utils/oauth/openai-codex.d.ts +34 -0
  183. package/dist/utils/oauth/openai-codex.d.ts.map +1 -0
  184. package/dist/utils/oauth/openai-codex.js +385 -0
  185. package/dist/utils/oauth/openai-codex.js.map +1 -0
  186. package/dist/utils/oauth/pkce.d.ts +13 -0
  187. package/dist/utils/oauth/pkce.d.ts.map +1 -0
  188. package/dist/utils/oauth/pkce.js +31 -0
  189. package/dist/utils/oauth/pkce.js.map +1 -0
  190. package/dist/utils/oauth/types.d.ts +57 -0
  191. package/dist/utils/oauth/types.d.ts.map +1 -0
  192. package/dist/utils/oauth/types.js +2 -0
  193. package/dist/utils/oauth/types.js.map +1 -0
  194. package/dist/utils/overflow.d.ts +56 -0
  195. package/dist/utils/overflow.d.ts.map +1 -0
  196. package/dist/utils/overflow.js +149 -0
  197. package/dist/utils/overflow.js.map +1 -0
  198. package/dist/utils/sanitize-unicode.d.ts +22 -0
  199. package/dist/utils/sanitize-unicode.d.ts.map +1 -0
  200. package/dist/utils/sanitize-unicode.js +26 -0
  201. package/dist/utils/sanitize-unicode.js.map +1 -0
  202. package/dist/utils/typebox-helpers.d.ts +17 -0
  203. package/dist/utils/typebox-helpers.d.ts.map +1 -0
  204. package/dist/utils/typebox-helpers.js +21 -0
  205. package/dist/utils/typebox-helpers.js.map +1 -0
  206. package/dist/utils/validation.d.ts +18 -0
  207. package/dist/utils/validation.d.ts.map +1 -0
  208. package/dist/utils/validation.js +281 -0
  209. package/dist/utils/validation.js.map +1 -0
  210. package/package.json +108 -0
@@ -0,0 +1,400 @@
1
+ import { GoogleGenAI, } from "@google/genai";
2
+ import { getEnvApiKey } from "../env-api-keys.js";
3
+ import { calculateCost, clampThinkingLevel } from "../models.js";
4
+ import { AssistantMessageEventStream } from "../utils/event-stream.js";
5
+ import { sanitizeSurrogates } from "../utils/sanitize-unicode.js";
6
+ import { convertMessages, convertTools, isThinkingPart, mapStopReason, mapToolChoice, retainThoughtSignature, } from "./google-shared.js";
7
+ import { buildBaseOptions } from "./simple-options.js";
8
// Module-level counter used to mint unique tool-call IDs when the API omits
// an id or repeats one within a single response (see streamGoogle).
let toolCallCounter = 0;
10
// Streams a completion from the Google Generative AI API.
//
// Returns an AssistantMessageEventStream immediately; a detached async IIFE
// consumes the @google/genai chunk stream and translates it into our event
// vocabulary (start / text_* / thinking_* / toolcall_* / done / error) while
// accumulating the final assistant message in `output`.
// On any failure (including abort) a single "error" event is pushed and the
// stream is ended — the promise itself is never rejected.
export const streamGoogle = (model, context, options) => {
    const stream = new AssistantMessageEventStream();
    (async () => {
        // Accumulated assistant message; mutated in place as chunks arrive and
        // attached to every event as `partial`.
        const output = {
            role: "assistant",
            content: [],
            api: "google-generative-ai",
            provider: model.provider,
            model: model.id,
            usage: {
                input: 0,
                output: 0,
                cacheRead: 0,
                cacheWrite: 0,
                totalTokens: 0,
                cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
            },
            stopReason: "stop",
            timestamp: Date.now(),
        };
        try {
            const apiKey = options?.apiKey || getEnvApiKey(model.provider) || "";
            const client = createClient(model, apiKey, options?.headers);
            let params = buildParams(model, context, options);
            // Give the caller a chance to inspect/replace the outgoing payload.
            const nextParams = await options?.onPayload?.(params, model);
            if (nextParams !== undefined) {
                params = nextParams;
            }
            const googleStream = await client.models.generateContentStream(params);
            stream.push({ type: "start", partial: output });
            // The text/thinking block currently being appended to (tool calls are
            // emitted atomically and never become the current block).
            let currentBlock = null;
            const blocks = output.content;
            const blockIndex = () => blocks.length - 1;
            for await (const chunk of googleStream) {
                // @google/genai documents GenerateContentResponse.responseId as an output-only field
                // used to identify each response. Keep the first non-empty one from the stream.
                output.responseId ||= chunk.responseId;
                const candidate = chunk.candidates?.[0];
                if (candidate?.content?.parts) {
                    for (const part of candidate.content.parts) {
                        if (part.text !== undefined) {
                            const isThinking = isThinkingPart(part);
                            // Open a new block when there is none yet, or when the
                            // part kind (thinking vs text) differs from the open one.
                            if (!currentBlock ||
                                (isThinking && currentBlock.type !== "thinking") ||
                                (!isThinking && currentBlock.type !== "text")) {
                                // Close the previous block first.
                                if (currentBlock) {
                                    if (currentBlock.type === "text") {
                                        stream.push({
                                            type: "text_end",
                                            // NOTE(review): equivalent to blockIndex();
                                            // kept as written for byte-fidelity.
                                            contentIndex: blocks.length - 1,
                                            content: currentBlock.text,
                                            partial: output,
                                        });
                                    }
                                    else {
                                        stream.push({
                                            type: "thinking_end",
                                            contentIndex: blockIndex(),
                                            content: currentBlock.thinking,
                                            partial: output,
                                        });
                                    }
                                }
                                if (isThinking) {
                                    currentBlock = { type: "thinking", thinking: "", thinkingSignature: undefined };
                                    output.content.push(currentBlock);
                                    stream.push({ type: "thinking_start", contentIndex: blockIndex(), partial: output });
                                }
                                else {
                                    currentBlock = { type: "text", text: "" };
                                    output.content.push(currentBlock);
                                    stream.push({ type: "text_start", contentIndex: blockIndex(), partial: output });
                                }
                            }
                            // Append the delta and retain the latest thought signature
                            // (needed to echo thoughts back on subsequent turns).
                            if (currentBlock.type === "thinking") {
                                currentBlock.thinking += part.text;
                                currentBlock.thinkingSignature = retainThoughtSignature(currentBlock.thinkingSignature, part.thoughtSignature);
                                stream.push({
                                    type: "thinking_delta",
                                    contentIndex: blockIndex(),
                                    delta: part.text,
                                    partial: output,
                                });
                            }
                            else {
                                currentBlock.text += part.text;
                                currentBlock.textSignature = retainThoughtSignature(currentBlock.textSignature, part.thoughtSignature);
                                stream.push({
                                    type: "text_delta",
                                    contentIndex: blockIndex(),
                                    delta: part.text,
                                    partial: output,
                                });
                            }
                        }
                        if (part.functionCall) {
                            // A function call terminates any open text/thinking block.
                            if (currentBlock) {
                                if (currentBlock.type === "text") {
                                    stream.push({
                                        type: "text_end",
                                        contentIndex: blockIndex(),
                                        content: currentBlock.text,
                                        partial: output,
                                    });
                                }
                                else {
                                    stream.push({
                                        type: "thinking_end",
                                        contentIndex: blockIndex(),
                                        content: currentBlock.thinking,
                                        partial: output,
                                    });
                                }
                                currentBlock = null;
                            }
                            // Generate unique ID if not provided or if it's a duplicate
                            const providedId = part.functionCall.id;
                            const needsNewId = !providedId || output.content.some((b) => b.type === "toolCall" && b.id === providedId);
                            const toolCallId = needsNewId
                                ? `${part.functionCall.name}_${Date.now()}_${++toolCallCounter}`
                                : providedId;
                            const toolCall = {
                                type: "toolCall",
                                id: toolCallId,
                                name: part.functionCall.name || "",
                                arguments: part.functionCall.args ?? {},
                                ...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),
                            };
                            output.content.push(toolCall);
                            // Tool calls arrive whole, so start/delta/end are emitted
                            // back-to-back with the full serialized arguments as the delta.
                            stream.push({ type: "toolcall_start", contentIndex: blockIndex(), partial: output });
                            stream.push({
                                type: "toolcall_delta",
                                contentIndex: blockIndex(),
                                delta: JSON.stringify(toolCall.arguments),
                                partial: output,
                            });
                            stream.push({ type: "toolcall_end", contentIndex: blockIndex(), toolCall, partial: output });
                        }
                    }
                }
                if (candidate?.finishReason) {
                    output.stopReason = mapStopReason(candidate.finishReason);
                    // Any emitted tool call overrides the provider's finish reason.
                    if (output.content.some((b) => b.type === "toolCall")) {
                        output.stopReason = "toolUse";
                    }
                }
                if (chunk.usageMetadata) {
                    // Usage chunks are cumulative: replace rather than accumulate.
                    // promptTokenCount includes cached tokens, so subtract them to
                    // get the non-cached input count.
                    output.usage = {
                        input: (chunk.usageMetadata.promptTokenCount || 0) - (chunk.usageMetadata.cachedContentTokenCount || 0),
                        output: (chunk.usageMetadata.candidatesTokenCount || 0) + (chunk.usageMetadata.thoughtsTokenCount || 0),
                        cacheRead: chunk.usageMetadata.cachedContentTokenCount || 0,
                        cacheWrite: 0,
                        totalTokens: chunk.usageMetadata.totalTokenCount || 0,
                        cost: {
                            input: 0,
                            output: 0,
                            cacheRead: 0,
                            cacheWrite: 0,
                            total: 0,
                        },
                    };
                    calculateCost(model, output.usage);
                }
            }
            // Stream is exhausted: close whatever block is still open.
            if (currentBlock) {
                if (currentBlock.type === "text") {
                    stream.push({
                        type: "text_end",
                        contentIndex: blockIndex(),
                        content: currentBlock.text,
                        partial: output,
                    });
                }
                else {
                    stream.push({
                        type: "thinking_end",
                        contentIndex: blockIndex(),
                        content: currentBlock.thinking,
                        partial: output,
                    });
                }
            }
            if (options?.signal?.aborted) {
                throw new Error("Request was aborted");
            }
            if (output.stopReason === "aborted" || output.stopReason === "error") {
                throw new Error("An unknown error occurred");
            }
            stream.push({ type: "done", reason: output.stopReason, message: output });
            stream.end();
        }
        catch (error) {
            // Remove internal index property used during streaming
            for (const block of output.content) {
                if ("index" in block) {
                    delete block.index;
                }
            }
            output.stopReason = options?.signal?.aborted ? "aborted" : "error";
            output.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);
            stream.push({ type: "error", reason: output.stopReason, error: output });
            stream.end();
        }
    })();
    return stream;
};
216
// Convenience wrapper around streamGoogle: resolves the API key, builds the
// base provider options, and translates the abstract `reasoning` effort into
// the model-appropriate thinking configuration (level for Gemini 3 / Gemma 4
// families, token budget for everything else).
// Throws if no API key can be resolved for the provider.
export const streamSimpleGoogle = (model, context, options) => {
    const resolvedKey = options?.apiKey || getEnvApiKey(model.provider);
    if (!resolvedKey) {
        throw new Error(`No API key for provider: ${model.provider}`);
    }
    const baseOptions = buildBaseOptions(model, options, resolvedKey);
    // No reasoning requested: stream with thinking explicitly disabled.
    if (!options?.reasoning) {
        return streamGoogle(model, context, { ...baseOptions, thinking: { enabled: false } });
    }
    const clamped = clampThinkingLevel(model, options.reasoning);
    const effort = clamped === "off" ? "high" : clamped;
    // Gemini 3 and Gemma 4 families take a discrete thinkingLevel; older
    // models take a numeric token budget instead.
    const usesThinkingLevels = isGemini3ProModel(model) || isGemini3FlashModel(model) || isGemma4Model(model);
    const thinking = usesThinkingLevels
        ? { enabled: true, level: getThinkingLevel(effort, model) }
        : { enabled: true, budgetTokens: getGoogleBudget(model, effort, options.thinkingBudgets) };
    return streamGoogle(model, context, { ...baseOptions, thinking });
};
245
// Constructs a GoogleGenAI client, passing httpOptions only when there is
// something to override (custom base URL and/or extra headers).
function createClient(model, apiKey, optionsHeaders) {
    const overrides = {};
    if (model.baseUrl) {
        overrides.baseUrl = model.baseUrl;
        // The configured baseUrl already includes the version path segment, so
        // blank out apiVersion to keep the SDK from appending another one.
        overrides.apiVersion = "";
    }
    // Caller-supplied headers win over model-level headers on key collision.
    if (model.headers || optionsHeaders) {
        overrides.headers = { ...model.headers, ...optionsHeaders };
    }
    const hasOverrides = Object.keys(overrides).length > 0;
    return new GoogleGenAI({ apiKey, httpOptions: hasOverrides ? overrides : undefined });
}
259
// Builds the generateContentStream request payload (model + contents +
// config) from the provider-agnostic context/options.
// Throws synchronously if options.signal is already aborted.
function buildParams(model, context, options = {}) {
    const contents = convertMessages(model, context);
    const generationConfig = {};
    if (options.temperature !== undefined) {
        generationConfig.temperature = options.temperature;
    }
    if (options.maxTokens !== undefined) {
        generationConfig.maxOutputTokens = options.maxTokens;
    }
    // Spreading `false` is a no-op, so each group of keys appears in config
    // only when its condition holds.
    const config = {
        ...(Object.keys(generationConfig).length > 0 && generationConfig),
        ...(context.systemPrompt && { systemInstruction: sanitizeSurrogates(context.systemPrompt) }),
        ...(context.tools && context.tools.length > 0 && { tools: convertTools(context.tools) }),
    };
    if (context.tools && context.tools.length > 0 && options.toolChoice) {
        config.toolConfig = {
            functionCallingConfig: {
                mode: mapToolChoice(options.toolChoice),
            },
        };
    }
    else {
        // Deliberately present-but-undefined (not omitted).
        config.toolConfig = undefined;
    }
    if (options.thinking?.enabled && model.reasoning) {
        const thinkingConfig = { includeThoughts: true };
        if (options.thinking.level !== undefined) {
            // Cast to any since our GoogleThinkingLevel mirrors Google's ThinkingLevel enum values
            thinkingConfig.thinkingLevel = options.thinking.level;
        }
        else if (options.thinking.budgetTokens !== undefined) {
            thinkingConfig.thinkingBudget = options.thinking.budgetTokens;
        }
        config.thinkingConfig = thinkingConfig;
    }
    else if (model.reasoning && options.thinking && !options.thinking.enabled) {
        // Thinking explicitly disabled on a reasoning-capable model: some
        // families cannot turn it fully off, so use the per-family fallback.
        config.thinkingConfig = getDisabledThinkingConfig(model);
    }
    if (options.signal) {
        // Fail fast before issuing the request if already aborted.
        if (options.signal.aborted) {
            throw new Error("Request aborted");
        }
        config.abortSignal = options.signal;
    }
    const params = {
        model: model.id,
        contents,
        config,
    };
    return params;
}
310
// True for Gemma 4 model ids; accepts both "gemma4" and "gemma-4" spellings,
// case-insensitively.
function isGemma4Model(model) {
    return /gemma-?4/i.test(model.id);
}
313
// True for Gemini 3 Pro ids, including point releases such as
// "gemini-3.1-pro"; case-insensitive.
function isGemini3ProModel(model) {
    return /gemini-3(?:\.\d+)?-pro/i.test(model.id);
}
316
// True for Gemini 3 Flash ids, including point releases such as
// "gemini-3.5-flash"; case-insensitive.
function isGemini3FlashModel(model) {
    return /gemini-3(?:\.\d+)?-flash/i.test(model.id);
}
319
// Returns the thinkingConfig that gets closest to "thinking off" for a model.
// Google docs: Gemini 3.1 Pro cannot disable thinking, and Gemini 3 Flash /
// Flash-Lite and Gemma 4 do not support full thinking-off either; for those,
// request the lowest supported thinkingLevel without includeThoughts so any
// hidden thinking stays invisible downstream.
function getDisabledThinkingConfig(model) {
    if (isGemini3ProModel(model)) {
        return { thinkingLevel: "LOW" };
    }
    if (isGemini3FlashModel(model) || isGemma4Model(model)) {
        return { thinkingLevel: "MINIMAL" };
    }
    // Gemini 2.x supports disabling outright via a zero thinking budget.
    return { thinkingBudget: 0 };
}
335
// Maps an abstract reasoning effort ("minimal" | "low" | "medium" | "high")
// to the thinkingLevel string for the given model family.
function getThinkingLevel(effort, model) {
    // Gemini 3 Pro exposes only LOW and HIGH.
    const proLevels = { minimal: "LOW", low: "LOW", medium: "HIGH", high: "HIGH" };
    // Gemma 4 has no LOW tier: sub-medium efforts collapse to MINIMAL.
    const gemmaLevels = { minimal: "MINIMAL", low: "MINIMAL", medium: "HIGH", high: "HIGH" };
    // Everything else (e.g. Gemini 3 Flash) supports the full ladder.
    const defaultLevels = { minimal: "MINIMAL", low: "LOW", medium: "MEDIUM", high: "HIGH" };
    if (isGemini3ProModel(model) && effort in proLevels) {
        return proLevels[effort];
    }
    if (isGemma4Model(model) && effort in gemmaLevels) {
        return gemmaLevels[effort];
    }
    return defaultLevels[effort];
}
367
// Resolves the thinking-token budget for a model at a given effort tier.
// Caller-supplied budgets (customBudgets) always take precedence; known
// Gemini 2.5 families get hard-coded tiers; anything else returns -1,
// which asks the API to choose a dynamic budget.
function getGoogleBudget(model, effort, customBudgets) {
    const custom = customBudgets?.[effort];
    if (custom !== undefined) {
        return custom;
    }
    // Order matters: "2.5-flash-lite" must be tested before the broader
    // "2.5-flash" substring, which would otherwise shadow it.
    const families = [
        { needle: "2.5-pro", budgets: { minimal: 128, low: 2048, medium: 8192, high: 32768 } },
        { needle: "2.5-flash-lite", budgets: { minimal: 512, low: 2048, medium: 8192, high: 24576 } },
        { needle: "2.5-flash", budgets: { minimal: 128, low: 2048, medium: 8192, high: 24576 } },
    ];
    for (const family of families) {
        if (model.id.includes(family.needle)) {
            return family.budgets[effort];
        }
    }
    return -1;
}
400
+ //# sourceMappingURL=google.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"google.js","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AAAA,OAAO,EAGN,WAAW,GAEX,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,cAAc,CAAC;AAejE,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAElE,OAAO,EACN,eAAe,EACf,YAAY,EACZ,cAAc,EACd,aAAa,EACb,aAAa,EACb,sBAAsB,GACtB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAWvD,8CAA8C;AAC9C,IAAI,eAAe,GAAG,CAAC,CAAC;AAExB,MAAM,CAAC,MAAM,YAAY,GAA0D,CAClF,KAAoC,EACpC,OAAgB,EAChB,OAAuB,EACO,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,2BAA2B,EAAE,CAAC;IAEjD,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,MAAM,GAAqB;YAChC,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,sBAA6B;YAClC,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,UAAU,EAAE,MAAM;YAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,CAAC;YACJ,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACrE,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC7D,IAAI,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAClD,MAAM,UAAU,GAAG,MAAM,OAAO,EAAE,SAAS,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;YAC7D,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;gBAC9B,MAAM,GAAG,UAAuC,CAAC;YAClD,CAAC;YACD,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC;YAEvE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAChD,IAAI,YAAY,GAAyC,IAAI,CAAC;YAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;YAC9B,MAAM,UAAU,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;YAC3C,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;gBACxC,qFAAqF;gBACrF,gFAAgF;gBAChF,MAAM,CAAC,UAAU,KAAK,KAAK,CAAC,UAAU,CAAC;gBACvC,MAAM,SAAS,GAAG,KAAK,CAAC,UAAU,EAAE,C
AAC,CAAC,CAAC,CAAC;gBACxC,IAAI,SAAS,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC;oBAC/B,KAAK,MAAM,IAAI,IAAI,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;wBAC5C,IAAI,IAAI,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;4BAC7B,MAAM,UAAU,GAAG,cAAc,CAAC,IAAI,CAAC,CAAC;4BACxC,IACC,CAAC,YAAY;gCACb,CAAC,UAAU,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,CAAC;gCAChD,CAAC,CAAC,UAAU,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,CAAC,EAC5C,CAAC;gCACF,IAAI,YAAY,EAAE,CAAC;oCAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wCAClC,MAAM,CAAC,IAAI,CAAC;4CACX,IAAI,EAAE,UAAU;4CAChB,YAAY,EAAE,MAAM,CAAC,MAAM,GAAG,CAAC;4CAC/B,OAAO,EAAE,YAAY,CAAC,IAAI;4CAC1B,OAAO,EAAE,MAAM;yCACf,CAAC,CAAC;oCACJ,CAAC;yCAAM,CAAC;wCACP,MAAM,CAAC,IAAI,CAAC;4CACX,IAAI,EAAE,cAAc;4CACpB,YAAY,EAAE,UAAU,EAAE;4CAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;4CAC9B,OAAO,EAAE,MAAM;yCACf,CAAC,CAAC;oCACJ,CAAC;gCACF,CAAC;gCACD,IAAI,UAAU,EAAE,CAAC;oCAChB,YAAY,GAAG,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,iBAAiB,EAAE,SAAS,EAAE,CAAC;oCAChF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;gCACtF,CAAC;qCAAM,CAAC;oCACP,YAAY,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;oCAC1C,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;gCAClF,CAAC;4BACF,CAAC;4BACD,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;gCACtC,YAAY,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;gCACnC,YAAY,CAAC,iBAAiB,GAAG,sBAAsB,CACtD,YAAY,CAAC,iBAAiB,EAC9B,IAAI,CAAC,gBAAgB,CACrB,CAAC;gCACF,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,gBAAgB;oCACtB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK,EAAE,IAAI,CAAC,IAAI;oCAChB,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;iCAAM,CAAC;gCACP,YAAY,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC;gCAC/B,YAAY,CAAC,aAAa,GAAG,sBAAsB,CAClD,YAAY,CAAC,aAAa,EAC1B,IAAI,CAAC,gBAAgB,CACrB,CAAC;gCACF,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,YAAY;oCAClB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK,EAAE,IAAI,CAAC,IAAI;oCAChB,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;wBACF,CAAC;wBAED,IAAI,IAA
I,CAAC,YAAY,EAAE,CAAC;4BACvB,IAAI,YAAY,EAAE,CAAC;gCAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oCAClC,MAAM,CAAC,IAAI,CAAC;wCACX,IAAI,EAAE,UAAU;wCAChB,YAAY,EAAE,UAAU,EAAE;wCAC1B,OAAO,EAAE,YAAY,CAAC,IAAI;wCAC1B,OAAO,EAAE,MAAM;qCACf,CAAC,CAAC;gCACJ,CAAC;qCAAM,CAAC;oCACP,MAAM,CAAC,IAAI,CAAC;wCACX,IAAI,EAAE,cAAc;wCACpB,YAAY,EAAE,UAAU,EAAE;wCAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;wCAC9B,OAAO,EAAE,MAAM;qCACf,CAAC,CAAC;gCACJ,CAAC;gCACD,YAAY,GAAG,IAAI,CAAC;4BACrB,CAAC;4BAED,4DAA4D;4BAC5D,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,EAAE,CAAC;4BACxC,MAAM,UAAU,GACf,CAAC,UAAU,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,UAAU,CAAC,CAAC;4BACzF,MAAM,UAAU,GAAG,UAAU;gCAC5B,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,eAAe,EAAE;gCAChE,CAAC,CAAC,UAAU,CAAC;4BAEd,MAAM,QAAQ,GAAa;gCAC1B,IAAI,EAAE,UAAU;gCAChB,EAAE,EAAE,UAAU;gCACd,IAAI,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,IAAI,EAAE;gCAClC,SAAS,EAAG,IAAI,CAAC,YAAY,CAAC,IAA4B,IAAI,EAAE;gCAChE,GAAG,CAAC,IAAI,CAAC,gBAAgB,IAAI,EAAE,gBAAgB,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC;6BACzE,CAAC;4BAEF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;4BAC9B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;4BACrF,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,CAAC;gCACzC,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;4BACH,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBAC9F,CAAC;oBACF,CAAC;gBACF,CAAC;gBAED,IAAI,SAAS,EAAE,YAAY,EAAE,CAAC;oBAC7B,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,EAAE,CAAC;wBACvD,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;oBAC/B,CAAC;gBACF,CAAC;gBAED,IAAI,KAAK,CAAC,aAAa,EAAE,CAAC;oBACzB,MAAM,CAAC,KAAK,GAAG;wBACd,KAAK,EACJ,CAAC,KAAK,CAAC,aAAa,CAAC,gBAAgB,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,aAAa,CAAC,uBAAuB,IA
AI,CAAC,CAAC;wBACjG,MAAM,EACL,CAAC,KAAK,CAAC,aAAa,CAAC,oBAAoB,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,aAAa,CAAC,kBAAkB,IAAI,CAAC,CAAC;wBAChG,SAAS,EAAE,KAAK,CAAC,aAAa,CAAC,uBAAuB,IAAI,CAAC;wBAC3D,UAAU,EAAE,CAAC;wBACb,WAAW,EAAE,KAAK,CAAC,aAAa,CAAC,eAAe,IAAI,CAAC;wBACrD,IAAI,EAAE;4BACL,KAAK,EAAE,CAAC;4BACR,MAAM,EAAE,CAAC;4BACT,SAAS,EAAE,CAAC;4BACZ,UAAU,EAAE,CAAC;4BACb,KAAK,EAAE,CAAC;yBACR;qBACD,CAAC;oBACF,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;YACF,CAAC;YAED,IAAI,YAAY,EAAE,CAAC;gBAClB,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAClC,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,UAAU;wBAChB,YAAY,EAAE,UAAU,EAAE;wBAC1B,OAAO,EAAE,YAAY,CAAC,IAAI;wBAC1B,OAAO,EAAE,MAAM;qBACf,CAAC,CAAC;gBACJ,CAAC;qBAAM,CAAC;oBACP,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,cAAc;wBACpB,YAAY,EAAE,UAAU,EAAE;wBAC1B,OAAO,EAAE,YAAY,CAAC,QAAQ;wBAC9B,OAAO,EAAE,MAAM;qBACf,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YAED,IAAI,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC9B,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,IAAI,MAAM,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;YAC9C,CAAC;YAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC1E,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,uDAAuD;YACvD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;gBACpC,IAAI,OAAO,IAAI,KAAK,EAAE,CAAC;oBACtB,OAAQ,KAA4B,CAAC,KAAK,CAAC;gBAC5C,CAAC;YACF,CAAC;YACD,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACrF,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;YACzE,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAgE,CAC9F,KAAoC,EACpC,OAAgB,EAChB,OAA6B,EACC,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAA
K,CAAC,QAAQ,CAAC,CAAC;IAC/D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,MAAM,IAAI,GAAG,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtD,IAAI,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC;QACzB,OAAO,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,EAA0B,CAAC,CAAC;IACxG,CAAC;IAED,MAAM,gBAAgB,GAAG,kBAAkB,CAAC,KAAK,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;IACtE,MAAM,MAAM,GAAG,CAAC,gBAAgB,KAAK,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,gBAAgB,CAAyB,CAAC;IAChG,MAAM,WAAW,GAAG,KAAsC,CAAC;IAE3D,IAAI,iBAAiB,CAAC,WAAW,CAAC,IAAI,mBAAmB,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,EAAE,CAAC;QACtG,OAAO,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE;YACnC,GAAG,IAAI;YACP,QAAQ,EAAE;gBACT,OAAO,EAAE,IAAI;gBACb,KAAK,EAAE,gBAAgB,CAAC,MAAM,EAAE,WAAW,CAAC;aAC5C;SACuB,CAAC,CAAC;IAC5B,CAAC;IAED,OAAO,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE;QACnC,GAAG,IAAI;QACP,QAAQ,EAAE;YACT,OAAO,EAAE,IAAI;YACb,YAAY,EAAE,eAAe,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,eAAe,CAAC;SAC3E;KACuB,CAAC,CAAC;AAAA,CAC3B,CAAC;AAEF,SAAS,YAAY,CACpB,KAAoC,EACpC,MAAe,EACf,cAAuC,EACzB;IACd,MAAM,WAAW,GAAgF,EAAE,CAAC;IACpG,IAAI,KAAK,CAAC,OAAO,EAAE,CAAC;QACnB,WAAW,CAAC,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QACpC,WAAW,CAAC,UAAU,GAAG,EAAE,CAAC,CAAC,sDAAsD;IACpF,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,IAAI,cAAc,EAAE,CAAC;QACrC,WAAW,CAAC,OAAO,GAAG,EAAE,GAAG,KAAK,CAAC,OAAO,EAAE,GAAG,cAAc,EAAE,CAAC;IAC/D,CAAC;IAED,OAAO,IAAI,WAAW,CAAC;QACtB,MAAM;QACN,WAAW,EAAE,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS;KAC1E,CAAC,CAAC;AAAA,CACH;AAED,SAAS,WAAW,CACnB,KAAoC,EACpC,OAAgB,EAChB,OAAO,GAAkB,EAAE,EACC;IAC5B,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAEjD,MAAM,gBAAgB,GAA0B,EAAE,CAAC;IACnD,IAAI,OAAO,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;QACvC,gBAAgB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IACpD,CAAC;IACD,IAAI,OAAO,CAAC,SAAS,KAAK,SAAS,EAAE,CAAC;QACrC,gBAAgB,CAAC,eAAe,GAAG,OAAO,CAAC,SAAS,CAAC;IACtD,CAAC;IAED,MAAM,MAAM,GAA0B;QACrC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,IAA
I,gBAAgB,CAAC;QACjE,GAAG,CAAC,OAAO,CAAC,YAAY,IAAI,EAAE,iBAAiB,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE,CAAC;QAC5F,GAAG,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;KACxF,CAAC;IAEF,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC;QACrE,MAAM,CAAC,UAAU,GAAG;YACnB,qBAAqB,EAAE;gBACtB,IAAI,EAAE,aAAa,CAAC,OAAO,CAAC,UAAU,CAAC;aACvC;SACD,CAAC;IACH,CAAC;SAAM,CAAC;QACP,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;IAC/B,CAAC;IAED,IAAI,OAAO,CAAC,QAAQ,EAAE,OAAO,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QAClD,MAAM,cAAc,GAAmB,EAAE,eAAe,EAAE,IAAI,EAAE,CAAC;QACjE,IAAI,OAAO,CAAC,QAAQ,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC1C,uFAAuF;YACvF,cAAc,CAAC,aAAa,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAY,CAAC;QAC9D,CAAC;aAAM,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,KAAK,SAAS,EAAE,CAAC;YACxD,cAAc,CAAC,cAAc,GAAG,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;QAC/D,CAAC;QACD,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC;IACxC,CAAC;SAAM,IAAI,KAAK,CAAC,SAAS,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC;QAC7E,MAAM,CAAC,cAAc,GAAG,yBAAyB,CAAC,KAAK,CAAC,CAAC;IAC1D,CAAC;IAED,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QACpB,IAAI,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YAC5B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;QACpC,CAAC;QACD,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,MAAM,CAAC;IACrC,CAAC;IAED,MAAM,MAAM,GAA8B;QACzC,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ;QACR,MAAM;KACN,CAAC;IAEF,OAAO,MAAM,CAAC;AAAA,CACd;AAID,SAAS,aAAa,CAAC,KAAoC,EAAW;IACrE,OAAO,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;AAAA,CAC/C;AAED,SAAS,iBAAiB,CAAC,KAAoC,EAAW;IACzE,OAAO,wBAAwB,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;AAAA,CAC7D;AAED,SAAS,mBAAmB,CAAC,KAAoC,EAAW;IAC3E,OAAO,0BAA0B,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;AAAA,CAC/D;AAED,SAAS,yBAAyB,CAAC,KAAoC,EAAkB;IACxF,uFAAuF;IACvF,yFAAyF;IACzF,oFAAoF;IACpF,IAAI,iBAAiB,CAAC,KAAK,CAAC,EAAE,CAAC;QAC9B,OAAO,EAAE,aAAa,EAAE,KAAY,EAAE,CAAC;IACxC,CAAC;IACD,IAAI,mBAAmB,CAAC,KAAK,CAAC,EAAE,CAAC;QAChC,OAAO,EAAE,aAAa,EAAE,SAAgB,EAAE,CAAC;IAC5
C,CAAC;IACD,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC;QAC1B,OAAO,EAAE,aAAa,EAAE,SAAgB,EAAE,CAAC;IAC5C,CAAC;IAED,wDAAwD;IACxD,OAAO,EAAE,cAAc,EAAE,CAAC,EAAE,CAAC;AAAA,CAC7B;AAED,SAAS,gBAAgB,CAAC,MAA4B,EAAE,KAAoC,EAAuB;IAClH,IAAI,iBAAiB,CAAC,KAAK,CAAC,EAAE,CAAC;QAC9B,QAAQ,MAAM,EAAE,CAAC;YAChB,KAAK,SAAS,CAAC;YACf,KAAK,KAAK;gBACT,OAAO,KAAK,CAAC;YACd,KAAK,QAAQ,CAAC;YACd,KAAK,MAAM;gBACV,OAAO,MAAM,CAAC;QAChB,CAAC;IACF,CAAC;IACD,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC;QAC1B,QAAQ,MAAM,EAAE,CAAC;YAChB,KAAK,SAAS,CAAC;YACf,KAAK,KAAK;gBACT,OAAO,SAAS,CAAC;YAClB,KAAK,QAAQ,CAAC;YACd,KAAK,MAAM;gBACV,OAAO,MAAM,CAAC;QAChB,CAAC;IACF,CAAC;IACD,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,SAAS;YACb,OAAO,SAAS,CAAC;QAClB,KAAK,KAAK;YACT,OAAO,KAAK,CAAC;QACd,KAAK,QAAQ;YACZ,OAAO,QAAQ,CAAC;QACjB,KAAK,MAAM;YACV,OAAO,MAAM,CAAC;IAChB,CAAC;AAAA,CACD;AAED,SAAS,eAAe,CACvB,KAAoC,EACpC,MAA4B,EAC5B,aAA+B,EACtB;IACT,IAAI,aAAa,EAAE,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE,CAAC;QAC3C,OAAO,aAAa,CAAC,MAAM,CAAE,CAAC;IAC/B,CAAC;IAED,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAClC,MAAM,OAAO,GAAyC;YACrD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,gBAAgB,CAAC,EAAE,CAAC;QACzC,MAAM,OAAO,GAAyC;YACrD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,IAAI,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;QACpC,MAAM,OAAO,GAAyC;YACrD,OAAO,EAAE,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,KAAK;SACX,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;IAED,OAAO,CAAC,CAAC,CAAC;AAAA,CACV","sourcesContent":["import {\n\ttype GenerateContentConfig,\n\ttype GenerateContentParameters,\n\tGoogleGenAI,\n\ttype ThinkingConfig,\n} from \"@google/genai\";\nimport { getEnvApiKey } from \"../env-api-keys.js\";\nimport { calculateCost, clampThinkingLevel } from \"../models.js\";\nimport type 
{\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tSimpleStreamOptions,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingBudgets,\n\tThinkingContent,\n\tThinkingLevel,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport type { GoogleThinkingLevel } from \"./google-shared.js\";\nimport {\n\tconvertMessages,\n\tconvertTools,\n\tisThinkingPart,\n\tmapStopReason,\n\tmapToolChoice,\n\tretainThoughtSignature,\n} from \"./google-shared.js\";\nimport { buildBaseOptions } from \"./simple-options.js\";\n\nexport interface GoogleOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"any\";\n\tthinking?: {\n\t\tenabled: boolean;\n\t\tbudgetTokens?: number; // -1 for dynamic, 0 to disable\n\t\tlevel?: GoogleThinkingLevel;\n\t};\n}\n\n// Counter for generating unique tool call IDs\nlet toolCallCounter = 0;\n\nexport const streamGoogle: StreamFunction<\"google-generative-ai\", GoogleOptions> = (\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions?: GoogleOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"google-generative-ai\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tconst client = createClient(model, apiKey, options?.headers);\n\t\t\tlet params = buildParams(model, context, options);\n\t\t\tconst nextParams = await 
options?.onPayload?.(params, model);\n\t\t\tif (nextParams !== undefined) {\n\t\t\t\tparams = nextParams as GenerateContentParameters;\n\t\t\t}\n\t\t\tconst googleStream = await client.models.generateContentStream(params);\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tlet currentBlock: TextContent | ThinkingContent | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tfor await (const chunk of googleStream) {\n\t\t\t\t// @google/genai documents GenerateContentResponse.responseId as an output-only field\n\t\t\t\t// used to identify each response. Keep the first non-empty one from the stream.\n\t\t\t\toutput.responseId ||= chunk.responseId;\n\t\t\t\tconst candidate = chunk.candidates?.[0];\n\t\t\t\tif (candidate?.content?.parts) {\n\t\t\t\t\tfor (const part of candidate.content.parts) {\n\t\t\t\t\t\tif (part.text !== undefined) {\n\t\t\t\t\t\t\tconst isThinking = isThinkingPart(part);\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\t(isThinking && currentBlock.type !== \"thinking\") ||\n\t\t\t\t\t\t\t\t(!isThinking && currentBlock.type !== \"text\")\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blocks.length - 1,\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif (isThinking) {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\", thinkingSignature: undefined 
};\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\t\tcurrentBlock.thinking += part.text;\n\t\t\t\t\t\t\t\tcurrentBlock.thinkingSignature = retainThoughtSignature(\n\t\t\t\t\t\t\t\t\tcurrentBlock.thinkingSignature,\n\t\t\t\t\t\t\t\t\tpart.thoughtSignature,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcurrentBlock.text += part.text;\n\t\t\t\t\t\t\t\tcurrentBlock.textSignature = retainThoughtSignature(\n\t\t\t\t\t\t\t\t\tcurrentBlock.textSignature,\n\t\t\t\t\t\t\t\t\tpart.thoughtSignature,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta: part.text,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (part.functionCall) {\n\t\t\t\t\t\t\tif (currentBlock) {\n\t\t\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\t\t\tcontentIndex: 
blockIndex(),\n\t\t\t\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t// Generate unique ID if not provided or if it's a duplicate\n\t\t\t\t\t\t\tconst providedId = part.functionCall.id;\n\t\t\t\t\t\t\tconst needsNewId =\n\t\t\t\t\t\t\t\t!providedId || output.content.some((b) => b.type === \"toolCall\" && b.id === providedId);\n\t\t\t\t\t\t\tconst toolCallId = needsNewId\n\t\t\t\t\t\t\t\t? `${part.functionCall.name}_${Date.now()}_${++toolCallCounter}`\n\t\t\t\t\t\t\t\t: providedId;\n\n\t\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\tid: toolCallId,\n\t\t\t\t\t\t\t\tname: part.functionCall.name || \"\",\n\t\t\t\t\t\t\t\targuments: (part.functionCall.args as Record<string, any>) ?? {},\n\t\t\t\t\t\t\t\t...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\toutput.content.push(toolCall);\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (candidate?.finishReason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(candidate.finishReason);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\")) {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (chunk.usageMetadata) {\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput:\n\t\t\t\t\t\t\t(chunk.usageMetadata.promptTokenCount || 0) - (chunk.usageMetadata.cachedContentTokenCount || 
0),\n\t\t\t\t\t\toutput:\n\t\t\t\t\t\t\t(chunk.usageMetadata.candidatesTokenCount || 0) + (chunk.usageMetadata.thoughtsTokenCount || 0),\n\t\t\t\t\t\tcacheRead: chunk.usageMetadata.cachedContentTokenCount || 0,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: chunk.usageMetadata.totalTokenCount || 0,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (currentBlock) {\n\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\t// Remove internal index property used during streaming\n\t\t\tfor (const block of output.content) {\n\t\t\t\tif (\"index\" in block) {\n\t\t\t\t\tdelete (block as { index?: number }).index;\n\t\t\t\t}\n\t\t\t}\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? 
error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nexport const streamSimpleGoogle: StreamFunction<\"google-generative-ai\", SimpleStreamOptions> = (\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream => {\n\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst base = buildBaseOptions(model, options, apiKey);\n\tif (!options?.reasoning) {\n\t\treturn streamGoogle(model, context, { ...base, thinking: { enabled: false } } satisfies GoogleOptions);\n\t}\n\n\tconst clampedReasoning = clampThinkingLevel(model, options.reasoning);\n\tconst effort = (clampedReasoning === \"off\" ? \"high\" : clampedReasoning) as ClampedThinkingLevel;\n\tconst googleModel = model as Model<\"google-generative-ai\">;\n\n\tif (isGemini3ProModel(googleModel) || isGemini3FlashModel(googleModel) || isGemma4Model(googleModel)) {\n\t\treturn streamGoogle(model, context, {\n\t\t\t...base,\n\t\t\tthinking: {\n\t\t\t\tenabled: true,\n\t\t\t\tlevel: getThinkingLevel(effort, googleModel),\n\t\t\t},\n\t\t} satisfies GoogleOptions);\n\t}\n\n\treturn streamGoogle(model, context, {\n\t\t...base,\n\t\tthinking: {\n\t\t\tenabled: true,\n\t\t\tbudgetTokens: getGoogleBudget(googleModel, effort, options.thinkingBudgets),\n\t\t},\n\t} satisfies GoogleOptions);\n};\n\nfunction createClient(\n\tmodel: Model<\"google-generative-ai\">,\n\tapiKey?: string,\n\toptionsHeaders?: Record<string, string>,\n): GoogleGenAI {\n\tconst httpOptions: { baseUrl?: string; apiVersion?: string; headers?: Record<string, string> } = {};\n\tif (model.baseUrl) {\n\t\thttpOptions.baseUrl = model.baseUrl;\n\t\thttpOptions.apiVersion = \"\"; // baseUrl already includes version path, don't append\n\t}\n\tif 
(model.headers || optionsHeaders) {\n\t\thttpOptions.headers = { ...model.headers, ...optionsHeaders };\n\t}\n\n\treturn new GoogleGenAI({\n\t\tapiKey,\n\t\thttpOptions: Object.keys(httpOptions).length > 0 ? httpOptions : undefined,\n\t});\n}\n\nfunction buildParams(\n\tmodel: Model<\"google-generative-ai\">,\n\tcontext: Context,\n\toptions: GoogleOptions = {},\n): GenerateContentParameters {\n\tconst contents = convertMessages(model, context);\n\n\tconst generationConfig: GenerateContentConfig = {};\n\tif (options.temperature !== undefined) {\n\t\tgenerationConfig.temperature = options.temperature;\n\t}\n\tif (options.maxTokens !== undefined) {\n\t\tgenerationConfig.maxOutputTokens = options.maxTokens;\n\t}\n\n\tconst config: GenerateContentConfig = {\n\t\t...(Object.keys(generationConfig).length > 0 && generationConfig),\n\t\t...(context.systemPrompt && { systemInstruction: sanitizeSurrogates(context.systemPrompt) }),\n\t\t...(context.tools && context.tools.length > 0 && { tools: convertTools(context.tools) }),\n\t};\n\n\tif (context.tools && context.tools.length > 0 && options.toolChoice) {\n\t\tconfig.toolConfig = {\n\t\t\tfunctionCallingConfig: {\n\t\t\t\tmode: mapToolChoice(options.toolChoice),\n\t\t\t},\n\t\t};\n\t} else {\n\t\tconfig.toolConfig = undefined;\n\t}\n\n\tif (options.thinking?.enabled && model.reasoning) {\n\t\tconst thinkingConfig: ThinkingConfig = { includeThoughts: true };\n\t\tif (options.thinking.level !== undefined) {\n\t\t\t// Cast to any since our GoogleThinkingLevel mirrors Google's ThinkingLevel enum values\n\t\t\tthinkingConfig.thinkingLevel = options.thinking.level as any;\n\t\t} else if (options.thinking.budgetTokens !== undefined) {\n\t\t\tthinkingConfig.thinkingBudget = options.thinking.budgetTokens;\n\t\t}\n\t\tconfig.thinkingConfig = thinkingConfig;\n\t} else if (model.reasoning && options.thinking && !options.thinking.enabled) {\n\t\tconfig.thinkingConfig = getDisabledThinkingConfig(model);\n\t}\n\n\tif (options.signal) 
{\n\t\tif (options.signal.aborted) {\n\t\t\tthrow new Error(\"Request aborted\");\n\t\t}\n\t\tconfig.abortSignal = options.signal;\n\t}\n\n\tconst params: GenerateContentParameters = {\n\t\tmodel: model.id,\n\t\tcontents,\n\t\tconfig,\n\t};\n\n\treturn params;\n}\n\ntype ClampedThinkingLevel = Exclude<ThinkingLevel, \"xhigh\">;\n\nfunction isGemma4Model(model: Model<\"google-generative-ai\">): boolean {\n\treturn /gemma-?4/.test(model.id.toLowerCase());\n}\n\nfunction isGemini3ProModel(model: Model<\"google-generative-ai\">): boolean {\n\treturn /gemini-3(?:\\.\\d+)?-pro/.test(model.id.toLowerCase());\n}\n\nfunction isGemini3FlashModel(model: Model<\"google-generative-ai\">): boolean {\n\treturn /gemini-3(?:\\.\\d+)?-flash/.test(model.id.toLowerCase());\n}\n\nfunction getDisabledThinkingConfig(model: Model<\"google-generative-ai\">): ThinkingConfig {\n\t// Google docs: Gemini 3.1 Pro cannot disable thinking, and Gemini 3 Flash / Flash-Lite\n\t// do not support full thinking-off either. For Gemini 3 models, use the lowest supported\n\t// thinkingLevel without includeThoughts so hidden thinking remains invisible to pi.\n\tif (isGemini3ProModel(model)) {\n\t\treturn { thinkingLevel: \"LOW\" as any };\n\t}\n\tif (isGemini3FlashModel(model)) {\n\t\treturn { thinkingLevel: \"MINIMAL\" as any };\n\t}\n\tif (isGemma4Model(model)) {\n\t\treturn { thinkingLevel: \"MINIMAL\" as any };\n\t}\n\n\t// Gemini 2.x supports disabling via thinkingBudget = 0.\n\treturn { thinkingBudget: 0 };\n}\n\nfunction getThinkingLevel(effort: ClampedThinkingLevel, model: Model<\"google-generative-ai\">): GoogleThinkingLevel {\n\tif (isGemini3ProModel(model)) {\n\t\tswitch (effort) {\n\t\t\tcase \"minimal\":\n\t\t\tcase \"low\":\n\t\t\t\treturn \"LOW\";\n\t\t\tcase \"medium\":\n\t\t\tcase \"high\":\n\t\t\t\treturn \"HIGH\";\n\t\t}\n\t}\n\tif (isGemma4Model(model)) {\n\t\tswitch (effort) {\n\t\t\tcase \"minimal\":\n\t\t\tcase \"low\":\n\t\t\t\treturn \"MINIMAL\";\n\t\t\tcase \"medium\":\n\t\t\tcase 
\"high\":\n\t\t\t\treturn \"HIGH\";\n\t\t}\n\t}\n\tswitch (effort) {\n\t\tcase \"minimal\":\n\t\t\treturn \"MINIMAL\";\n\t\tcase \"low\":\n\t\t\treturn \"LOW\";\n\t\tcase \"medium\":\n\t\t\treturn \"MEDIUM\";\n\t\tcase \"high\":\n\t\t\treturn \"HIGH\";\n\t}\n}\n\nfunction getGoogleBudget(\n\tmodel: Model<\"google-generative-ai\">,\n\teffort: ClampedThinkingLevel,\n\tcustomBudgets?: ThinkingBudgets,\n): number {\n\tif (customBudgets?.[effort] !== undefined) {\n\t\treturn customBudgets[effort]!;\n\t}\n\n\tif (model.id.includes(\"2.5-pro\")) {\n\t\tconst budgets: Record<ClampedThinkingLevel, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 32768,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\tif (model.id.includes(\"2.5-flash-lite\")) {\n\t\tconst budgets: Record<ClampedThinkingLevel, number> = {\n\t\t\tminimal: 512,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 24576,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\tif (model.id.includes(\"2.5-flash\")) {\n\t\tconst budgets: Record<ClampedThinkingLevel, number> = {\n\t\t\tminimal: 128,\n\t\t\tlow: 2048,\n\t\t\tmedium: 8192,\n\t\t\thigh: 24576,\n\t\t};\n\t\treturn budgets[effort];\n\t}\n\n\treturn -1;\n}\n"]}
@@ -0,0 +1,3 @@
1
+ import type { ImagesFunction, ImagesOptions } from "../../types.js";
2
+ export declare const generateImagesOpenRouter: ImagesFunction<"openrouter-images", ImagesOptions>;
3
+ //# sourceMappingURL=openrouter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openrouter.d.ts","sourceRoot":"","sources":["../../../src/providers/images/openrouter.ts"],"names":[],"mappings":"AASA,OAAO,KAAK,EAIX,cAAc,EAEd,aAAa,EAEb,MAAM,gBAAgB,CAAC;AAoBxB,eAAO,MAAM,wBAAwB,EAAE,cAAc,CAAC,mBAAmB,EAAE,aAAa,CAmEvF,CAAC","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletion,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionCreateParamsNonStreaming,\n} from \"openai/resources/chat/completions.js\";\nimport { getEnvApiKey } from \"../../env-api-keys.js\";\nimport type {\n\tAssistantImages,\n\tImageContent,\n\tImagesContext,\n\tImagesFunction,\n\tImagesModel,\n\tImagesOptions,\n\tTextContent,\n} from \"../../types.js\";\nimport { headersToRecord } from \"../../utils/headers.js\";\nimport { sanitizeSurrogates } from \"../../utils/sanitize-unicode.js\";\n\ninterface OpenRouterGeneratedImage {\n\timage_url?: string | { url?: string };\n}\n\ntype OpenRouterImageGenerationMessage = ChatCompletion[\"choices\"][number][\"message\"] & {\n\timages?: OpenRouterGeneratedImage[];\n};\n\ntype OpenRouterImageGenerationChoice = ChatCompletion[\"choices\"][number] & {\n\tmessage: OpenRouterImageGenerationMessage;\n};\n\ntype OpenRouterImageGenerationResponse = ChatCompletion & {\n\tchoices: OpenRouterImageGenerationChoice[];\n};\n\nexport const generateImagesOpenRouter: ImagesFunction<\"openrouter-images\", ImagesOptions> = async (\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tcontext: ImagesContext,\n\toptions?: ImagesOptions,\n) => {\n\tconst output: AssistantImages = {\n\t\tapi: model.api,\n\t\tprovider: model.provider,\n\t\tmodel: model.id,\n\t\toutput: [],\n\t\tstopReason: \"stop\",\n\t\ttimestamp: Date.now(),\n\t};\n\n\ttry {\n\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\t\tif (!apiKey) {\n\t\t\tthrow new Error(`No API key available for provider: ${model.provider}`);\n\t\t}\n\t\tconst client = createClient(model, 
apiKey, options?.headers);\n\t\tlet params = buildParams(model, context);\n\t\tconst nextParams = await options?.onPayload?.(params, model);\n\t\tif (nextParams !== undefined) {\n\t\t\tparams = nextParams as typeof params;\n\t\t}\n\t\tconst requestOptions = {\n\t\t\t...(options?.signal ? { signal: options.signal } : {}),\n\t\t\t...(options?.timeoutMs !== undefined ? { timeout: options.timeoutMs } : {}),\n\t\t\t...(options?.maxRetries !== undefined ? { maxRetries: options.maxRetries } : {}),\n\t\t};\n\t\tconst { data: response, response: rawResponse } = await client.chat.completions\n\t\t\t.create(params as unknown as ChatCompletionCreateParamsNonStreaming, requestOptions)\n\t\t\t.withResponse();\n\t\tawait options?.onResponse?.({ status: rawResponse.status, headers: headersToRecord(rawResponse.headers) }, model);\n\n\t\tconst imageResponse = response as OpenRouterImageGenerationResponse;\n\t\toutput.responseId = imageResponse.id;\n\t\tif (imageResponse.usage) {\n\t\t\toutput.usage = parseUsage(imageResponse.usage, model);\n\t\t}\n\n\t\tconst choice = imageResponse.choices[0];\n\t\tif (choice) {\n\t\t\tconst content = choice.message.content;\n\t\t\tif (typeof content === \"string\" && content.length > 0) {\n\t\t\t\toutput.output.push({ type: \"text\", text: content } satisfies TextContent);\n\t\t\t}\n\n\t\t\tfor (const image of choice.message.images ?? []) {\n\t\t\t\tconst imageUrl = typeof image.image_url === \"string\" ? image.image_url : image.image_url?.url;\n\t\t\t\tif (!imageUrl?.startsWith(\"data:\")) continue;\n\t\t\t\tconst matches = imageUrl.match(/^data:([^;]+);base64,(.+)$/);\n\t\t\t\tif (!matches) continue;\n\t\t\t\toutput.output.push({\n\t\t\t\t\ttype: \"image\",\n\t\t\t\t\tmimeType: matches[1],\n\t\t\t\t\tdata: matches[2],\n\t\t\t\t} satisfies ImageContent);\n\t\t\t}\n\t\t}\n\n\t\treturn output;\n\t} catch (error) {\n\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\toutput.errorMessage = error instanceof Error ? 
error.message : JSON.stringify(error);\n\t\treturn output;\n\t}\n};\n\nfunction createClient(\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tapiKey: string,\n\toptionsHeaders?: Record<string, string>,\n): OpenAI {\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: {\n\t\t\t...model.headers,\n\t\t\t...optionsHeaders,\n\t\t},\n\t});\n}\n\ntype OpenRouterImagesCreateParams = Omit<ChatCompletionCreateParamsNonStreaming, \"modalities\"> & {\n\tmodalities: Array<\"image\" | \"text\">;\n};\n\nfunction buildParams(model: ImagesModel<\"openrouter-images\">, context: ImagesContext): OpenRouterImagesCreateParams {\n\tconst content: ChatCompletionContentPart[] = context.input.map((item): ChatCompletionContentPart => {\n\t\tif (item.type === \"text\") {\n\t\t\treturn {\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t}\n\t\treturn {\n\t\t\ttype: \"image_url\",\n\t\t\timage_url: {\n\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t},\n\t\t} satisfies ChatCompletionContentPartImage;\n\t});\n\n\treturn {\n\t\tmodel: model.id,\n\t\tmessages: [\n\t\t\t{\n\t\t\t\trole: \"user\" as const,\n\t\t\t\tcontent,\n\t\t\t},\n\t\t],\n\t\tstream: false,\n\t\tmodalities: model.output.includes(\"text\") ? [\"image\", \"text\"] : [\"image\"],\n\t};\n}\n\nfunction parseUsage(\n\trawUsage: {\n\t\tprompt_tokens?: number;\n\t\tcompletion_tokens?: number;\n\t\tprompt_tokens_details?: { cached_tokens?: number; cache_write_tokens?: number };\n\t},\n\tmodel: ImagesModel<\"openrouter-images\">,\n) {\n\tconst promptTokens = rawUsage.prompt_tokens || 0;\n\tconst reportedCachedTokens = rawUsage.prompt_tokens_details?.cached_tokens || 0;\n\tconst cacheWriteTokens = rawUsage.prompt_tokens_details?.cache_write_tokens || 0;\n\tconst cacheReadTokens =\n\t\tcacheWriteTokens > 0 ? 
Math.max(0, reportedCachedTokens - cacheWriteTokens) : reportedCachedTokens;\n\tconst input = Math.max(0, promptTokens - cacheReadTokens - cacheWriteTokens);\n\tconst output = rawUsage.completion_tokens || 0;\n\tconst usage = {\n\t\tinput,\n\t\toutput,\n\t\tcacheRead: cacheReadTokens,\n\t\tcacheWrite: cacheWriteTokens,\n\t\ttotalTokens: input + output + cacheReadTokens + cacheWriteTokens,\n\t\tcost: {\n\t\t\tinput: (model.cost.input / 1000000) * input,\n\t\t\toutput: (model.cost.output / 1000000) * output,\n\t\t\tcacheRead: (model.cost.cacheRead / 1000000) * cacheReadTokens,\n\t\t\tcacheWrite: (model.cost.cacheWrite / 1000000) * cacheWriteTokens,\n\t\t\ttotal: 0,\n\t\t},\n\t};\n\tusage.cost.total = usage.cost.input + usage.cost.output + usage.cost.cacheRead + usage.cost.cacheWrite;\n\treturn usage;\n}\n"]}
@@ -0,0 +1,129 @@
1
+ import OpenAI from "openai";
2
+ import { getEnvApiKey } from "../../env-api-keys.js";
3
+ import { headersToRecord } from "../../utils/headers.js";
4
+ import { sanitizeSurrogates } from "../../utils/sanitize-unicode.js";
5
+ export const generateImagesOpenRouter = async (model, context, options) => {
6
+ const output = {
7
+ api: model.api,
8
+ provider: model.provider,
9
+ model: model.id,
10
+ output: [],
11
+ stopReason: "stop",
12
+ timestamp: Date.now(),
13
+ };
14
+ try {
15
+ const apiKey = options?.apiKey || getEnvApiKey(model.provider);
16
+ if (!apiKey) {
17
+ throw new Error(`No API key available for provider: ${model.provider}`);
18
+ }
19
+ const client = createClient(model, apiKey, options?.headers);
20
+ let params = buildParams(model, context);
21
+ const nextParams = await options?.onPayload?.(params, model);
22
+ if (nextParams !== undefined) {
23
+ params = nextParams;
24
+ }
25
+ const requestOptions = {
26
+ ...(options?.signal ? { signal: options.signal } : {}),
27
+ ...(options?.timeoutMs !== undefined ? { timeout: options.timeoutMs } : {}),
28
+ ...(options?.maxRetries !== undefined ? { maxRetries: options.maxRetries } : {}),
29
+ };
30
+ const { data: response, response: rawResponse } = await client.chat.completions
31
+ .create(params, requestOptions)
32
+ .withResponse();
33
+ await options?.onResponse?.({ status: rawResponse.status, headers: headersToRecord(rawResponse.headers) }, model);
34
+ const imageResponse = response;
35
+ output.responseId = imageResponse.id;
36
+ if (imageResponse.usage) {
37
+ output.usage = parseUsage(imageResponse.usage, model);
38
+ }
39
+ const choice = imageResponse.choices[0];
40
+ if (choice) {
41
+ const content = choice.message.content;
42
+ if (typeof content === "string" && content.length > 0) {
43
+ output.output.push({ type: "text", text: content });
44
+ }
45
+ for (const image of choice.message.images ?? []) {
46
+ const imageUrl = typeof image.image_url === "string" ? image.image_url : image.image_url?.url;
47
+ if (!imageUrl?.startsWith("data:"))
48
+ continue;
49
+ const matches = imageUrl.match(/^data:([^;]+);base64,(.+)$/);
50
+ if (!matches)
51
+ continue;
52
+ output.output.push({
53
+ type: "image",
54
+ mimeType: matches[1],
55
+ data: matches[2],
56
+ });
57
+ }
58
+ }
59
+ return output;
60
+ }
61
+ catch (error) {
62
+ output.stopReason = options?.signal?.aborted ? "aborted" : "error";
63
+ output.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);
64
+ return output;
65
+ }
66
+ };
67
+ function createClient(model, apiKey, optionsHeaders) {
68
+ return new OpenAI({
69
+ apiKey,
70
+ baseURL: model.baseUrl,
71
+ dangerouslyAllowBrowser: true,
72
+ defaultHeaders: {
73
+ ...model.headers,
74
+ ...optionsHeaders,
75
+ },
76
+ });
77
+ }
78
+ function buildParams(model, context) {
79
+ const content = context.input.map((item) => {
80
+ if (item.type === "text") {
81
+ return {
82
+ type: "text",
83
+ text: sanitizeSurrogates(item.text),
84
+ };
85
+ }
86
+ return {
87
+ type: "image_url",
88
+ image_url: {
89
+ url: `data:${item.mimeType};base64,${item.data}`,
90
+ },
91
+ };
92
+ });
93
+ return {
94
+ model: model.id,
95
+ messages: [
96
+ {
97
+ role: "user",
98
+ content,
99
+ },
100
+ ],
101
+ stream: false,
102
+ modalities: model.output.includes("text") ? ["image", "text"] : ["image"],
103
+ };
104
+ }
105
+ function parseUsage(rawUsage, model) {
106
+ const promptTokens = rawUsage.prompt_tokens || 0;
107
+ const reportedCachedTokens = rawUsage.prompt_tokens_details?.cached_tokens || 0;
108
+ const cacheWriteTokens = rawUsage.prompt_tokens_details?.cache_write_tokens || 0;
109
+ const cacheReadTokens = cacheWriteTokens > 0 ? Math.max(0, reportedCachedTokens - cacheWriteTokens) : reportedCachedTokens;
110
+ const input = Math.max(0, promptTokens - cacheReadTokens - cacheWriteTokens);
111
+ const output = rawUsage.completion_tokens || 0;
112
+ const usage = {
113
+ input,
114
+ output,
115
+ cacheRead: cacheReadTokens,
116
+ cacheWrite: cacheWriteTokens,
117
+ totalTokens: input + output + cacheReadTokens + cacheWriteTokens,
118
+ cost: {
119
+ input: (model.cost.input / 1000000) * input,
120
+ output: (model.cost.output / 1000000) * output,
121
+ cacheRead: (model.cost.cacheRead / 1000000) * cacheReadTokens,
122
+ cacheWrite: (model.cost.cacheWrite / 1000000) * cacheWriteTokens,
123
+ total: 0,
124
+ },
125
+ };
126
+ usage.cost.total = usage.cost.input + usage.cost.output + usage.cost.cacheRead + usage.cost.cacheWrite;
127
+ return usage;
128
+ }
129
+ //# sourceMappingURL=openrouter.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openrouter.js","sourceRoot":"","sources":["../../../src/providers/images/openrouter.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAQ5B,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAUrD,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AACzD,OAAO,EAAE,kBAAkB,EAAE,MAAM,iCAAiC,CAAC;AAkBrE,MAAM,CAAC,MAAM,wBAAwB,GAAuD,KAAK,EAChG,KAAuC,EACvC,OAAsB,EACtB,OAAuB,EACtB,EAAE,CAAC;IACJ,MAAM,MAAM,GAAoB;QAC/B,GAAG,EAAE,KAAK,CAAC,GAAG;QACd,QAAQ,EAAE,KAAK,CAAC,QAAQ;QACxB,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,MAAM,EAAE,EAAE;QACV,UAAU,EAAE,MAAM;QAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;IAEF,IAAI,CAAC;QACJ,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/D,IAAI,CAAC,MAAM,EAAE,CAAC;YACb,MAAM,IAAI,KAAK,CAAC,sCAAsC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QACzE,CAAC;QACD,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAC7D,IAAI,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QACzC,MAAM,UAAU,GAAG,MAAM,OAAO,EAAE,SAAS,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,GAAG,UAA2B,CAAC;QACtC,CAAC;QACD,MAAM,cAAc,GAAG;YACtB,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YACtD,GAAG,CAAC,OAAO,EAAE,SAAS,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YAC3E,GAAG,CAAC,OAAO,EAAE,UAAU,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;SAChF,CAAC;QACF,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW;aAC7E,MAAM,CAAC,MAA2D,EAAE,cAAc,CAAC;aACnF,YAAY,EAAE,CAAC;QACjB,MAAM,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,MAAM,EAAE,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC,OAAO,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;QAElH,MAAM,aAAa,GAAG,QAA6C,CAAC;QACpE,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,EAAE,CAAC;QACrC,IAAI,aAAa,CAAC,KAAK,EAAE,CAAC;YACzB,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;QACvD,CAAC;QAED,MAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACxC,IAAI,MAAM,EAAE,CAAC;YACZ,MA
AM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC;YACvC,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAwB,CAAC,CAAC;YAC3E,CAAC;YAED,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,IAAI,EAAE,EAAE,CAAC;gBACjD,MAAM,QAAQ,GAAG,OAAO,KAAK,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,GAAG,CAAC;gBAC9F,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC,OAAO,CAAC;oBAAE,SAAS;gBAC7C,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;gBAC7D,IAAI,CAAC,OAAO;oBAAE,SAAS;gBACvB,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;oBAClB,IAAI,EAAE,OAAO;oBACb,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;oBACpB,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;iBACO,CAAC,CAAC;YAC3B,CAAC;QACF,CAAC;QAED,OAAO,MAAM,CAAC;IACf,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAChB,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;QACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QACrF,OAAO,MAAM,CAAC;IACf,CAAC;AAAA,CACD,CAAC;AAEF,SAAS,YAAY,CACpB,KAAuC,EACvC,MAAc,EACd,cAAuC,EAC9B;IACT,OAAO,IAAI,MAAM,CAAC;QACjB,MAAM;QACN,OAAO,EAAE,KAAK,CAAC,OAAO;QACtB,uBAAuB,EAAE,IAAI;QAC7B,cAAc,EAAE;YACf,GAAG,KAAK,CAAC,OAAO;YAChB,GAAG,cAAc;SACjB;KACD,CAAC,CAAC;AAAA,CACH;AAMD,SAAS,WAAW,CAAC,KAAuC,EAAE,OAAsB,EAAgC;IACnH,MAAM,OAAO,GAAgC,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAA6B,EAAE,CAAC;QACnG,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC1B,OAAO;gBACN,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;aACK,CAAC;QAC3C,CAAC;QACD,OAAO;YACN,IAAI,EAAE,WAAW;YACjB,SAAS,EAAE;gBACV,GAAG,EAAE,QAAQ,IAAI,CAAC,QAAQ,WAAW,IAAI,CAAC,IAAI,EAAE;aAChD;SACwC,CAAC;IAAA,CAC3C,CAAC,CAAC;IAEH,OAAO;QACN,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ,EAAE;YACT;gBACC,IAAI,EAAE,MAAe;gBACrB,OAAO;aACP;SACD;QACD,MAAM,EAAE,KAAK;QACb,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;KACzE,CAAC;AAAA,CACF;AAED,SAAS,UAAU,CAClB,QAIC,EACD,KAA
uC,EACtC;IACD,MAAM,YAAY,GAAG,QAAQ,CAAC,aAAa,IAAI,CAAC,CAAC;IACjD,MAAM,oBAAoB,GAAG,QAAQ,CAAC,qBAAqB,EAAE,aAAa,IAAI,CAAC,CAAC;IAChF,MAAM,gBAAgB,GAAG,QAAQ,CAAC,qBAAqB,EAAE,kBAAkB,IAAI,CAAC,CAAC;IACjF,MAAM,eAAe,GACpB,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,oBAAoB,GAAG,gBAAgB,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC;IACpG,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,YAAY,GAAG,eAAe,GAAG,gBAAgB,CAAC,CAAC;IAC7E,MAAM,MAAM,GAAG,QAAQ,CAAC,iBAAiB,IAAI,CAAC,CAAC;IAC/C,MAAM,KAAK,GAAG;QACb,KAAK;QACL,MAAM;QACN,SAAS,EAAE,eAAe;QAC1B,UAAU,EAAE,gBAAgB;QAC5B,WAAW,EAAE,KAAK,GAAG,MAAM,GAAG,eAAe,GAAG,gBAAgB;QAChE,IAAI,EAAE;YACL,KAAK,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,KAAK;YAC3C,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,MAAM;YAC9C,SAAS,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,eAAe;YAC7D,UAAU,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,GAAG,gBAAgB;YAChE,KAAK,EAAE,CAAC;SACR;KACD,CAAC;IACF,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;IACvG,OAAO,KAAK,CAAC;AAAA,CACb","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletion,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionCreateParamsNonStreaming,\n} from \"openai/resources/chat/completions.js\";\nimport { getEnvApiKey } from \"../../env-api-keys.js\";\nimport type {\n\tAssistantImages,\n\tImageContent,\n\tImagesContext,\n\tImagesFunction,\n\tImagesModel,\n\tImagesOptions,\n\tTextContent,\n} from \"../../types.js\";\nimport { headersToRecord } from \"../../utils/headers.js\";\nimport { sanitizeSurrogates } from \"../../utils/sanitize-unicode.js\";\n\ninterface OpenRouterGeneratedImage {\n\timage_url?: string | { url?: string };\n}\n\ntype OpenRouterImageGenerationMessage = ChatCompletion[\"choices\"][number][\"message\"] & {\n\timages?: OpenRouterGeneratedImage[];\n};\n\ntype 
OpenRouterImageGenerationChoice = ChatCompletion[\"choices\"][number] & {\n\tmessage: OpenRouterImageGenerationMessage;\n};\n\ntype OpenRouterImageGenerationResponse = ChatCompletion & {\n\tchoices: OpenRouterImageGenerationChoice[];\n};\n\nexport const generateImagesOpenRouter: ImagesFunction<\"openrouter-images\", ImagesOptions> = async (\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tcontext: ImagesContext,\n\toptions?: ImagesOptions,\n) => {\n\tconst output: AssistantImages = {\n\t\tapi: model.api,\n\t\tprovider: model.provider,\n\t\tmodel: model.id,\n\t\toutput: [],\n\t\tstopReason: \"stop\",\n\t\ttimestamp: Date.now(),\n\t};\n\n\ttry {\n\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\t\tif (!apiKey) {\n\t\t\tthrow new Error(`No API key available for provider: ${model.provider}`);\n\t\t}\n\t\tconst client = createClient(model, apiKey, options?.headers);\n\t\tlet params = buildParams(model, context);\n\t\tconst nextParams = await options?.onPayload?.(params, model);\n\t\tif (nextParams !== undefined) {\n\t\t\tparams = nextParams as typeof params;\n\t\t}\n\t\tconst requestOptions = {\n\t\t\t...(options?.signal ? { signal: options.signal } : {}),\n\t\t\t...(options?.timeoutMs !== undefined ? { timeout: options.timeoutMs } : {}),\n\t\t\t...(options?.maxRetries !== undefined ? 
{ maxRetries: options.maxRetries } : {}),\n\t\t};\n\t\tconst { data: response, response: rawResponse } = await client.chat.completions\n\t\t\t.create(params as unknown as ChatCompletionCreateParamsNonStreaming, requestOptions)\n\t\t\t.withResponse();\n\t\tawait options?.onResponse?.({ status: rawResponse.status, headers: headersToRecord(rawResponse.headers) }, model);\n\n\t\tconst imageResponse = response as OpenRouterImageGenerationResponse;\n\t\toutput.responseId = imageResponse.id;\n\t\tif (imageResponse.usage) {\n\t\t\toutput.usage = parseUsage(imageResponse.usage, model);\n\t\t}\n\n\t\tconst choice = imageResponse.choices[0];\n\t\tif (choice) {\n\t\t\tconst content = choice.message.content;\n\t\t\tif (typeof content === \"string\" && content.length > 0) {\n\t\t\t\toutput.output.push({ type: \"text\", text: content } satisfies TextContent);\n\t\t\t}\n\n\t\t\tfor (const image of choice.message.images ?? []) {\n\t\t\t\tconst imageUrl = typeof image.image_url === \"string\" ? image.image_url : image.image_url?.url;\n\t\t\t\tif (!imageUrl?.startsWith(\"data:\")) continue;\n\t\t\t\tconst matches = imageUrl.match(/^data:([^;]+);base64,(.+)$/);\n\t\t\t\tif (!matches) continue;\n\t\t\t\toutput.output.push({\n\t\t\t\t\ttype: \"image\",\n\t\t\t\t\tmimeType: matches[1],\n\t\t\t\t\tdata: matches[2],\n\t\t\t\t} satisfies ImageContent);\n\t\t\t}\n\t\t}\n\n\t\treturn output;\n\t} catch (error) {\n\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\toutput.errorMessage = error instanceof Error ? 
error.message : JSON.stringify(error);\n\t\treturn output;\n\t}\n};\n\nfunction createClient(\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tapiKey: string,\n\toptionsHeaders?: Record<string, string>,\n): OpenAI {\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: {\n\t\t\t...model.headers,\n\t\t\t...optionsHeaders,\n\t\t},\n\t});\n}\n\ntype OpenRouterImagesCreateParams = Omit<ChatCompletionCreateParamsNonStreaming, \"modalities\"> & {\n\tmodalities: Array<\"image\" | \"text\">;\n};\n\nfunction buildParams(model: ImagesModel<\"openrouter-images\">, context: ImagesContext): OpenRouterImagesCreateParams {\n\tconst content: ChatCompletionContentPart[] = context.input.map((item): ChatCompletionContentPart => {\n\t\tif (item.type === \"text\") {\n\t\t\treturn {\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t}\n\t\treturn {\n\t\t\ttype: \"image_url\",\n\t\t\timage_url: {\n\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t},\n\t\t} satisfies ChatCompletionContentPartImage;\n\t});\n\n\treturn {\n\t\tmodel: model.id,\n\t\tmessages: [\n\t\t\t{\n\t\t\t\trole: \"user\" as const,\n\t\t\t\tcontent,\n\t\t\t},\n\t\t],\n\t\tstream: false,\n\t\tmodalities: model.output.includes(\"text\") ? [\"image\", \"text\"] : [\"image\"],\n\t};\n}\n\nfunction parseUsage(\n\trawUsage: {\n\t\tprompt_tokens?: number;\n\t\tcompletion_tokens?: number;\n\t\tprompt_tokens_details?: { cached_tokens?: number; cache_write_tokens?: number };\n\t},\n\tmodel: ImagesModel<\"openrouter-images\">,\n) {\n\tconst promptTokens = rawUsage.prompt_tokens || 0;\n\tconst reportedCachedTokens = rawUsage.prompt_tokens_details?.cached_tokens || 0;\n\tconst cacheWriteTokens = rawUsage.prompt_tokens_details?.cache_write_tokens || 0;\n\tconst cacheReadTokens =\n\t\tcacheWriteTokens > 0 ? 
Math.max(0, reportedCachedTokens - cacheWriteTokens) : reportedCachedTokens;\n\tconst input = Math.max(0, promptTokens - cacheReadTokens - cacheWriteTokens);\n\tconst output = rawUsage.completion_tokens || 0;\n\tconst usage = {\n\t\tinput,\n\t\toutput,\n\t\tcacheRead: cacheReadTokens,\n\t\tcacheWrite: cacheWriteTokens,\n\t\ttotalTokens: input + output + cacheReadTokens + cacheWriteTokens,\n\t\tcost: {\n\t\t\tinput: (model.cost.input / 1000000) * input,\n\t\t\toutput: (model.cost.output / 1000000) * output,\n\t\t\tcacheRead: (model.cost.cacheRead / 1000000) * cacheReadTokens,\n\t\t\tcacheWrite: (model.cost.cacheWrite / 1000000) * cacheWriteTokens,\n\t\t\ttotal: 0,\n\t\t},\n\t};\n\tusage.cost.total = usage.cost.input + usage.cost.output + usage.cost.cacheRead + usage.cost.cacheWrite;\n\treturn usage;\n}\n"]}
@@ -0,0 +1,4 @@
1
+ import type { ImagesFunction, ImagesOptions } from "../../types.js";
2
+ export declare const generateImagesOpenRouter: ImagesFunction<"openrouter-images", ImagesOptions>;
3
+ export declare function registerBuiltInImagesApiProviders(): void;
4
+ //# sourceMappingURL=register-builtins.d.ts.map