@openrouter/ai-sdk-provider 2.0.2 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2410,7 +2410,7 @@ function getCacheControl(providerMetadata) {
  return (_c = (_b16 = (_a16 = openrouter == null ? void 0 : openrouter.cacheControl) != null ? _a16 : openrouter == null ? void 0 : openrouter.cache_control) != null ? _b16 : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
  }
  function convertToOpenRouterChatMessages(prompt) {
- var _a16, _b16, _c, _d, _e, _f, _g;
+ var _a16, _b16, _c, _d, _e, _f, _g, _h;
  const messages = [];
  for (const { role, content, providerOptions } of prompt) {
  switch (role) {
@@ -2439,42 +2439,46 @@ function convertToOpenRouterChatMessages(prompt) {
  break;
  }
  const messageCacheControl = getCacheControl(providerOptions);
+ let lastTextPartIndex = -1;
+ for (let i = content.length - 1; i >= 0; i--) {
+ if (((_c = content[i]) == null ? void 0 : _c.type) === "text") {
+ lastTextPartIndex = i;
+ break;
+ }
+ }
  const contentParts = content.map(
- (part) => {
- var _a17, _b17, _c2, _d2, _e2, _f2, _g2;
- const cacheControl = (_a17 = getCacheControl(part.providerOptions)) != null ? _a17 : messageCacheControl;
+ (part, index) => {
+ var _a17, _b17, _c2, _d2, _e2, _f2;
+ const isLastTextPart = part.type === "text" && index === lastTextPartIndex;
+ const partCacheControl = getCacheControl(part.providerOptions);
+ const cacheControl = part.type === "text" ? partCacheControl != null ? partCacheControl : isLastTextPart ? messageCacheControl : void 0 : partCacheControl;
  switch (part.type) {
  case "text":
- return {
+ return __spreadValues({
  type: "text",
- text: part.text,
- // For text parts, only use part-specific cache control
- cache_control: cacheControl
- };
+ text: part.text
+ }, cacheControl && { cache_control: cacheControl });
  case "file": {
- if ((_b17 = part.mediaType) == null ? void 0 : _b17.startsWith("image/")) {
+ if ((_a17 = part.mediaType) == null ? void 0 : _a17.startsWith("image/")) {
  const url = getFileUrl({
  part,
  defaultMediaType: "image/jpeg"
  });
- return {
+ return __spreadValues({
  type: "image_url",
  image_url: {
  url
- },
- // For image parts, use part-specific or message-level cache control
- cache_control: cacheControl
- };
+ }
+ }, cacheControl && { cache_control: cacheControl });
  }
- if ((_c2 = part.mediaType) == null ? void 0 : _c2.startsWith("audio/")) {
- return {
+ if ((_b17 = part.mediaType) == null ? void 0 : _b17.startsWith("audio/")) {
+ return __spreadValues({
  type: "input_audio",
- input_audio: getInputAudioData(part),
- cache_control: cacheControl
- };
+ input_audio: getInputAudioData(part)
+ }, cacheControl && { cache_control: cacheControl });
  }
  const fileName = String(
- (_g2 = (_f2 = (_e2 = (_d2 = part.providerOptions) == null ? void 0 : _d2.openrouter) == null ? void 0 : _e2.filename) != null ? _f2 : part.filename) != null ? _g2 : ""
+ (_f2 = (_e2 = (_d2 = (_c2 = part.providerOptions) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d2.filename) != null ? _e2 : part.filename) != null ? _f2 : ""
  );
  const fileData = getFileUrl({
  part,
@@ -2492,21 +2496,19 @@ function convertToOpenRouterChatMessages(prompt) {
  }
  };
  }
- return {
+ return __spreadValues({
  type: "file",
  file: {
  filename: fileName,
  file_data: fileData
- },
- cache_control: cacheControl
- };
+ }
+ }, cacheControl && { cache_control: cacheControl });
  }
  default: {
- return {
+ return __spreadValues({
  type: "text",
- text: "",
- cache_control: cacheControl
- };
+ text: ""
+ }, cacheControl && { cache_control: cacheControl });
  }
  }
  }
@@ -2550,8 +2552,8 @@ function convertToOpenRouterChatMessages(prompt) {
  }
  }
  const parsedProviderOptions = OpenRouterProviderOptionsSchema.safeParse(providerOptions);
- const messageReasoningDetails = parsedProviderOptions.success ? (_d = (_c = parsedProviderOptions.data) == null ? void 0 : _c.openrouter) == null ? void 0 : _d.reasoning_details : void 0;
- const messageAnnotations = parsedProviderOptions.success ? (_f = (_e = parsedProviderOptions.data) == null ? void 0 : _e.openrouter) == null ? void 0 : _f.annotations : void 0;
+ const messageReasoningDetails = parsedProviderOptions.success ? (_e = (_d = parsedProviderOptions.data) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.reasoning_details : void 0;
+ const messageAnnotations = parsedProviderOptions.success ? (_g = (_f = parsedProviderOptions.data) == null ? void 0 : _f.openrouter) == null ? void 0 : _g.annotations : void 0;
  const finalReasoningDetails = messageReasoningDetails && Array.isArray(messageReasoningDetails) && messageReasoningDetails.length > 0 ? messageReasoningDetails : findFirstReasoningDetails(content);
  messages.push({
  role: "assistant",
@@ -2574,7 +2576,7 @@ function convertToOpenRouterChatMessages(prompt) {
  role: "tool",
  tool_call_id: toolResponse.toolCallId,
  content: content2,
- cache_control: (_g = getCacheControl(providerOptions)) != null ? _g : getCacheControl(toolResponse.providerOptions)
+ cache_control: (_h = getCacheControl(providerOptions)) != null ? _h : getCacheControl(toolResponse.providerOptions)
  });
  }
  break;
@@ -2714,20 +2716,21 @@ var OpenRouterNonStreamChatCompletionResponseSchema = z7.union([
  type: z7.literal("function"),
  function: z7.object({
  name: z7.string(),
- arguments: z7.string()
+ arguments: z7.string().optional()
  }).passthrough()
  }).passthrough()
  ).optional(),
  annotations: z7.array(
  z7.union([
  // URL citation from web search
+ // title, start_index, end_index are optional as some upstream providers may omit them
  z7.object({
  type: z7.literal("url_citation"),
  url_citation: z7.object({
- end_index: z7.number(),
- start_index: z7.number(),
- title: z7.string(),
  url: z7.string(),
+ title: z7.string().optional(),
+ start_index: z7.number().optional(),
+ end_index: z7.number().optional(),
  content: z7.string().optional()
  }).passthrough()
  }).passthrough(),
@@ -2804,13 +2807,14 @@ var OpenRouterStreamChatCompletionChunkSchema = z7.union([
  annotations: z7.array(
  z7.union([
  // URL citation from web search
+ // title, start_index, end_index are optional as some upstream providers may omit them
  z7.object({
  type: z7.literal("url_citation"),
  url_citation: z7.object({
- end_index: z7.number(),
- start_index: z7.number(),
- title: z7.string(),
  url: z7.string(),
+ title: z7.string().optional(),
+ start_index: z7.number().optional(),
+ end_index: z7.number().optional(),
  content: z7.string().optional()
  }).passthrough()
  }).passthrough(),
@@ -2957,7 +2961,7 @@ var OpenRouterChatLanguageModel = class {
  return baseArgs;
  }
  async doGenerate(options) {
- var _a16, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
+ var _a16, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
  const providerOptions = options.providerOptions || {};
  const openrouterOptions = providerOptions.openrouter || {};
  const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
@@ -3007,7 +3011,8 @@ var OpenRouterChatLanguageModel = class {
  total: (_d = response.usage.completion_tokens) != null ? _d : 0,
  text: void 0,
  reasoning: (_f = (_e = response.usage.completion_tokens_details) == null ? void 0 : _e.reasoning_tokens) != null ? _f : void 0
- }
+ },
+ raw: response.usage
  } : {
  inputTokens: {
  total: 0,
@@ -3019,7 +3024,8 @@ var OpenRouterChatLanguageModel = class {
  total: 0,
  text: void 0,
  reasoning: void 0
- }
+ },
+ raw: void 0
  };
  const reasoningDetails = (_g = choice.message.reasoning_details) != null ? _g : [];
  const reasoning = reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
@@ -3092,7 +3098,7 @@ var OpenRouterChatLanguageModel = class {
  type: "tool-call",
  toolCallId: (_h = toolCall.id) != null ? _h : generateId(),
  toolName: toolCall.function.name,
- input: toolCall.function.arguments,
+ input: (_i = toolCall.function.arguments) != null ? _i : "{}",
  providerMetadata: !reasoningDetailsAttachedToToolCall ? {
  openrouter: {
  reasoning_details: reasoningDetails
@@ -3119,17 +3125,19 @@ var OpenRouterChatLanguageModel = class {
  sourceType: "url",
  id: annotation.url_citation.url,
  url: annotation.url_citation.url,
- title: annotation.url_citation.title,
+ title: (_j = annotation.url_citation.title) != null ? _j : "",
  providerMetadata: {
  openrouter: {
- content: annotation.url_citation.content || ""
+ content: (_k = annotation.url_citation.content) != null ? _k : "",
+ startIndex: (_l = annotation.url_citation.start_index) != null ? _l : 0,
+ endIndex: (_m = annotation.url_citation.end_index) != null ? _m : 0
  }
  }
  });
  }
  }
  }
- const fileAnnotations = (_i = choice.message.annotations) == null ? void 0 : _i.filter(
+ const fileAnnotations = (_n = choice.message.annotations) == null ? void 0 : _n.filter(
  (a) => a.type === "file"
  );
  const hasToolCalls = choice.message.tool_calls && choice.message.tool_calls.length > 0;
@@ -3137,7 +3145,7 @@ var OpenRouterChatLanguageModel = class {
  (d) => d.type === "reasoning.encrypted" /* Encrypted */ && d.data
  );
  const shouldOverrideFinishReason = hasToolCalls && hasEncryptedReasoning && choice.finish_reason === "stop";
- const effectiveFinishReason = shouldOverrideFinishReason ? createFinishReason("tool-calls", (_j = choice.finish_reason) != null ? _j : void 0) : mapOpenRouterFinishReason(choice.finish_reason);
+ const effectiveFinishReason = shouldOverrideFinishReason ? createFinishReason("tool-calls", (_o = choice.finish_reason) != null ? _o : void 0) : mapOpenRouterFinishReason(choice.finish_reason);
  return {
  content,
  finishReason: effectiveFinishReason,
@@ -3145,23 +3153,22 @@ var OpenRouterChatLanguageModel = class {
  warnings: [],
  providerMetadata: {
  openrouter: OpenRouterProviderMetadataSchema.parse({
- provider: (_k = response.provider) != null ? _k : "",
- reasoning_details: (_l = choice.message.reasoning_details) != null ? _l : [],
+ provider: (_p = response.provider) != null ? _p : "",
+ reasoning_details: (_q = choice.message.reasoning_details) != null ? _q : [],
  annotations: fileAnnotations && fileAnnotations.length > 0 ? fileAnnotations : void 0,
- usage: __spreadValues(__spreadValues(__spreadValues({
- promptTokens: (_m = usageInfo.inputTokens.total) != null ? _m : 0,
- completionTokens: (_n = usageInfo.outputTokens.total) != null ? _n : 0,
- totalTokens: ((_o = usageInfo.inputTokens.total) != null ? _o : 0) + ((_p = usageInfo.outputTokens.total) != null ? _p : 0),
- cost: (_q = response.usage) == null ? void 0 : _q.cost
- }, ((_s = (_r = response.usage) == null ? void 0 : _r.prompt_tokens_details) == null ? void 0 : _s.cached_tokens) != null ? {
+ usage: __spreadValues(__spreadValues(__spreadValues(__spreadValues({
+ promptTokens: (_r = usageInfo.inputTokens.total) != null ? _r : 0,
+ completionTokens: (_s = usageInfo.outputTokens.total) != null ? _s : 0,
+ totalTokens: ((_t = usageInfo.inputTokens.total) != null ? _t : 0) + ((_u = usageInfo.outputTokens.total) != null ? _u : 0)
+ }, ((_v = response.usage) == null ? void 0 : _v.cost) != null ? { cost: response.usage.cost } : {}), ((_x = (_w = response.usage) == null ? void 0 : _w.prompt_tokens_details) == null ? void 0 : _x.cached_tokens) != null ? {
  promptTokensDetails: {
  cachedTokens: response.usage.prompt_tokens_details.cached_tokens
  }
- } : {}), ((_u = (_t = response.usage) == null ? void 0 : _t.completion_tokens_details) == null ? void 0 : _u.reasoning_tokens) != null ? {
+ } : {}), ((_z = (_y = response.usage) == null ? void 0 : _y.completion_tokens_details) == null ? void 0 : _z.reasoning_tokens) != null ? {
  completionTokensDetails: {
  reasoningTokens: response.usage.completion_tokens_details.reasoning_tokens
  }
- } : {}), ((_w = (_v = response.usage) == null ? void 0 : _v.cost_details) == null ? void 0 : _w.upstream_inference_cost) != null ? {
+ } : {}), ((_B = (_A = response.usage) == null ? void 0 : _A.cost_details) == null ? void 0 : _B.upstream_inference_cost) != null ? {
  costDetails: {
  upstreamInferenceCost: response.usage.cost_details.upstream_inference_cost
  }
@@ -3214,9 +3221,11 @@ var OpenRouterChatLanguageModel = class {
  total: void 0,
  text: void 0,
  reasoning: void 0
- }
+ },
+ raw: void 0
  };
  const openrouterUsage = {};
+ let rawUsage;
  const accumulatedReasoningDetails = [];
  let reasoningDetailsAttachedToToolCall = false;
  const accumulatedFileAnnotations = [];
@@ -3230,7 +3239,10 @@ var OpenRouterChatLanguageModel = class {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
- var _a17, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
+ var _a17, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+ if (options.includeRawChunks) {
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+ }
  if (!chunk.success) {
  finishReason = createFinishReason("error");
  controller.enqueue({ type: "error", error: chunk.error });
@@ -3261,6 +3273,7 @@ var OpenRouterChatLanguageModel = class {
  if (value.usage != null) {
  usage.inputTokens.total = value.usage.prompt_tokens;
  usage.outputTokens.total = value.usage.completion_tokens;
+ rawUsage = value.usage;
  openrouterUsage.promptTokens = value.usage.prompt_tokens;
  if (value.usage.prompt_tokens_details) {
  const cachedInputTokens = (_a17 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a17 : 0;
@@ -3277,7 +3290,9 @@ var OpenRouterChatLanguageModel = class {
  reasoningTokens
  };
  }
- openrouterUsage.cost = value.usage.cost;
+ if (value.usage.cost != null) {
+ openrouterUsage.cost = value.usage.cost;
+ }
  openrouterUsage.totalTokens = value.usage.total_tokens;
  const upstreamInferenceCost = (_c = value.usage.cost_details) == null ? void 0 : _c.upstream_inference_cost;
  if (upstreamInferenceCost != null) {
@@ -3390,10 +3405,12 @@ var OpenRouterChatLanguageModel = class {
  sourceType: "url",
  id: annotation.url_citation.url,
  url: annotation.url_citation.url,
- title: annotation.url_citation.title,
+ title: (_d = annotation.url_citation.title) != null ? _d : "",
  providerMetadata: {
  openrouter: {
- content: annotation.url_citation.content || ""
+ content: (_e = annotation.url_citation.content) != null ? _e : "",
+ startIndex: (_f = annotation.url_citation.start_index) != null ? _f : 0,
+ endIndex: (_g = annotation.url_citation.end_index) != null ? _g : 0
  }
  }
  });
@@ -3409,7 +3426,7 @@ var OpenRouterChatLanguageModel = class {
  }
  if (delta.tool_calls != null) {
  for (const toolCallDelta of delta.tool_calls) {
- const index = (_d = toolCallDelta.index) != null ? _d : toolCalls.length - 1;
+ const index = (_h = toolCallDelta.index) != null ? _h : toolCalls.length - 1;
  if (toolCalls[index] == null) {
  if (toolCallDelta.type !== "function") {
  throw new InvalidResponseDataError({
@@ -3423,7 +3440,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
+ if (((_i = toolCallDelta.function) == null ? void 0 : _i.name) == null) {
  throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -3434,7 +3451,7 @@ var OpenRouterChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
+ arguments: (_j = toolCallDelta.function.arguments) != null ? _j : ""
  },
  inputStarted: false,
  sent: false
@@ -3446,7 +3463,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Tool call at index ${index} is missing after creation.`
  });
  }
- if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
+ if (((_k = toolCall2.function) == null ? void 0 : _k.name) != null && ((_l = toolCall2.function) == null ? void 0 : _l.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
  toolCall2.inputStarted = true;
  controller.enqueue({
  type: "tool-input-start",
@@ -3497,18 +3514,18 @@ var OpenRouterChatLanguageModel = class {
  toolName: toolCall.function.name
  });
  }
- if (((_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null) {
- toolCall.function.arguments += (_k = (_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null ? _k : "";
+ if (((_m = toolCallDelta.function) == null ? void 0 : _m.arguments) != null) {
+ toolCall.function.arguments += (_o = (_n = toolCallDelta.function) == null ? void 0 : _n.arguments) != null ? _o : "";
  }
  controller.enqueue({
  type: "tool-input-delta",
  id: toolCall.id,
- delta: (_l = toolCallDelta.function.arguments) != null ? _l : ""
+ delta: (_p = toolCallDelta.function.arguments) != null ? _p : ""
  });
- if (((_m = toolCall.function) == null ? void 0 : _m.name) != null && ((_n = toolCall.function) == null ? void 0 : _n.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+ if (((_q = toolCall.function) == null ? void 0 : _q.name) != null && ((_r = toolCall.function) == null ? void 0 : _r.arguments) != null && isParsableJson(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
- toolCallId: (_o = toolCall.id) != null ? _o : generateId(),
+ toolCallId: (_s = toolCall.id) != null ? _s : generateId(),
  toolName: toolCall.function.name,
  input: toolCall.function.arguments,
  providerMetadata: !reasoningDetailsAttachedToToolCall ? {
@@ -3585,6 +3602,7 @@ var OpenRouterChatLanguageModel = class {
  if (accumulatedFileAnnotations.length > 0) {
  openrouterMetadata.annotations = accumulatedFileAnnotations;
  }
+ usage.raw = rawUsage;
  controller.enqueue({
  type: "finish",
  finishReason,
@@ -3712,6 +3730,7 @@ var OpenRouterCompletionChunkSchema = z8.union([
  z8.object({
  id: z8.string().optional(),
  model: z8.string().optional(),
+ provider: z8.string().optional(),
  choices: z8.array(
  z8.object({
  text: z8.string(),
@@ -3819,7 +3838,7 @@ var OpenRouterCompletionLanguageModel = class {
  }, this.config.extraBody), this.settings.extraBody);
  }
  async doGenerate(options) {
- var _a16, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+ var _a16, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
  const providerOptions = options.providerOptions || {};
  const openrouterOptions = providerOptions.openrouter || {};
  const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
@@ -3876,9 +3895,32 @@ var OpenRouterCompletionLanguageModel = class {
  total: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0,
  text: void 0,
  reasoning: (_k = (_j = (_i = response.usage) == null ? void 0 : _i.completion_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0
- }
+ },
+ raw: (_l = response.usage) != null ? _l : void 0
  },
  warnings: [],
+ providerMetadata: {
+ openrouter: OpenRouterProviderMetadataSchema.parse({
+ provider: (_m = response.provider) != null ? _m : "",
+ usage: __spreadValues(__spreadValues(__spreadValues(__spreadValues({
+ promptTokens: (_o = (_n = response.usage) == null ? void 0 : _n.prompt_tokens) != null ? _o : 0,
+ completionTokens: (_q = (_p = response.usage) == null ? void 0 : _p.completion_tokens) != null ? _q : 0,
+ totalTokens: ((_s = (_r = response.usage) == null ? void 0 : _r.prompt_tokens) != null ? _s : 0) + ((_u = (_t = response.usage) == null ? void 0 : _t.completion_tokens) != null ? _u : 0)
+ }, ((_v = response.usage) == null ? void 0 : _v.cost) != null ? { cost: response.usage.cost } : {}), ((_x = (_w = response.usage) == null ? void 0 : _w.prompt_tokens_details) == null ? void 0 : _x.cached_tokens) != null ? {
+ promptTokensDetails: {
+ cachedTokens: response.usage.prompt_tokens_details.cached_tokens
+ }
+ } : {}), ((_z = (_y = response.usage) == null ? void 0 : _y.completion_tokens_details) == null ? void 0 : _z.reasoning_tokens) != null ? {
+ completionTokensDetails: {
+ reasoningTokens: response.usage.completion_tokens_details.reasoning_tokens
+ }
+ } : {}), ((_B = (_A = response.usage) == null ? void 0 : _A.cost_details) == null ? void 0 : _B.upstream_inference_cost) != null ? {
+ costDetails: {
+ upstreamInferenceCost: response.usage.cost_details.upstream_inference_cost
+ }
+ } : {})
+ })
+ },
  response: {
  headers: responseHeaders
  }
@@ -3918,14 +3960,20 @@ var OpenRouterCompletionLanguageModel = class {
  total: void 0,
  text: void 0,
  reasoning: void 0
- }
+ },
+ raw: void 0
  };
  const openrouterUsage = {};
+ let provider;
+ let rawUsage;
  return {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
  var _a16, _b16, _c;
+ if (options.includeRawChunks) {
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+ }
  if (!chunk.success) {
  finishReason = createFinishReason("error");
  controller.enqueue({ type: "error", error: chunk.error });
@@ -3937,9 +3985,13 @@ var OpenRouterCompletionLanguageModel = class {
  controller.enqueue({ type: "error", error: value.error });
  return;
  }
+ if (value.provider) {
+ provider = value.provider;
+ }
  if (value.usage != null) {
  usage.inputTokens.total = value.usage.prompt_tokens;
  usage.outputTokens.total = value.usage.completion_tokens;
+ rawUsage = value.usage;
  openrouterUsage.promptTokens = value.usage.prompt_tokens;
  if (value.usage.prompt_tokens_details) {
  const cachedInputTokens = (_a16 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a16 : 0;
@@ -3956,7 +4008,9 @@ var OpenRouterCompletionLanguageModel = class {
  reasoningTokens
  };
  }
- openrouterUsage.cost = value.usage.cost;
+ if (value.usage.cost != null) {
+ openrouterUsage.cost = value.usage.cost;
+ }
  openrouterUsage.totalTokens = value.usage.total_tokens;
  const upstreamInferenceCost = (_c = value.usage.cost_details) == null ? void 0 : _c.upstream_inference_cost;
  if (upstreamInferenceCost != null) {
@@ -3978,14 +4032,19 @@ var OpenRouterCompletionLanguageModel = class {
  }
  },
  flush(controller) {
+ usage.raw = rawUsage;
+ const openrouterMetadata = {
+ usage: openrouterUsage
+ };
+ if (provider !== void 0) {
+ openrouterMetadata.provider = provider;
+ }
  controller.enqueue({
  type: "finish",
  finishReason,
  usage,
  providerMetadata: {
- openrouter: {
- usage: openrouterUsage
- }
+ openrouter: openrouterMetadata
  }
  });
  }
@@ -3997,8 +4056,227 @@ var OpenRouterCompletionLanguageModel = class {
  };
  }
  };
+
+ // src/embedding/schemas.ts
+ import { z as z9 } from "zod/v4";
+ var openrouterEmbeddingUsageSchema = z9.object({
+ prompt_tokens: z9.number(),
+ total_tokens: z9.number(),
+ cost: z9.number().optional()
+ });
+ var openrouterEmbeddingDataSchema = z9.object({
+ object: z9.literal("embedding"),
+ embedding: z9.array(z9.number()),
+ index: z9.number().optional()
+ });
+ var OpenRouterEmbeddingResponseSchema = z9.object({
+ id: z9.string().optional(),
+ object: z9.literal("list"),
+ data: z9.array(openrouterEmbeddingDataSchema),
+ model: z9.string(),
+ provider: z9.string().optional(),
+ usage: openrouterEmbeddingUsageSchema.optional()
+ });
+
+ // src/embedding/index.ts
+ var OpenRouterEmbeddingModel = class {
+ constructor(modelId, settings, config) {
+ this.specificationVersion = "v3";
+ this.provider = "openrouter";
+ this.maxEmbeddingsPerCall = void 0;
+ this.supportsParallelCalls = true;
+ this.modelId = modelId;
+ this.settings = settings;
+ this.config = config;
+ }
+ async doEmbed(options) {
+ var _a16, _b16, _c, _d, _e, _f;
+ const { values, abortSignal, headers } = options;
+ const args = __spreadValues(__spreadValues({
+ model: this.modelId,
+ input: values,
+ user: this.settings.user,
+ provider: this.settings.provider
+ }, this.config.extraBody), this.settings.extraBody);
+ const { value: responseValue, responseHeaders } = await postJsonToApi({
+ url: this.config.url({
+ path: "/embeddings",
+ modelId: this.modelId
+ }),
+ headers: combineHeaders(this.config.headers(), headers),
+ body: args,
+ failedResponseHandler: openrouterFailedResponseHandler,
+ successfulResponseHandler: createJsonResponseHandler(
+ OpenRouterEmbeddingResponseSchema
+ ),
+ abortSignal,
+ fetch: this.config.fetch
+ });
+ return {
+ embeddings: responseValue.data.map((item) => item.embedding),
+ usage: responseValue.usage ? { tokens: responseValue.usage.prompt_tokens } : void 0,
+ providerMetadata: {
+ openrouter: OpenRouterProviderMetadataSchema.parse({
+ provider: (_a16 = responseValue.provider) != null ? _a16 : "",
+ usage: __spreadValues({
+ promptTokens: (_c = (_b16 = responseValue.usage) == null ? void 0 : _b16.prompt_tokens) != null ? _c : 0,
+ completionTokens: 0,
+ totalTokens: (_e = (_d = responseValue.usage) == null ? void 0 : _d.total_tokens) != null ? _e : 0
+ }, ((_f = responseValue.usage) == null ? void 0 : _f.cost) != null ? { cost: responseValue.usage.cost } : {})
+ })
+ },
+ response: {
+ headers: responseHeaders,
+ body: responseValue
+ },
+ warnings: []
+ };
+ }
+ };
+
+ // src/image/schemas.ts
+ import { z as z10 } from "zod/v4";
+ var OpenRouterImageResponseSchema = z10.object({
+ id: z10.string().optional(),
+ object: z10.string().optional(),
+ created: z10.number().optional(),
+ model: z10.string(),
+ choices: z10.array(
+ z10.object({
+ index: z10.number(),
+ message: z10.object({
+ role: z10.string(),
+ content: z10.string().nullable().optional(),
+ images: z10.array(
+ z10.object({
+ type: z10.literal("image_url"),
+ image_url: z10.object({
+ url: z10.string()
+ })
+ }).passthrough()
+ ).optional()
+ }).passthrough(),
+ finish_reason: z10.string().nullable().optional()
+ }).passthrough()
+ ),
+ usage: z10.object({
+ prompt_tokens: z10.number(),
+ completion_tokens: z10.number(),
+ total_tokens: z10.number()
+ }).passthrough().optional()
+ }).passthrough();
+
+ // src/image/index.ts
+ var OpenRouterImageModel = class {
+ constructor(modelId, settings, config) {
+ this.specificationVersion = "v3";
+ this.provider = "openrouter";
+ this.maxImagesPerCall = 1;
+ this.modelId = modelId;
+ this.settings = settings;
+ this.config = config;
+ }
+ async doGenerate(options) {
+ var _a16;
+ const {
+ prompt,
+ n,
+ size,
+ aspectRatio,
+ seed,
+ files,
+ mask,
+ abortSignal,
+ headers,
+ providerOptions
+ } = options;
+ const openrouterOptions = (providerOptions == null ? void 0 : providerOptions.openrouter) || {};
+ const warnings = [];
+ if (files !== void 0 && files.length > 0) {
+ throw new UnsupportedFunctionalityError({
+ functionality: "image editing (files parameter)"
+ });
+ }
+ if (mask !== void 0) {
+ throw new UnsupportedFunctionalityError({
+ functionality: "image inpainting (mask parameter)"
+ });
+ }
+ if (n > 1) {
+ warnings.push({
+ type: "unsupported",
+ feature: "n > 1",
+ details: `OpenRouter image generation returns 1 image per call. Requested ${n} images.`
+ });
+ }
+ if (size !== void 0) {
+ warnings.push({
+ type: "unsupported",
+ feature: "size",
+ details: "Use aspectRatio instead. Size parameter is not supported by OpenRouter image generation."
+ });
+ }
+ const imageConfig = aspectRatio !== void 0 ? { aspect_ratio: aspectRatio } : void 0;
+ const body = __spreadValues(__spreadValues(__spreadValues(__spreadValues(__spreadValues(__spreadValues(__spreadValues({
+ model: this.modelId,
+ messages: [
+ {
+ role: "user",
+ content: prompt != null ? prompt : ""
+ }
+ ],
+ modalities: ["image", "text"]
+ }, imageConfig !== void 0 && { image_config: imageConfig }), seed !== void 0 && { seed }), this.settings.user !== void 0 && { user: this.settings.user }), this.settings.provider !== void 0 && {
+ provider: this.settings.provider
+ }), this.config.extraBody), this.settings.extraBody), openrouterOptions);
+ const { value: responseValue, responseHeaders } = await postJsonToApi({
+ url: this.config.url({
+ path: "/chat/completions",
+ modelId: this.modelId
+ }),
+ headers: combineHeaders(this.config.headers(), headers),
+ body,
+ failedResponseHandler: openrouterFailedResponseHandler,
+ successfulResponseHandler: createJsonResponseHandler(
+ OpenRouterImageResponseSchema
+ ),
+ abortSignal,
+ fetch: this.config.fetch
+ });
+ const choice = responseValue.choices[0];
+ if (!choice) {
+ throw new NoContentGeneratedError({
+ message: "No choice in response"
+ });
+ }
+ const images = [];
+ if ((_a16 = choice.message) == null ? void 0 : _a16.images) {
+ for (const image of choice.message.images) {
+ const dataUrl = image.image_url.url;
+ images.push(getBase64FromDataUrl(dataUrl));
+ }
+ }
+ const usage = responseValue.usage ? {
+ inputTokens: responseValue.usage.prompt_tokens,
+ outputTokens: responseValue.usage.completion_tokens,
+ totalTokens: responseValue.usage.total_tokens
+ } : void 0;
+ return {
+ images,
+ warnings,
+ response: {
+ timestamp: /* @__PURE__ */ new Date(),
+ modelId: responseValue.model,
+ headers: responseHeaders
+ },
+ usage
+ };
+ }
+ };
  export {
  OpenRouterChatLanguageModel,
- OpenRouterCompletionLanguageModel
+ OpenRouterCompletionLanguageModel,
+ OpenRouterEmbeddingModel,
+ OpenRouterImageModel
  };
  //# sourceMappingURL=index.mjs.map