@openrouter/ai-sdk-provider 0.4.6 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -92,14 +92,15 @@ function convertToOpenRouterChatMessages(prompt) {
  const messageCacheControl = getCacheControl(providerMetadata);
  const contentParts = content.map(
  (part) => {
- var _a2, _b2, _c2, _d, _e, _f;
+ var _a2, _b2, _c2, _d;
+ const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
  switch (part.type) {
  case "text":
  return {
  type: "text",
  text: part.text,
  // For text parts, only use part-specific cache control
- cache_control: (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl
+ cache_control: cacheControl
  };
  case "image":
  return {
@@ -110,18 +111,18 @@ function convertToOpenRouterChatMessages(prompt) {
  )}`
  },
  // For image parts, use part-specific or message-level cache control
- cache_control: (_c2 = getCacheControl(part.providerMetadata)) != null ? _c2 : messageCacheControl
+ cache_control: cacheControl
  };
  case "file":
  return {
  type: "file",
  file: {
  filename: String(
- (_e = (_d = part.providerMetadata) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.filename
+ (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
  ),
  file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.data)}` : `data:${part.mimeType};base64,${part.data}`
  },
- cache_control: (_f = getCacheControl(part.providerMetadata)) != null ? _f : messageCacheControl
+ cache_control: cacheControl
  };
  default: {
  const _exhaustiveCheck = part;
@@ -158,6 +159,7 @@ function convertToOpenRouterChatMessages(prompt) {
  });
  break;
  }
+ case "file":
  // TODO: Handle reasoning and redacted-reasoning
  case "reasoning":
  case "redacted-reasoning":
@@ -273,7 +275,7 @@ var OpenRouterChatLanguageModel = class {
  }) {
  var _a;
  const type = mode.type;
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata["openrouter"]) != null ? _a : {};
+ const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
  const baseArgs = __spreadValues(__spreadValues(__spreadValues({
  // model id:
  model: this.modelId,
@@ -298,7 +300,8 @@ var OpenRouterChatLanguageModel = class {
  messages: convertToOpenRouterChatMessages(prompt),
  // OpenRouter specific settings:
  include_reasoning: this.settings.includeReasoning,
- reasoning: this.settings.reasoning
+ reasoning: this.settings.reasoning,
+ usage: this.settings.usage
  }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
  switch (type) {
  case "regular": {
@@ -334,7 +337,7 @@ var OpenRouterChatLanguageModel = class {
  }
  }
  async doGenerate(options) {
- var _b, _c, _d, _e, _f, _g, _h;
+ var _b, _c, _d, _e, _f, _g, _h, _i, _j;
  const args = this.getArgs(options);
  const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
@@ -355,14 +358,39 @@ var OpenRouterChatLanguageModel = class {
  if (!choice) {
  throw new Error("No choice in response");
  }
- return {
+ const usageInfo = response.usage ? {
+ promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
+ completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
+ } : {
+ promptTokens: 0,
+ completionTokens: 0
+ };
+ const providerMetadata = {};
+ if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
+ providerMetadata.openrouter = {
+ usage: {
+ promptTokens: response.usage.prompt_tokens,
+ promptTokensDetails: response.usage.prompt_tokens_details ? {
+ cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
+ } : void 0,
+ completionTokens: response.usage.completion_tokens,
+ completionTokensDetails: response.usage.completion_tokens_details ? {
+ reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
+ } : void 0,
+ cost: response.usage.cost,
+ totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
+ }
+ };
+ }
+ const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
+ return __spreadValues({
  response: {
  id: response.id,
  modelId: response.model
  },
- text: (_b = choice.message.content) != null ? _b : void 0,
- reasoning: (_c = choice.message.reasoning) != null ? _c : void 0,
- toolCalls: (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
+ text: (_h = choice.message.content) != null ? _h : void 0,
+ reasoning: (_i = choice.message.reasoning) != null ? _i : void 0,
+ toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
  var _a2;
  return {
  toolCallType: "function",
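When `settings.usage.include` is set, `doGenerate` now builds an `openrouter.usage` block (prompt/completion tokens, cached and reasoning token details, cost, total tokens) and returns it as provider metadata. A hedged sketch of reading it through the Vercel AI SDK's `generateText`; whether the result exposes `providerMetadata` or the older `experimental_providerMetadata` depends on the installed `ai` version, so the access below is kept deliberately loose:

// Hedged sketch: consuming the usage-accounting metadata added in 0.6.0.
import { generateText } from "ai";
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

async function main() {
  const result = await generateText({
    // Illustrative model id; `usage: { include: true }` opts into the metadata above.
    model: openrouter.chat("openai/gpt-4o-mini", { usage: { include: true } }),
    prompt: "Say hello.",
  });

  // Property name differs across `ai` versions, so read it loosely.
  const metadata =
    (result as any).providerMetadata ?? (result as any).experimental_providerMetadata;
  // Shape mirrors the object built in doGenerate: promptTokens, promptTokensDetails.cachedTokens,
  // completionTokens, completionTokensDetails.reasoningTokens, cost, totalTokens.
  console.log(metadata?.openrouter?.usage);
}

main().catch(console.error);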
@@ -372,17 +400,15 @@ var OpenRouterChatLanguageModel = class {
  };
  }),
  finishReason: mapOpenRouterFinishReason(choice.finish_reason),
- usage: {
- promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
- completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
- },
+ usage: usageInfo,
  rawCall: { rawPrompt, rawSettings },
  rawResponse: { headers: responseHeaders },
  warnings: [],
  logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
- };
+ }, hasProviderMetadata ? { providerMetadata } : {});
  }
  async doStream(options) {
+ var _a, _c;
  const args = this.getArgs(options);
  const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
@@ -393,7 +419,9 @@ var OpenRouterChatLanguageModel = class {
  body: __spreadProps(__spreadValues({}, args), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
- stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+ stream_options: this.config.compatibility === "strict" ? __spreadValues({
+ include_usage: true
+ }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
  }),
  failedResponseHandler: openrouterFailedResponseHandler,
  successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
@@ -402,7 +430,7 @@ var OpenRouterChatLanguageModel = class {
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
- const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
+ const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
  const toolCalls = [];
  let finishReason = "other";
  let usage = {
@@ -410,11 +438,13 @@ var OpenRouterChatLanguageModel = class {
  completionTokens: Number.NaN
  };
  let logprobs;
+ const openrouterUsage = {};
+ const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
  return {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  if (!chunk.success) {
  finishReason = "error";
  controller.enqueue({ type: "error", error: chunk.error });
@@ -443,6 +473,20 @@ var OpenRouterChatLanguageModel = class {
  promptTokens: value.usage.prompt_tokens,
  completionTokens: value.usage.completion_tokens
  };
+ openrouterUsage.promptTokens = value.usage.prompt_tokens;
+ if (value.usage.prompt_tokens_details) {
+ openrouterUsage.promptTokensDetails = {
+ cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
+ };
+ }
+ openrouterUsage.completionTokens = value.usage.completion_tokens;
+ if (value.usage.completion_tokens_details) {
+ openrouterUsage.completionTokensDetails = {
+ reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
+ };
+ }
+ openrouterUsage.cost = value.usage.cost;
+ openrouterUsage.totalTokens = value.usage.total_tokens;
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -487,7 +531,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_a2 = toolCallDelta.function) == null ? void 0 : _a2.name) == null) {
+ if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
  throw new import_provider.InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -498,7 +542,7 @@ var OpenRouterChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
+ arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
  },
  sent: false
  };
@@ -506,7 +550,7 @@ var OpenRouterChatLanguageModel = class {
  if (toolCall2 == null) {
  throw new Error("Tool call is missing");
  }
- if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
+ if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
@@ -517,7 +561,7 @@ var OpenRouterChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -529,21 +573,21 @@ var OpenRouterChatLanguageModel = class {
  if (toolCall == null) {
  throw new Error("Tool call is missing");
  }
- if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
- toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+ toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
  }
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
  toolCallId: toolCall.id,
  toolName: toolCall.function.name,
- argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+ argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
  });
- if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+ if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -569,12 +613,19 @@ var OpenRouterChatLanguageModel = class {
  }
  }
  }
- controller.enqueue({
+ const providerMetadata = {};
+ if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
+ providerMetadata.openrouter = {
+ usage: openrouterUsage
+ };
+ }
+ const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
+ controller.enqueue(__spreadValues({
  type: "finish",
  finishReason,
  logprobs,
  usage
- });
+ }, hasProviderMetadata ? { providerMetadata } : {}));
  }
  })
  ),
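The streaming path does the same: when usage accounting is enabled, the accumulated `openrouterUsage` object is attached as provider metadata on the final `finish` stream part. A hedged sketch using `streamText` with an `onFinish` callback; as above, the metadata property name varies across `ai` versions and the model id is illustrative:

// Hedged sketch: reading usage accounting from a streamed response.
import { streamText } from "ai";
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

async function main() {
  const result = await streamText({
    model: openrouter.chat("openai/gpt-4o-mini", { usage: { include: true } }),
    prompt: "Stream a short greeting.",
    onFinish(event) {
      // Read loosely: `providerMetadata` vs `experimental_providerMetadata` depends on the `ai` version.
      const metadata =
        (event as any).providerMetadata ?? (event as any).experimental_providerMetadata;
      console.log("OpenRouter usage:", metadata?.openrouter?.usage);
    },
  });

  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }
}

main().catch(console.error);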
@@ -589,8 +640,15 @@ var OpenRouterChatCompletionBaseResponseSchema = import_zod2.z.object({
  model: import_zod2.z.string().optional(),
  usage: import_zod2.z.object({
  prompt_tokens: import_zod2.z.number(),
+ prompt_tokens_details: import_zod2.z.object({
+ cached_tokens: import_zod2.z.number()
+ }).optional(),
  completion_tokens: import_zod2.z.number(),
- total_tokens: import_zod2.z.number()
+ completion_tokens_details: import_zod2.z.object({
+ reasoning_tokens: import_zod2.z.number()
+ }).optional(),
+ total_tokens: import_zod2.z.number(),
+ cost: import_zod2.z.number().optional()
  }).nullish()
  });
  var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
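For reference, an illustrative `usage` payload (values invented) that the extended schema above would now parse; the newly accepted optional fields are `prompt_tokens_details.cached_tokens`, `completion_tokens_details.reasoning_tokens`, and `cost`:

// Illustrative only: an OpenRouter usage block shaped to satisfy the extended schema.
const exampleUsage = {
  prompt_tokens: 1200,
  prompt_tokens_details: { cached_tokens: 1024 },
  completion_tokens: 350,
  completion_tokens_details: { reasoning_tokens: 128 },
  total_tokens: 1550,
  cost: 0.0042,
};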
@@ -687,14 +745,13 @@ function prepareToolsAndToolChoice(mode) {
  parameters: tool.parameters
  }
  };
- } else {
- return {
- type: "function",
- function: {
- name: tool.name
- }
- };
  }
+ return {
+ type: "function",
+ function: {
+ name: tool.name
+ }
+ };
  });
  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
@@ -805,6 +862,11 @@ ${userMessage}
  functionality: "redacted reasoning messages"
  });
  }
+ case "file": {
+ throw new import_provider2.UnsupportedFunctionalityError({
+ functionality: "file attachments"
+ });
+ }
  default: {
  const _exhaustiveCheck = part;
  throw new Error(
@@ -883,7 +945,7 @@ var OpenRouterCompletionLanguageModel = class {
  }) {
  var _a, _b;
  const type = mode.type;
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata["openrouter"]) != null ? _a : {};
+ const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
  const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
  prompt,
  inputFormat