@openrouter/ai-sdk-provider 0.4.6 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -262,7 +262,7 @@ var OpenRouterChatLanguageModel = class {
  }) {
  var _a;
  const type = mode.type;
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata["openrouter"]) != null ? _a : {};
+ const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
  const baseArgs = __spreadValues(__spreadValues(__spreadValues({
  // model id:
  model: this.modelId,
@@ -287,7 +287,8 @@ var OpenRouterChatLanguageModel = class {
  messages: convertToOpenRouterChatMessages(prompt),
  // OpenRouter specific settings:
  include_reasoning: this.settings.includeReasoning,
- reasoning: this.settings.reasoning
+ reasoning: this.settings.reasoning,
+ usage: this.settings.usage
  }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
  switch (type) {
  case "regular": {
@@ -323,7 +324,7 @@ var OpenRouterChatLanguageModel = class {
  }
  }
  async doGenerate(options) {
- var _b, _c, _d, _e, _f, _g, _h;
+ var _b, _c, _d, _e, _f, _g, _h, _i, _j;
  const args = this.getArgs(options);
  const { responseHeaders, value: response } = await postJsonToApi({
  url: this.config.url({
@@ -344,14 +345,39 @@ var OpenRouterChatLanguageModel = class {
  if (!choice) {
  throw new Error("No choice in response");
  }
- return {
+ const usageInfo = response.usage ? {
+ promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
+ completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
+ } : {
+ promptTokens: 0,
+ completionTokens: 0
+ };
+ const providerMetadata = {};
+ if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
+ providerMetadata.openrouter = {
+ usage: {
+ promptTokens: response.usage.prompt_tokens,
+ promptTokensDetails: response.usage.prompt_tokens_details ? {
+ cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
+ } : void 0,
+ completionTokens: response.usage.completion_tokens,
+ completionTokensDetails: response.usage.completion_tokens_details ? {
+ reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
+ } : void 0,
+ cost: response.usage.cost,
+ totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
+ }
+ };
+ }
+ const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
+ return __spreadValues({
  response: {
  id: response.id,
  modelId: response.model
  },
- text: (_b = choice.message.content) != null ? _b : void 0,
- reasoning: (_c = choice.message.reasoning) != null ? _c : void 0,
- toolCalls: (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
+ text: (_h = choice.message.content) != null ? _h : void 0,
+ reasoning: (_i = choice.message.reasoning) != null ? _i : void 0,
+ toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
  var _a2;
  return {
  toolCallType: "function",
@@ -361,17 +387,15 @@ var OpenRouterChatLanguageModel = class {
  };
  }),
  finishReason: mapOpenRouterFinishReason(choice.finish_reason),
- usage: {
- promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
- completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
- },
+ usage: usageInfo,
  rawCall: { rawPrompt, rawSettings },
  rawResponse: { headers: responseHeaders },
  warnings: [],
  logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
- };
+ }, hasProviderMetadata ? { providerMetadata } : {});
  }
  async doStream(options) {
+ var _a, _c;
  const args = this.getArgs(options);
  const { responseHeaders, value: response } = await postJsonToApi({
  url: this.config.url({
@@ -382,7 +406,9 @@ var OpenRouterChatLanguageModel = class {
  body: __spreadProps(__spreadValues({}, args), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
- stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+ stream_options: this.config.compatibility === "strict" ? __spreadValues({
+ include_usage: true
+ }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
  }),
  failedResponseHandler: openrouterFailedResponseHandler,
  successfulResponseHandler: createEventSourceResponseHandler(
@@ -391,7 +417,7 @@ var OpenRouterChatLanguageModel = class {
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
- const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
+ const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
  const toolCalls = [];
  let finishReason = "other";
  let usage = {
@@ -399,11 +425,13 @@ var OpenRouterChatLanguageModel = class {
  completionTokens: Number.NaN
  };
  let logprobs;
+ const openrouterUsage = {};
+ const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
  return {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  if (!chunk.success) {
  finishReason = "error";
  controller.enqueue({ type: "error", error: chunk.error });
@@ -432,6 +460,20 @@ var OpenRouterChatLanguageModel = class {
  promptTokens: value.usage.prompt_tokens,
  completionTokens: value.usage.completion_tokens
  };
+ openrouterUsage.promptTokens = value.usage.prompt_tokens;
+ if (value.usage.prompt_tokens_details) {
+ openrouterUsage.promptTokensDetails = {
+ cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
+ };
+ }
+ openrouterUsage.completionTokens = value.usage.completion_tokens;
+ if (value.usage.completion_tokens_details) {
+ openrouterUsage.completionTokensDetails = {
+ reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
+ };
+ }
+ openrouterUsage.cost = value.usage.cost;
+ openrouterUsage.totalTokens = value.usage.total_tokens;
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -476,7 +518,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_a2 = toolCallDelta.function) == null ? void 0 : _a2.name) == null) {
+ if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
  throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -487,7 +529,7 @@ var OpenRouterChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
+ arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
  },
  sent: false
  };
@@ -495,7 +537,7 @@ var OpenRouterChatLanguageModel = class {
  if (toolCall2 == null) {
  throw new Error("Tool call is missing");
  }
- if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
+ if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
@@ -506,7 +548,7 @@ var OpenRouterChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
+ toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -518,21 +560,21 @@ var OpenRouterChatLanguageModel = class {
  if (toolCall == null) {
  throw new Error("Tool call is missing");
  }
- if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
- toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+ toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
  }
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
  toolCallId: toolCall.id,
  toolName: toolCall.function.name,
- argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+ argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
  });
- if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+ if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
+ toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -558,12 +600,19 @@ var OpenRouterChatLanguageModel = class {
  }
  }
  }
- controller.enqueue({
+ const providerMetadata = {};
+ if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
+ providerMetadata.openrouter = {
+ usage: openrouterUsage
+ };
+ }
+ const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
+ controller.enqueue(__spreadValues({
  type: "finish",
  finishReason,
  logprobs,
  usage
- });
+ }, hasProviderMetadata ? { providerMetadata } : {}));
  }
  })
  ),
@@ -578,8 +627,15 @@ var OpenRouterChatCompletionBaseResponseSchema = z2.object({
  model: z2.string().optional(),
  usage: z2.object({
  prompt_tokens: z2.number(),
+ prompt_tokens_details: z2.object({
+ cached_tokens: z2.number()
+ }).optional(),
  completion_tokens: z2.number(),
- total_tokens: z2.number()
+ completion_tokens_details: z2.object({
+ reasoning_tokens: z2.number()
+ }).optional(),
+ total_tokens: z2.number(),
+ cost: z2.number().optional()
  }).nullish()
  });
  var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
@@ -676,14 +732,13 @@ function prepareToolsAndToolChoice(mode) {
  parameters: tool.parameters
  }
  };
- } else {
- return {
- type: "function",
- function: {
- name: tool.name
- }
- };
  }
+ return {
+ type: "function",
+ function: {
+ name: tool.name
+ }
+ };
  });
  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
@@ -880,7 +935,7 @@ var OpenRouterCompletionLanguageModel = class {
  }) {
  var _a, _b;
  const type = mode.type;
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata["openrouter"]) != null ? _a : {};
+ const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
  const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
  prompt,
  inputFormat
@@ -1177,9 +1232,7 @@ function createOpenRouter(options = {}) {
  }
  return createChatModel(modelId, settings);
  };
- const provider = function(modelId, settings) {
- return createLanguageModel(modelId, settings);
- };
+ const provider = (modelId, settings) => createLanguageModel(modelId, settings);
  provider.languageModel = createLanguageModel;
  provider.chat = createChatModel;
  provider.completion = createCompletionModel;
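
Note on the change above: the `usage` setting introduced in this release enables OpenRouter's usage accounting. When `usage.include` is true, the provider sends `usage: { include: true }` in the request body and, if the API returns usage data, surfaces it as `providerMetadata.openrouter.usage` (prompt/completion/total tokens, cached and reasoning token details, and cost). The snippet below is an illustrative sketch only; the model id is a placeholder and the exact property for reading provider metadata on the result depends on the AI SDK version in use.

import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { generateText } from 'ai';

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = await generateText({
  // Placeholder model id; any OpenRouter-routed model should work here.
  model: openrouter('openai/gpt-4o-mini', {
    // New in 0.5.0 per this diff: opt in to usage accounting.
    usage: { include: true },
  }),
  prompt: 'Say hello.',
});

// With usage accounting enabled, token and cost details are attached
// under the "openrouter" key of the provider metadata (field name may
// be experimental_providerMetadata on older AI SDK versions).
console.log(result.providerMetadata?.openrouter?.usage);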