@openrouter/ai-sdk-provider 0.4.5 → 0.5.0

This diff shows the changes between publicly released package versions as they appear in their public registries and is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -81,7 +81,7 @@ function convertToOpenRouterChatMessages(prompt) {
  const messageCacheControl = getCacheControl(providerMetadata);
  const contentParts = content.map(
  (part) => {
- var _a2, _b2, _c2, _d;
+ var _a2, _b2, _c2, _d, _e, _f;
  switch (part.type) {
  case "text":
  return {
@@ -103,9 +103,14 @@ function convertToOpenRouterChatMessages(prompt) {
  };
  case "file":
  return {
- type: "text",
- text: part.data instanceof URL ? part.data.toString() : part.data,
- cache_control: (_d = getCacheControl(part.providerMetadata)) != null ? _d : messageCacheControl
+ type: "file",
+ file: {
+ filename: String(
+ (_e = (_d = part.providerMetadata) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.filename
+ ),
+ file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
+ },
+ cache_control: (_f = getCacheControl(part.providerMetadata)) != null ? _f : messageCacheControl
  };
  default: {
  const _exhaustiveCheck = part;
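File parts are now forwarded as OpenRouter "file" content parts instead of being flattened to text. A minimal sketch of the object the converter emits for such a part, with illustrative values (the filename is read from the part's providerMetadata.openrouter.filename, and Uint8Array data is base64-encoded into a data URL):

// Illustrative values only; field names follow the converter code above.
const filePart = {
  type: "file",
  file: {
    filename: "report.pdf", // assumed example; taken from part.providerMetadata?.openrouter?.filename
    file_data: "data:application/pdf;base64,JVBERi0xLjQK" // Uint8Array data is base64-encoded first
  },
  cache_control: undefined // falls back to the message-level cache_control when unset
};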
@@ -257,7 +262,7 @@ var OpenRouterChatLanguageModel = class {
  }) {
  var _a;
  const type = mode.type;
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata["openrouter"]) != null ? _a : {};
+ const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
  const baseArgs = __spreadValues(__spreadValues(__spreadValues({
  // model id:
  model: this.modelId,
@@ -282,7 +287,8 @@ var OpenRouterChatLanguageModel = class {
  messages: convertToOpenRouterChatMessages(prompt),
  // OpenRouter specific settings:
  include_reasoning: this.settings.includeReasoning,
- reasoning: this.settings.reasoning
+ reasoning: this.settings.reasoning,
+ usage: this.settings.usage
  }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
  switch (type) {
  case "regular": {
@@ -318,7 +324,7 @@ var OpenRouterChatLanguageModel = class {
  }
  }
  async doGenerate(options) {
- var _b, _c, _d, _e, _f, _g, _h;
+ var _b, _c, _d, _e, _f, _g, _h, _i, _j;
  const args = this.getArgs(options);
  const { responseHeaders, value: response } = await postJsonToApi({
  url: this.config.url({
@@ -339,14 +345,39 @@ var OpenRouterChatLanguageModel = class {
  if (!choice) {
  throw new Error("No choice in response");
  }
- return {
+ const usageInfo = response.usage ? {
+ promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
+ completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
+ } : {
+ promptTokens: 0,
+ completionTokens: 0
+ };
+ const providerMetadata = {};
+ if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
+ providerMetadata.openrouter = {
+ usage: {
+ promptTokens: response.usage.prompt_tokens,
+ promptTokensDetails: response.usage.prompt_tokens_details ? {
+ cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
+ } : void 0,
+ completionTokens: response.usage.completion_tokens,
+ completionTokensDetails: response.usage.completion_tokens_details ? {
+ reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
+ } : void 0,
+ cost: response.usage.cost,
+ totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
+ }
+ };
+ }
+ const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
+ return __spreadValues({
  response: {
  id: response.id,
  modelId: response.model
  },
- text: (_b = choice.message.content) != null ? _b : void 0,
- reasoning: (_c = choice.message.reasoning) != null ? _c : void 0,
- toolCalls: (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
+ text: (_h = choice.message.content) != null ? _h : void 0,
+ reasoning: (_i = choice.message.reasoning) != null ? _i : void 0,
+ toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
  var _a2;
  return {
  toolCallType: "function",
@@ -356,17 +387,15 @@ var OpenRouterChatLanguageModel = class {
  };
  }),
  finishReason: mapOpenRouterFinishReason(choice.finish_reason),
- usage: {
- promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
- completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
- },
+ usage: usageInfo,
  rawCall: { rawPrompt, rawSettings },
  rawResponse: { headers: responseHeaders },
  warnings: [],
  logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
- };
+ }, hasProviderMetadata ? { providerMetadata } : {});
  }
  async doStream(options) {
+ var _a, _c;
  const args = this.getArgs(options);
  const { responseHeaders, value: response } = await postJsonToApi({
  url: this.config.url({
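With usage accounting enabled, doGenerate attaches the accounting data to the result as providerMetadata.openrouter.usage; the streaming path below attaches the same object to its final "finish" part. Its shape, as assembled above:

// Field names taken from the code above; optional fields appear only when the
// API returns the corresponding details.
type OpenRouterUsageAccounting = {
  promptTokens: number;
  promptTokensDetails?: { cachedTokens: number };
  completionTokens: number;
  completionTokensDetails?: { reasoningTokens: number };
  cost?: number;
  totalTokens: number;
};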
@@ -377,7 +406,9 @@ var OpenRouterChatLanguageModel = class {
  body: __spreadProps(__spreadValues({}, args), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
- stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+ stream_options: this.config.compatibility === "strict" ? __spreadValues({
+ include_usage: true
+ }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
  }),
  failedResponseHandler: openrouterFailedResponseHandler,
  successfulResponseHandler: createEventSourceResponseHandler(
@@ -386,7 +417,7 @@ var OpenRouterChatLanguageModel = class {
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
- const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
+ const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
  const toolCalls = [];
  let finishReason = "other";
  let usage = {
@@ -394,11 +425,13 @@ var OpenRouterChatLanguageModel = class {
  completionTokens: Number.NaN
  };
  let logprobs;
+ const openrouterUsage = {};
+ const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
  return {
  stream: response.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
  if (!chunk.success) {
  finishReason = "error";
  controller.enqueue({ type: "error", error: chunk.error });
@@ -427,6 +460,20 @@ var OpenRouterChatLanguageModel = class {
  promptTokens: value.usage.prompt_tokens,
  completionTokens: value.usage.completion_tokens
  };
+ openrouterUsage.promptTokens = value.usage.prompt_tokens;
+ if (value.usage.prompt_tokens_details) {
+ openrouterUsage.promptTokensDetails = {
+ cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
+ };
+ }
+ openrouterUsage.completionTokens = value.usage.completion_tokens;
+ if (value.usage.completion_tokens_details) {
+ openrouterUsage.completionTokensDetails = {
+ reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
+ };
+ }
+ openrouterUsage.cost = value.usage.cost;
+ openrouterUsage.totalTokens = value.usage.total_tokens;
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -471,7 +518,7 @@ var OpenRouterChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_a2 = toolCallDelta.function) == null ? void 0 : _a2.name) == null) {
+ if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
  throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -482,7 +529,7 @@ var OpenRouterChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
+ arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
  },
  sent: false
  };
@@ -490,7 +537,7 @@ var OpenRouterChatLanguageModel = class {
  if (toolCall2 == null) {
  throw new Error("Tool call is missing");
  }
- if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
+ if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
@@ -501,7 +548,7 @@ var OpenRouterChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
+ toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -513,21 +560,21 @@ var OpenRouterChatLanguageModel = class {
  if (toolCall == null) {
  throw new Error("Tool call is missing");
  }
- if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
- toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+ toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
  }
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
  toolCallId: toolCall.id,
  toolName: toolCall.function.name,
- argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+ argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
  });
- if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+ if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
+ toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -553,12 +600,19 @@ var OpenRouterChatLanguageModel = class {
  }
  }
  }
- controller.enqueue({
+ const providerMetadata = {};
+ if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
+ providerMetadata.openrouter = {
+ usage: openrouterUsage
+ };
+ }
+ const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
+ controller.enqueue(__spreadValues({
  type: "finish",
  finishReason,
  logprobs,
  usage
- });
+ }, hasProviderMetadata ? { providerMetadata } : {}));
  }
  })
  ),
@@ -573,8 +627,15 @@ var OpenRouterChatCompletionBaseResponseSchema = z2.object({
  model: z2.string().optional(),
  usage: z2.object({
  prompt_tokens: z2.number(),
+ prompt_tokens_details: z2.object({
+ cached_tokens: z2.number()
+ }).optional(),
  completion_tokens: z2.number(),
- total_tokens: z2.number()
+ completion_tokens_details: z2.object({
+ reasoning_tokens: z2.number()
+ }).optional(),
+ total_tokens: z2.number(),
+ cost: z2.number().optional()
  }).nullish()
  });
  var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
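The extended usage schema now also accepts cached-token, reasoning-token, and cost fields. An example payload it validates (values illustrative; the *_details objects and cost are optional):

const usage = {
  prompt_tokens: 1200,
  prompt_tokens_details: { cached_tokens: 1024 },
  completion_tokens: 180,
  completion_tokens_details: { reasoning_tokens: 64 },
  total_tokens: 1380,
  cost: 0.0021
};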
@@ -671,14 +732,13 @@ function prepareToolsAndToolChoice(mode) {
  parameters: tool.parameters
  }
  };
- } else {
- return {
- type: "function",
- function: {
- name: tool.name
- }
- };
  }
+ return {
+ type: "function",
+ function: {
+ name: tool.name
+ }
+ };
  });
  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
@@ -875,7 +935,7 @@ var OpenRouterCompletionLanguageModel = class {
  }) {
  var _a, _b;
  const type = mode.type;
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata["openrouter"]) != null ? _a : {};
+ const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
  const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
  prompt,
  inputFormat
@@ -1172,9 +1232,7 @@ function createOpenRouter(options = {}) {
  }
  return createChatModel(modelId, settings);
  };
- const provider = function(modelId, settings) {
- return createLanguageModel(modelId, settings);
- };
+ const provider = (modelId, settings) => createLanguageModel(modelId, settings);
  provider.languageModel = createLanguageModel;
  provider.chat = createChatModel;
  provider.completion = createCompletionModel;
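The provider object remains callable; calling it directly is shorthand for provider.languageModel(...). A quick sketch (the model slug is only an example):

import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });
// These two calls are equivalent:
const a = openrouter("anthropic/claude-3.5-sonnet");
const b = openrouter.languageModel("anthropic/claude-3.5-sonnet");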