@openrouter/ai-sdk-provider 0.4.6 → 0.6.0
This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +29 -2
- package/dist/index.d.mts +28 -2
- package/dist/index.d.ts +28 -2
- package/dist/index.js +103 -43
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +103 -43
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +28 -2
- package/dist/internal/index.d.ts +28 -2
- package/dist/internal/index.js +102 -40
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +102 -40
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +18 -13
package/dist/internal/index.mjs
CHANGED
@@ -78,14 +78,15 @@ function convertToOpenRouterChatMessages(prompt) {
       const messageCacheControl = getCacheControl(providerMetadata);
       const contentParts = content.map(
         (part) => {
-          var _a2, _b2, _c2, _d
+          var _a2, _b2, _c2, _d;
+          const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
           switch (part.type) {
             case "text":
               return {
                 type: "text",
                 text: part.text,
                 // For text parts, only use part-specific cache control
-                cache_control:
+                cache_control: cacheControl
               };
             case "image":
               return {
@@ -96,18 +97,18 @@ function convertToOpenRouterChatMessages(prompt) {
                 )}`
               },
               // For image parts, use part-specific or message-level cache control
-              cache_control:
+              cache_control: cacheControl
             };
           case "file":
             return {
               type: "file",
               file: {
                 filename: String(
-                  (
+                  (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
                 ),
                 file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
               },
-              cache_control:
+              cache_control: cacheControl
             };
           default: {
             const _exhaustiveCheck = part;
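
These hunks let a message-level cache-control hint (read from the message's provider metadata by `getCacheControl`) cascade to every content part unless a part supplies its own. A heavily hedged sketch of supplying that hint from the AI SDK side; the `providerOptions` field name and the `openrouter.cacheControl` key are assumptions, not confirmed by this diff (older AI SDK versions use `experimental_providerMetadata` instead):

```ts
import { generateText } from "ai";

await generateText({
  model,
  messages: [
    {
      role: "user",
      content: [{ type: "text", text: "Very long, reusable context..." }],
      // Assumed metadata shape: read by getCacheControl() at the message level
      // and now cascaded to each part via the `cacheControl` fallback above.
      providerOptions: {
        openrouter: { cacheControl: { type: "ephemeral" } },
      },
    },
  ],
});
```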
@@ -144,6 +145,7 @@ function convertToOpenRouterChatMessages(prompt) {
           });
           break;
         }
+        case "file":
         // TODO: Handle reasoning and redacted-reasoning
         case "reasoning":
         case "redacted-reasoning":
@@ -259,7 +261,7 @@ var OpenRouterChatLanguageModel = class {
   }) {
     var _a;
     const type = mode.type;
-    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata
+    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
     const baseArgs = __spreadValues(__spreadValues(__spreadValues({
       // model id:
       model: this.modelId,
@@ -284,7 +286,8 @@ var OpenRouterChatLanguageModel = class {
       messages: convertToOpenRouterChatMessages(prompt),
       // OpenRouter specific settings:
       include_reasoning: this.settings.includeReasoning,
-      reasoning: this.settings.reasoning
+      reasoning: this.settings.reasoning,
+      usage: this.settings.usage
     }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
     switch (type) {
       case "regular": {
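
The new `usage` setting is forwarded verbatim into the request body above, so enabling OpenRouter's usage accounting is a matter of passing `usage: { include: true }` in the model settings. A minimal sketch, assuming the usual `createOpenRouter` factory from this package (model id is illustrative):

```ts
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({
  apiKey: process.env.OPENROUTER_API_KEY,
});

// `usage: { include: true }` ends up in the request body via
// `usage: this.settings.usage` in baseArgs above, asking OpenRouter
// to return usage accounting (token details and cost).
const model = openrouter("openai/gpt-4o-mini", {
  usage: { include: true },
});
```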
@@ -320,7 +323,7 @@ var OpenRouterChatLanguageModel = class {
     }
   }
   async doGenerate(options) {
-    var _b, _c, _d, _e, _f, _g, _h;
+    var _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
@@ -341,14 +344,39 @@ var OpenRouterChatLanguageModel = class {
     if (!choice) {
       throw new Error("No choice in response");
     }
-
+    const usageInfo = response.usage ? {
+      promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
+      completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
+    } : {
+      promptTokens: 0,
+      completionTokens: 0
+    };
+    const providerMetadata = {};
+    if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
+      providerMetadata.openrouter = {
+        usage: {
+          promptTokens: response.usage.prompt_tokens,
+          promptTokensDetails: response.usage.prompt_tokens_details ? {
+            cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
+          } : void 0,
+          completionTokens: response.usage.completion_tokens,
+          completionTokensDetails: response.usage.completion_tokens_details ? {
+            reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
+          } : void 0,
+          cost: response.usage.cost,
+          totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
+        }
+      };
+    }
+    const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
+    return __spreadValues({
       response: {
         id: response.id,
         modelId: response.model
       },
-      text: (
-      reasoning: (
-      toolCalls: (
+      text: (_h = choice.message.content) != null ? _h : void 0,
+      reasoning: (_i = choice.message.reasoning) != null ? _i : void 0,
+      toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
         var _a2;
         return {
           toolCallType: "function",
@@ -358,17 +386,15 @@ var OpenRouterChatLanguageModel = class {
         };
       }),
       finishReason: mapOpenRouterFinishReason(choice.finish_reason),
-      usage:
-        promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
-        completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
-      },
+      usage: usageInfo,
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       warnings: [],
       logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
-    };
+    }, hasProviderMetadata ? { providerMetadata } : {});
   }
   async doStream(options) {
+    var _a, _c;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
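
With usage accounting enabled, `doGenerate` now attaches the accounting object under the `openrouter` provider-metadata key. A sketch of reading it with the AI SDK; the exact result field is `providerMetadata` in recent AI SDK releases and `experimental_providerMetadata` in older ones, so adjust for your version:

```ts
import { generateText } from "ai";

const { text, providerMetadata } = await generateText({
  model, // the OpenRouter model from the previous example
  prompt: "Summarize the release notes.",
});

// Shape mirrors the object built in doGenerate above: promptTokens,
// promptTokensDetails.cachedTokens, completionTokens,
// completionTokensDetails.reasoningTokens, totalTokens and cost.
console.log(providerMetadata?.openrouter?.usage);
```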
@@ -379,7 +405,9 @@ var OpenRouterChatLanguageModel = class {
       body: __spreadProps(__spreadValues({}, args), {
         stream: true,
         // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? {
+        stream_options: this.config.compatibility === "strict" ? __spreadValues({
+          include_usage: true
+        }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
       }),
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
@@ -388,7 +416,7 @@ var OpenRouterChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const
+    const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
     const toolCalls = [];
     let finishReason = "other";
     let usage = {
@@ -396,11 +424,13 @@ var OpenRouterChatLanguageModel = class {
       completionTokens: Number.NaN
     };
     let logprobs;
+    const openrouterUsage = {};
+    const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
     return {
       stream: response.pipeThrough(
         new TransformStream({
           transform(chunk, controller) {
-            var _a2,
+            var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
             if (!chunk.success) {
               finishReason = "error";
               controller.enqueue({ type: "error", error: chunk.error });
@@ -429,6 +459,20 @@ var OpenRouterChatLanguageModel = class {
                 promptTokens: value.usage.prompt_tokens,
                 completionTokens: value.usage.completion_tokens
               };
+              openrouterUsage.promptTokens = value.usage.prompt_tokens;
+              if (value.usage.prompt_tokens_details) {
+                openrouterUsage.promptTokensDetails = {
+                  cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
+                };
+              }
+              openrouterUsage.completionTokens = value.usage.completion_tokens;
+              if (value.usage.completion_tokens_details) {
+                openrouterUsage.completionTokensDetails = {
+                  reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
+                };
+              }
+              openrouterUsage.cost = value.usage.cost;
+              openrouterUsage.totalTokens = value.usage.total_tokens;
             }
             const choice = value.choices[0];
             if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -473,7 +517,7 @@ var OpenRouterChatLanguageModel = class {
                   message: `Expected 'id' to be a string.`
                 });
               }
-              if (((
+              if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
                 throw new InvalidResponseDataError({
                   data: toolCallDelta,
                   message: `Expected 'function.name' to be a string.`
@@ -484,7 +528,7 @@ var OpenRouterChatLanguageModel = class {
                 type: "function",
                 function: {
                   name: toolCallDelta.function.name,
-                  arguments: (
+                  arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
                 },
                 sent: false
               };
@@ -492,7 +536,7 @@ var OpenRouterChatLanguageModel = class {
               if (toolCall2 == null) {
                 throw new Error("Tool call is missing");
               }
-              if (((
+              if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call-delta",
                   toolCallType: "function",
@@ -503,7 +547,7 @@ var OpenRouterChatLanguageModel = class {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (
+                  toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
                   toolName: toolCall2.function.name,
                   args: toolCall2.function.arguments
                 });
@@ -515,21 +559,21 @@ var OpenRouterChatLanguageModel = class {
               if (toolCall == null) {
                 throw new Error("Tool call is missing");
               }
-              if (((
-                toolCall.function.arguments += (
+              if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+                toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
               }
               controller.enqueue({
                 type: "tool-call-delta",
                 toolCallType: "function",
                 toolCallId: toolCall.id,
                 toolName: toolCall.function.name,
-                argsTextDelta: (
+                argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
               });
-              if (((
+              if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (
+                  toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
                   toolName: toolCall.function.name,
                   args: toolCall.function.arguments
                 });
@@ -555,12 +599,19 @@ var OpenRouterChatLanguageModel = class {
             }
           }
         }
-
+        const providerMetadata = {};
+        if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
+          providerMetadata.openrouter = {
+            usage: openrouterUsage
+          };
+        }
+        const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
+        controller.enqueue(__spreadValues({
           type: "finish",
           finishReason,
           logprobs,
           usage
-        });
+        }, hasProviderMetadata ? { providerMetadata } : {}));
       }
     })
   ),
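
In the streaming path the same accounting object is attached to the final `finish` stream part, so it surfaces through the stream's finish callback. A sketch with `streamText`; as with `generateText`, older AI SDK versions expose the field as `experimental_providerMetadata`:

```ts
import { streamText } from "ai";

const result = streamText({
  model, // OpenRouter model configured with `usage: { include: true }`
  prompt: "Write a haiku about token accounting.",
  onFinish({ providerMetadata }) {
    // Populated from the `finish` part enqueued above when accounting is enabled.
    console.log(providerMetadata?.openrouter?.usage);
  },
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}
```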
@@ -575,8 +626,15 @@ var OpenRouterChatCompletionBaseResponseSchema = z2.object({
   model: z2.string().optional(),
   usage: z2.object({
     prompt_tokens: z2.number(),
+    prompt_tokens_details: z2.object({
+      cached_tokens: z2.number()
+    }).optional(),
     completion_tokens: z2.number(),
-
+    completion_tokens_details: z2.object({
+      reasoning_tokens: z2.number()
+    }).optional(),
+    total_tokens: z2.number(),
+    cost: z2.number().optional()
   }).nullish()
 });
 var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
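
The extended response schema corresponds to a usage block like the following (field names come from the schema above; the values are illustrative only):

```ts
// Example of a `usage` payload the updated schema accepts; numbers are made up.
const exampleUsage = {
  prompt_tokens: 1200,
  prompt_tokens_details: { cached_tokens: 1024 },
  completion_tokens: 300,
  completion_tokens_details: { reasoning_tokens: 128 },
  total_tokens: 1500,
  cost: 0.00245,
};
```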
@@ -673,14 +731,13 @@ function prepareToolsAndToolChoice(mode) {
           parameters: tool.parameters
         }
       };
-    } else {
-      return {
-        type: "function",
-        function: {
-          name: tool.name
-        }
-      };
     }
+    return {
+      type: "function",
+      function: {
+        name: tool.name
+      }
+    };
   });
   const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
@@ -799,6 +856,11 @@ ${userMessage}
           functionality: "redacted reasoning messages"
         });
       }
+      case "file": {
+        throw new UnsupportedFunctionalityError2({
+          functionality: "file attachments"
+        });
+      }
       default: {
         const _exhaustiveCheck = part;
         throw new Error(
@@ -877,7 +939,7 @@ var OpenRouterCompletionLanguageModel = class {
   }) {
     var _a, _b;
     const type = mode.type;
-    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata
+    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
     const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
       prompt,
       inputFormat