@openrouter/ai-sdk-provider 0.4.6 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -2
- package/dist/index.d.mts +28 -2
- package/dist/index.d.ts +28 -2
- package/dist/index.js +103 -43
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +103 -43
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +28 -2
- package/dist/internal/index.d.ts +28 -2
- package/dist/internal/index.js +102 -40
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +102 -40
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +18 -13
package/dist/index.mjs
CHANGED
@@ -81,14 +81,15 @@ function convertToOpenRouterChatMessages(prompt) {
       const messageCacheControl = getCacheControl(providerMetadata);
       const contentParts = content.map(
         (part) => {
-          var _a2, _b2, _c2, _d
+          var _a2, _b2, _c2, _d;
+          const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
           switch (part.type) {
             case "text":
               return {
                 type: "text",
                 text: part.text,
                 // For text parts, only use part-specific cache control
-                cache_control:
+                cache_control: cacheControl
               };
             case "image":
               return {
@@ -99,18 +100,18 @@ function convertToOpenRouterChatMessages(prompt) {
                 )}`
               },
               // For image parts, use part-specific or message-level cache control
-              cache_control:
+              cache_control: cacheControl
             };
           case "file":
             return {
               type: "file",
               file: {
                 filename: String(
-                  (
+                  (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
                 ),
                 file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
               },
-              cache_control:
+              cache_control: cacheControl
             };
           default: {
             const _exhaustiveCheck = part;
@@ -147,6 +148,7 @@ function convertToOpenRouterChatMessages(prompt) {
           });
           break;
         }
+        case "file":
         // TODO: Handle reasoning and redacted-reasoning
         case "reasoning":
         case "redacted-reasoning":
@@ -262,7 +264,7 @@ var OpenRouterChatLanguageModel = class {
   }) {
     var _a;
     const type = mode.type;
-    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata
+    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
     const baseArgs = __spreadValues(__spreadValues(__spreadValues({
       // model id:
       model: this.modelId,
@@ -287,7 +289,8 @@ var OpenRouterChatLanguageModel = class {
       messages: convertToOpenRouterChatMessages(prompt),
       // OpenRouter specific settings:
       include_reasoning: this.settings.includeReasoning,
-      reasoning: this.settings.reasoning
+      reasoning: this.settings.reasoning,
+      usage: this.settings.usage
     }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
     switch (type) {
       case "regular": {
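The hunk above forwards a new `usage` setting into the request body alongside `reasoning`. A minimal sketch of enabling it from the consumer side, assuming the `usage: { include: true }` shape implied elsewhere in this diff (`this.settings.usage?.include`); the model slug and environment variable name are placeholders, not taken from the diff:

```ts
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

// Placeholder API key source and model slug; only `usage: { include: true }`
// comes from this diff (it is passed through as the `usage` request field).
const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const chatModel = openrouter.chat("openai/gpt-4o", {
  usage: { include: true },
});
```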
@@ -323,7 +326,7 @@ var OpenRouterChatLanguageModel = class {
     }
   }
   async doGenerate(options) {
-    var _b, _c, _d, _e, _f, _g, _h;
+    var _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
@@ -344,14 +347,39 @@ var OpenRouterChatLanguageModel = class {
     if (!choice) {
       throw new Error("No choice in response");
     }
-
+    const usageInfo = response.usage ? {
+      promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
+      completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
+    } : {
+      promptTokens: 0,
+      completionTokens: 0
+    };
+    const providerMetadata = {};
+    if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
+      providerMetadata.openrouter = {
+        usage: {
+          promptTokens: response.usage.prompt_tokens,
+          promptTokensDetails: response.usage.prompt_tokens_details ? {
+            cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
+          } : void 0,
+          completionTokens: response.usage.completion_tokens,
+          completionTokensDetails: response.usage.completion_tokens_details ? {
+            reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
+          } : void 0,
+          cost: response.usage.cost,
+          totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
+        }
+      };
+    }
+    const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
+    return __spreadValues({
       response: {
         id: response.id,
         modelId: response.model
       },
-      text: (
-      reasoning: (
-      toolCalls: (
+      text: (_h = choice.message.content) != null ? _h : void 0,
+      reasoning: (_i = choice.message.reasoning) != null ? _i : void 0,
+      toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
         var _a2;
         return {
           toolCallType: "function",
@@ -361,17 +389,15 @@ var OpenRouterChatLanguageModel = class {
         };
       }),
       finishReason: mapOpenRouterFinishReason(choice.finish_reason),
-      usage:
-      promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
-      completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
-      },
+      usage: usageInfo,
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       warnings: [],
       logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
-    };
+    }, hasProviderMetadata ? { providerMetadata } : {});
   }
   async doStream(options) {
+    var _a, _c;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
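With `usage.include` set, `doGenerate` now attaches an `openrouter.usage` block (cached/reasoning token details, `cost`, `totalTokens`) as provider metadata on the non-streaming result. A sketch of reading it through the AI SDK; the exact result field (`providerMetadata` vs. `experimental_providerMetadata`) depends on the AI SDK version, so the access below is an assumption, not taken from the diff:

```ts
import { generateText } from "ai";

const result = await generateText({ model: chatModel, prompt: "Hello" });

// Field name is version-dependent; cast defensively in this sketch.
const usage = (result as { providerMetadata?: any }).providerMetadata?.openrouter?.usage;
console.log(usage?.totalTokens, usage?.cost, usage?.promptTokensDetails?.cachedTokens);
```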
@@ -382,7 +408,9 @@ var OpenRouterChatLanguageModel = class {
       body: __spreadProps(__spreadValues({}, args), {
         stream: true,
         // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? {
+        stream_options: this.config.compatibility === "strict" ? __spreadValues({
+          include_usage: true
+        }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
       }),
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
@@ -391,7 +419,7 @@ var OpenRouterChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const
+    const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
     const toolCalls = [];
     let finishReason = "other";
     let usage = {
@@ -399,11 +427,13 @@ var OpenRouterChatLanguageModel = class {
       completionTokens: Number.NaN
     };
     let logprobs;
+    const openrouterUsage = {};
+    const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
     return {
       stream: response.pipeThrough(
         new TransformStream({
           transform(chunk, controller) {
-            var _a2,
+            var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
             if (!chunk.success) {
               finishReason = "error";
               controller.enqueue({ type: "error", error: chunk.error });
@@ -432,6 +462,20 @@ var OpenRouterChatLanguageModel = class {
                 promptTokens: value.usage.prompt_tokens,
                 completionTokens: value.usage.completion_tokens
               };
+              openrouterUsage.promptTokens = value.usage.prompt_tokens;
+              if (value.usage.prompt_tokens_details) {
+                openrouterUsage.promptTokensDetails = {
+                  cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
+                };
+              }
+              openrouterUsage.completionTokens = value.usage.completion_tokens;
+              if (value.usage.completion_tokens_details) {
+                openrouterUsage.completionTokensDetails = {
+                  reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
+                };
+              }
+              openrouterUsage.cost = value.usage.cost;
+              openrouterUsage.totalTokens = value.usage.total_tokens;
             }
             const choice = value.choices[0];
             if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -476,7 +520,7 @@ var OpenRouterChatLanguageModel = class {
                   message: `Expected 'id' to be a string.`
                 });
               }
-              if (((
+              if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
                 throw new InvalidResponseDataError({
                   data: toolCallDelta,
                   message: `Expected 'function.name' to be a string.`
@@ -487,7 +531,7 @@ var OpenRouterChatLanguageModel = class {
                 type: "function",
                 function: {
                   name: toolCallDelta.function.name,
-                  arguments: (
+                  arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
                 },
                 sent: false
               };
@@ -495,7 +539,7 @@ var OpenRouterChatLanguageModel = class {
               if (toolCall2 == null) {
                 throw new Error("Tool call is missing");
               }
-              if (((
+              if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call-delta",
                   toolCallType: "function",
@@ -506,7 +550,7 @@ var OpenRouterChatLanguageModel = class {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (
+                  toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
                   toolName: toolCall2.function.name,
                   args: toolCall2.function.arguments
                 });
@@ -518,21 +562,21 @@ var OpenRouterChatLanguageModel = class {
               if (toolCall == null) {
                 throw new Error("Tool call is missing");
               }
-              if (((
-                toolCall.function.arguments += (
+              if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+                toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
               }
               controller.enqueue({
                 type: "tool-call-delta",
                 toolCallType: "function",
                 toolCallId: toolCall.id,
                 toolName: toolCall.function.name,
-                argsTextDelta: (
+                argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
               });
-              if (((
+              if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (
+                  toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
                   toolName: toolCall.function.name,
                   args: toolCall.function.arguments
                 });
@@ -558,12 +602,19 @@ var OpenRouterChatLanguageModel = class {
               }
             }
           }
-
+          const providerMetadata = {};
+          if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
+            providerMetadata.openrouter = {
+              usage: openrouterUsage
+            };
+          }
+          const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
+          controller.enqueue(__spreadValues({
             type: "finish",
             finishReason,
             logprobs,
             usage
-          });
+          }, hasProviderMetadata ? { providerMetadata } : {}));
         }
       })
     ),
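The streaming path mirrors the non-streaming one: when usage accounting was requested and the final usage chunk carried details, the `finish` stream part is extended with the same `openrouter.usage` metadata. A sketch of observing it via `streamText`'s `onFinish` callback; as above, the metadata field name on the callback event varies by AI SDK version and is assumed here:

```ts
import { streamText } from "ai";

const streamResult = await streamText({
  model: chatModel,
  prompt: "Hello",
  onFinish(event) {
    // Assumed field name; may be `experimental_providerMetadata` on older AI SDK releases.
    const usage = (event as { providerMetadata?: any }).providerMetadata?.openrouter?.usage;
    console.log(usage?.cost, usage?.completionTokensDetails?.reasoningTokens);
  },
});

for await (const delta of streamResult.textStream) {
  process.stdout.write(delta);
}
```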
@@ -578,8 +629,15 @@ var OpenRouterChatCompletionBaseResponseSchema = z2.object({
   model: z2.string().optional(),
   usage: z2.object({
     prompt_tokens: z2.number(),
+    prompt_tokens_details: z2.object({
+      cached_tokens: z2.number()
+    }).optional(),
     completion_tokens: z2.number(),
-
+    completion_tokens_details: z2.object({
+      reasoning_tokens: z2.number()
+    }).optional(),
+    total_tokens: z2.number(),
+    cost: z2.number().optional()
   }).nullish()
 });
 var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
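The extended `usage` schema above accepts OpenRouter's richer accounting payload. An illustrative response fragment that would satisfy it (all values invented for the example):

```ts
const exampleUsage = {
  prompt_tokens: 1200,
  prompt_tokens_details: { cached_tokens: 1024 },
  completion_tokens: 350,
  completion_tokens_details: { reasoning_tokens: 120 },
  total_tokens: 1550,
  cost: 0.0042,
};
```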
@@ -676,14 +734,13 @@ function prepareToolsAndToolChoice(mode) {
           parameters: tool.parameters
         }
       };
-    } else {
-      return {
-        type: "function",
-        function: {
-          name: tool.name
-        }
-      };
     }
+    return {
+      type: "function",
+      function: {
+        name: tool.name
+      }
+    };
   });
   const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
@@ -802,6 +859,11 @@ ${userMessage}
             functionality: "redacted reasoning messages"
           });
         }
+        case "file": {
+          throw new UnsupportedFunctionalityError2({
+            functionality: "file attachments"
+          });
+        }
         default: {
           const _exhaustiveCheck = part;
           throw new Error(
@@ -880,7 +942,7 @@ var OpenRouterCompletionLanguageModel = class {
   }) {
     var _a, _b;
     const type = mode.type;
-    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata
+    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
     const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
       prompt,
       inputFormat
@@ -1177,9 +1239,7 @@ function createOpenRouter(options = {}) {
     }
     return createChatModel(modelId, settings);
   };
-  const provider =
-    return createLanguageModel(modelId, settings);
-  };
+  const provider = (modelId, settings) => createLanguageModel(modelId, settings);
   provider.languageModel = createLanguageModel;
   provider.chat = createChatModel;
   provider.completion = createCompletionModel;