@openrouter/ai-sdk-provider 0.4.1 → 0.4.3
This diff shows the changes between publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +36 -0
- package/dist/index.js +86 -59
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +86 -59
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +86 -59
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +86 -59
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -51,39 +51,61 @@ import { z as z2 } from "zod";
 // src/convert-to-openrouter-chat-messages.ts
 import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
 function convertToOpenRouterChatMessages(prompt) {
-  var _a;
+  var _a, _b, _c;
   const messages = [];
-
+  function getCacheControl(providerMetadata) {
+    var _a2;
+    const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
+    const cacheControlValue = (_a2 = anthropic == null ? void 0 : anthropic.cacheControl) != null ? _a2 : anthropic == null ? void 0 : anthropic.cache_control;
+    return cacheControlValue;
+  }
+  for (const { role, content, providerMetadata } of prompt) {
     switch (role) {
       case "system": {
-        messages.push({
+        messages.push({
+          role: "system",
+          content,
+          cache_control: getCacheControl(providerMetadata)
+        });
        break;
      }
      case "user": {
        if (content.length === 1 && ((_a = content[0]) == null ? void 0 : _a.type) === "text") {
-          messages.push({
+          messages.push({
+            role: "user",
+            content: content[0].text,
+            cache_control: (_b = getCacheControl(providerMetadata)) != null ? _b : getCacheControl(content[0].providerMetadata)
+          });
          break;
        }
+        const messageCacheControl = getCacheControl(providerMetadata);
        const contentParts = content.map(
          (part) => {
-            var _a2;
+            var _a2, _b2, _c2;
            switch (part.type) {
              case "text":
                return {
                  type: "text",
-                  text: part.text
+                  text: part.text,
+                  // For text parts, only use part-specific cache control
+                  cache_control: getCacheControl(part.providerMetadata)
                };
              case "image":
                return {
                  type: "image_url",
                  image_url: {
-                    url: part.image instanceof URL ? part.image.toString() : `data:${(_a2 = part.mimeType) != null ? _a2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
-
+                    url: part.image instanceof URL ? part.image.toString() : `data:${(_a2 = part.mimeType) != null ? _a2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
+                      part.image
+                    )}`
+                  },
+                  // For image parts, use part-specific or message-level cache control
+                  cache_control: (_b2 = getCacheControl(part.providerMetadata)) != null ? _b2 : messageCacheControl
                };
              case "file":
                return {
                  type: "text",
-                  text: part.data instanceof URL ? part.data.toString() : part.data
+                  text: part.data instanceof URL ? part.data.toString() : part.data,
+                  cache_control: (_c2 = getCacheControl(part.providerMetadata)) != null ? _c2 : messageCacheControl
                };
              default: {
                const _exhaustiveCheck = part;
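The new getCacheControl helper reads Anthropic-style cache-control hints out of provider metadata, accepting either the cacheControl or cache_control spelling, and copies them onto the converted messages. A minimal usage sketch, assuming the AI SDK's per-message experimental_providerMetadata pass-through and an Anthropic model routed through OpenRouter (the model id and prompt are illustrative, not from this diff):

    import { createOpenRouter } from "@openrouter/ai-sdk-provider";
    import { generateText } from "ai";

    const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

    async function main() {
      const { text } = await generateText({
        model: openrouter("anthropic/claude-3.5-sonnet"),
        messages: [
          {
            role: "system",
            content: "Very long, reusable system prompt...",
            // Assumed pass-through: this metadata reaches convertToOpenRouterChatMessages
            // as providerMetadata, where getCacheControl accepts either spelling.
            experimental_providerMetadata: {
              anthropic: { cacheControl: { type: "ephemeral" } },
            },
          },
          { role: "user", content: "First question against the cached prefix" },
        ],
      });
      console.log(text);
    }

    main();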
@@ -133,7 +155,8 @@ function convertToOpenRouterChatMessages(prompt) {
         messages.push({
           role: "assistant",
           content: text,
-          tool_calls: toolCalls.length > 0 ? toolCalls : void 0
+          tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
+          cache_control: getCacheControl(providerMetadata)
         });
         break;
       }
@@ -142,7 +165,8 @@ function convertToOpenRouterChatMessages(prompt) {
           messages.push({
             role: "tool",
             tool_call_id: toolResponse.toolCallId,
-            content: JSON.stringify(toolResponse.result)
+            content: JSON.stringify(toolResponse.result),
+            cache_control: (_c = getCacheControl(providerMetadata)) != null ? _c : getCacheControl(toolResponse.providerMetadata)
           });
         }
         break;
@@ -189,7 +213,7 @@ function mapOpenRouterFinishReason(finishReason) {
 // src/openrouter-error.ts
 import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
 import { z } from "zod";
-var openAIErrorDataSchema = z.object({
+var OpenRouterErrorResponseSchema = z.object({
   error: z.object({
     message: z.string(),
     type: z.string(),
@@ -198,7 +222,7 @@ var openAIErrorDataSchema = z.object({
   })
 });
 var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: openAIErrorDataSchema,
+  errorSchema: OpenRouterErrorResponseSchema,
   errorToMessage: (data) => data.error.message
 });
 
@@ -294,7 +318,7 @@ var OpenRouterChatLanguageModel = class {
     }
   }
   async doGenerate(options) {
-    var _b, _c, _d;
+    var _b, _c, _d, _e, _f, _g, _h;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
@@ -305,17 +329,21 @@ var OpenRouterChatLanguageModel = class {
       body: args,
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(
-
+        OpenRouterNonStreamChatCompletionResponseSchema
       ),
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
     const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
     const choice = response.choices[0];
-    if (choice
+    if (!choice) {
       throw new Error("No choice in response");
     }
     return {
+      response: {
+        id: response.id,
+        modelId: response.model
+      },
      text: (_b = choice.message.content) != null ? _b : void 0,
      reasoning: (_c = choice.message.reasoning) != null ? _c : void 0,
      toolCalls: (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
@@ -329,8 +357,8 @@ var OpenRouterChatLanguageModel = class {
       }),
       finishReason: mapOpenRouterFinishReason(choice.finish_reason),
       usage: {
-        promptTokens: response.usage.prompt_tokens,
-        completionTokens: response.usage.completion_tokens
+        promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
+        completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
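Taken together, doGenerate now returns the OpenRouter response id and the model that actually served the request, and falls back to 0 instead of throwing when the upstream body omits a usage block. A consumer-side sketch, assuming an AI SDK version that surfaces result.response (the model id is illustrative):

    import { createOpenRouter } from "@openrouter/ai-sdk-provider";
    import { generateText } from "ai";

    const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

    async function main() {
      const result = await generateText({
        model: openrouter("openai/gpt-4o-mini"),
        prompt: "Say hello in one word.",
      });
      // Populated from the new `response: { id, modelId }` fields above.
      console.log(result.response.id, result.response.modelId);
      // 0 rather than a TypeError when the provider body carries no usage object.
      console.log(result.usage.promptTokens, result.usage.completionTokens);
    }

    main();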
@@ -353,7 +381,7 @@ var OpenRouterChatLanguageModel = class {
       }),
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
-
+        OpenRouterStreamChatCompletionChunkSchema
       ),
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
@@ -388,6 +416,12 @@ var OpenRouterChatLanguageModel = class {
             id: value.id
           });
         }
+        if (value.model) {
+          controller.enqueue({
+            type: "response-metadata",
+            modelId: value.model
+          });
+        }
         if (value.usage != null) {
           usage = {
             promptTokens: value.usage.prompt_tokens,
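During streaming, the provider now also forwards the `model` reported in each chunk as a `response-metadata` stream part, so the resolved model id is available once the stream settles. A sketch of where that surfaces for a consumer, assuming a v4-style streamText API (model id illustrative):

    import { createOpenRouter } from "@openrouter/ai-sdk-provider";
    import { streamText } from "ai";

    const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

    async function main() {
      const result = streamText({
        model: openrouter("openai/gpt-4o-mini"),
        prompt: "Stream a short greeting.",
        onFinish({ response }) {
          // modelId is filled from the `model` field of the SSE chunks enqueued above.
          console.log("served by", response.modelId);
        },
      });
      for await (const delta of result.textStream) {
        process.stdout.write(delta);
      }
    }

    main();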
@@ -515,7 +549,16 @@ var OpenRouterChatLanguageModel = class {
     };
   }
 };
-var openAIChatResponseSchema = z2.object({
+var OpenRouterChatCompletionBaseResponseSchema = z2.object({
+  id: z2.string().optional(),
+  model: z2.string().optional(),
+  usage: z2.object({
+    prompt_tokens: z2.number(),
+    completion_tokens: z2.number(),
+    total_tokens: z2.number()
+  }).nullish(),
+});
+var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
   choices: z2.array(
     z2.object({
       message: z2.object({
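The response schemas are now built from a shared base that treats id, model, and usage as optional, which is what lets doGenerate above survive bodies without a usage block. A standalone sketch of the pattern (zod; names abbreviated, the choices shape is reduced for illustration):

    import { z } from "zod";

    const Base = z.object({
      id: z.string().optional(),
      model: z.string().optional(),
      usage: z
        .object({
          prompt_tokens: z.number(),
          completion_tokens: z.number(),
          total_tokens: z.number(),
        })
        .nullish(),
    });

    // Each endpoint schema extends the base with its own `choices` shape.
    const NonStream = Base.extend({
      choices: z.array(
        z.object({ message: z.object({ content: z.string().nullish() }) })
      ),
    });

    // A body with no usage block (and no id/model) still parses.
    NonStream.parse({ choices: [] });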
@@ -550,15 +593,10 @@ var openAIChatResponseSchema = z2.object({
       }).nullable().optional(),
       finish_reason: z2.string().optional().nullable()
     })
-  )
-  usage: z2.object({
-    prompt_tokens: z2.number(),
-    completion_tokens: z2.number()
-  })
+  )
 });
-var openrouterChatChunkSchema = z2.union([
-
-    id: z2.string().optional(),
+var OpenRouterStreamChatCompletionChunkSchema = z2.union([
+  OpenRouterChatCompletionBaseResponseSchema.extend({
     choices: z2.array(
       z2.object({
         delta: z2.object({
@@ -594,13 +632,9 @@ var openrouterChatChunkSchema = z2.union([
         finish_reason: z2.string().nullable().optional(),
         index: z2.number()
       })
-    )
-    usage: z2.object({
-      prompt_tokens: z2.number(),
-      completion_tokens: z2.number()
-    }).nullish()
+    )
   }),
-
+  OpenRouterErrorResponseSchema
 ]);
 function prepareToolsAndToolChoice(mode) {
   var _a;
@@ -886,6 +920,7 @@ var OpenRouterCompletionLanguageModel = class {
     }
   }
   async doGenerate(options) {
+    var _b, _c, _d, _e, _f;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
@@ -896,22 +931,29 @@ var OpenRouterCompletionLanguageModel = class {
       body: args,
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler2(
-
+        OpenRouterCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
+    if ("error" in response) {
+      throw new Error(`${response.error.message}`);
+    }
    const choice = response.choices[0];
    if (!choice) {
      throw new Error("No choice in OpenRouter completion response");
    }
    return {
-
+      response: {
+        id: response.id,
+        modelId: response.model
+      },
+      text: (_b = choice.text) != null ? _b : "",
      reasoning: choice.reasoning || void 0,
      usage: {
-        promptTokens: response.usage.prompt_tokens,
-        completionTokens: response.usage.completion_tokens
+        promptTokens: (_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens) != null ? _d : 0,
+        completionTokens: (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens) != null ? _f : 0
      },
      finishReason: mapOpenRouterFinishReason(choice.finish_reason),
      logprobs: mapOpenRouterCompletionLogProbs(choice.logprobs),
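Because the completion response is now validated against a union that includes the error shape, an OpenRouter error delivered in a 200 body must be detected before reading choices; the `"error" in response` check above does that narrowing. A self-contained sketch of the same pattern:

    type CompletionResponse =
      | { id?: string; model?: string; choices: Array<{ text: string }> }
      | { error: { message: string; type: string } };

    function unwrapFirstChoice(response: CompletionResponse): string {
      if ("error" in response) {
        // In-band error payload: surface it the same way the provider does.
        throw new Error(response.error.message);
      }
      return response.choices[0]?.text ?? "";
    }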
@@ -935,7 +977,7 @@ var OpenRouterCompletionLanguageModel = class {
       }),
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler2(
-
+        OpenRouterCompletionChunkSchema
       ),
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
@@ -1002,29 +1044,14 @@ var OpenRouterCompletionLanguageModel = class {
     };
   }
 };
-var
-  choices: z3.array(
-    z3.object({
-      text: z3.string(),
-      reasoning: z3.string().nullish().optional(),
-      finish_reason: z3.string(),
-      logprobs: z3.object({
-        tokens: z3.array(z3.string()),
-        token_logprobs: z3.array(z3.number()),
-        top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
-      }).nullable().optional()
-    })
-  ),
-  usage: z3.object({
-    prompt_tokens: z3.number(),
-    completion_tokens: z3.number()
-  })
-});
-var openrouterCompletionChunkSchema = z3.union([
+var OpenRouterCompletionChunkSchema = z3.union([
   z3.object({
+    id: z3.string().optional(),
+    model: z3.string().optional(),
     choices: z3.array(
       z3.object({
         text: z3.string(),
+        reasoning: z3.string().nullish().optional(),
         finish_reason: z3.string().nullish(),
         index: z3.number(),
         logprobs: z3.object({
@@ -1039,7 +1066,7 @@ var openrouterCompletionChunkSchema = z3.union([
       completion_tokens: z3.number()
     }).optional().nullable()
   }),
-
+  OpenRouterErrorResponseSchema
 ]);
 
 // src/openrouter-facade.ts