@openrouter/ai-sdk-provider 0.2.2 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +38 -29
- package/dist/index.d.ts +38 -29
- package/dist/index.js +49 -19
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +49 -19
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +37 -25
- package/internal/dist/index.d.ts +37 -25
- package/internal/dist/index.js +49 -19
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +49 -19
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +7 -7
package/dist/index.d.mts
CHANGED
@@ -1,8 +1,38 @@
  import { LanguageModelV1 } from '@ai-sdk/provider';
  export { LanguageModelV1 } from '@ai-sdk/provider';

+ type OpenRouterLanguageModel = LanguageModelV1;
+ type OpenRouterSharedSettings = {
+     /**
+      * List of model IDs to try in order if the primary model fails, e.g. ["anthropic/claude-2","gryphe/mythomax-l2-13b"]
+      */
+     models?: string[];
+     /**
+      * @deprecated use `reasoning` instead
+      */
+     includeReasoning?: boolean;
+     /**
+      * https://openrouter.ai/docs/use-cases/reasoning-tokens
+      * One of `max_tokens` or `effort` is required.
+      * If `exclude` is true, reasoning will be removed from the response. Default is false.
+      */
+     reasoning?: {
+         exclude?: boolean;
+     } & ({
+         max_tokens: number;
+     } | {
+         effort: "high" | "medium" | "low";
+     });
+     extraBody?: Record<string, any>;
+     /**
+     A unique identifier representing your end-user, which can help OpenRouter to
+     monitor and detect abuse.
+     */
+     user?: string;
+ };
+
  type OpenRouterChatModelId = string;
- interface OpenRouterChatSettings {
+ type OpenRouterChatSettings = {
      /**
      Modify the likelihood of specified tokens appearing in the completion.

@@ -39,12 +69,7 @@ interface OpenRouterChatSettings {
      monitor and detect abuse. Learn more.
      */
      user?: string;
-     /**
-      * List of model IDs to try in order if the primary model fails, e.g. ["anthropic/claude-2","gryphe/mythomax-l2-13b"]
-      */
-     models?: string[];
-     includeReasoning?: boolean;
- }
+ } & OpenRouterSharedSettings;

  type OpenRouterChatConfig = {
      provider: string;
@@ -70,12 +95,8 @@ declare class OpenRouterChatLanguageModel implements LanguageModelV1 {
      doStream(options: Parameters<LanguageModelV1["doStream"]>[0]): Promise<Awaited<ReturnType<LanguageModelV1["doStream"]>>>;
  }

- type OpenRouterCompletionModelId = string
- interface OpenRouterCompletionSettings {
-     /**
-     Echo back the prompt in addition to the completion.
-     */
-     echo?: boolean;
+ type OpenRouterCompletionModelId = string;
+ type OpenRouterCompletionSettings = {
      /**
      Modify the likelihood of specified tokens appearing in the completion.

@@ -107,17 +128,7 @@ interface OpenRouterCompletionSettings {
      The suffix that comes after a completion of inserted text.
      */
      suffix?: string;
-     /**
-     A unique identifier representing your end-user, which can help OpenRouter to
-     monitor and detect abuse. Learn more.
-     */
-     user?: string;
-     /**
-      * List of model IDs to try in order if the primary model fails, e.g. ["openai/gpt-4","anthropic/claude-2"]
-      */
-     models?: string[];
-     includeReasoning?: boolean;
- }
+ } & OpenRouterSharedSettings;

  type OpenRouterCompletionConfig = {
      provider: string;
@@ -144,9 +155,9 @@ declare class OpenRouterCompletionLanguageModel implements LanguageModelV1 {
  }

  interface OpenRouterProvider {
-     (modelId:
+     (modelId: OpenRouterChatModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
      (modelId: OpenRouterChatModelId, settings?: OpenRouterChatSettings): OpenRouterChatLanguageModel;
-     languageModel(modelId:
+     languageModel(modelId: OpenRouterChatModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
      languageModel(modelId: OpenRouterChatModelId, settings?: OpenRouterChatSettings): OpenRouterChatLanguageModel;
      /**
      Creates an OpenRouter chat model for text generation.
@@ -226,6 +237,4 @@ declare class OpenRouter {
      completion(modelId: OpenRouterCompletionModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
  }

- type OpenRouterLanguageModel
-
- export { OpenRouter, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderSettings, createOpenRouter, openrouter };
+ export { OpenRouter, type OpenRouterCompletionSettings, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderSettings, type OpenRouterSharedSettings, createOpenRouter, openrouter };
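The net effect of the declaration changes above is that the reasoning, fallback-model, and extra-body options now live on a shared OpenRouterSharedSettings type that both the chat and completion settings extend. A minimal usage sketch, assuming an `apiKey` field on OpenRouterProviderSettings and the `generateText` helper from the `ai` package (neither is shown in this diff):

import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { generateText } from 'ai';

// `apiKey` is assumed here; OpenRouterProviderSettings is not expanded in this diff.
const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const model = openrouter('anthropic/claude-2', {
  // New in 0.3.1: replaces the deprecated `includeReasoning` flag.
  reasoning: { effort: 'high', exclude: false },
  // Fallback model IDs, tried in order if the primary model fails.
  models: ['anthropic/claude-2', 'gryphe/mythomax-l2-13b'],
});

const { text } = await generateText({ model, prompt: 'Summarize this change in one sentence.' });
console.log(text);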
package/dist/index.d.ts
CHANGED
@@ -1,8 +1,38 @@
  import { LanguageModelV1 } from '@ai-sdk/provider';
  export { LanguageModelV1 } from '@ai-sdk/provider';

+ type OpenRouterLanguageModel = LanguageModelV1;
+ type OpenRouterSharedSettings = {
+     /**
+      * List of model IDs to try in order if the primary model fails, e.g. ["anthropic/claude-2","gryphe/mythomax-l2-13b"]
+      */
+     models?: string[];
+     /**
+      * @deprecated use `reasoning` instead
+      */
+     includeReasoning?: boolean;
+     /**
+      * https://openrouter.ai/docs/use-cases/reasoning-tokens
+      * One of `max_tokens` or `effort` is required.
+      * If `exclude` is true, reasoning will be removed from the response. Default is false.
+      */
+     reasoning?: {
+         exclude?: boolean;
+     } & ({
+         max_tokens: number;
+     } | {
+         effort: "high" | "medium" | "low";
+     });
+     extraBody?: Record<string, any>;
+     /**
+     A unique identifier representing your end-user, which can help OpenRouter to
+     monitor and detect abuse.
+     */
+     user?: string;
+ };
+
  type OpenRouterChatModelId = string;
- interface OpenRouterChatSettings {
+ type OpenRouterChatSettings = {
      /**
      Modify the likelihood of specified tokens appearing in the completion.

@@ -39,12 +69,7 @@ interface OpenRouterChatSettings {
      monitor and detect abuse. Learn more.
      */
      user?: string;
-     /**
-      * List of model IDs to try in order if the primary model fails, e.g. ["anthropic/claude-2","gryphe/mythomax-l2-13b"]
-      */
-     models?: string[];
-     includeReasoning?: boolean;
- }
+ } & OpenRouterSharedSettings;

  type OpenRouterChatConfig = {
      provider: string;
@@ -70,12 +95,8 @@ declare class OpenRouterChatLanguageModel implements LanguageModelV1 {
      doStream(options: Parameters<LanguageModelV1["doStream"]>[0]): Promise<Awaited<ReturnType<LanguageModelV1["doStream"]>>>;
  }

- type OpenRouterCompletionModelId = string
- interface OpenRouterCompletionSettings {
-     /**
-     Echo back the prompt in addition to the completion.
-     */
-     echo?: boolean;
+ type OpenRouterCompletionModelId = string;
+ type OpenRouterCompletionSettings = {
      /**
      Modify the likelihood of specified tokens appearing in the completion.

@@ -107,17 +128,7 @@ interface OpenRouterCompletionSettings {
      The suffix that comes after a completion of inserted text.
      */
      suffix?: string;
-     /**
-     A unique identifier representing your end-user, which can help OpenRouter to
-     monitor and detect abuse. Learn more.
-     */
-     user?: string;
-     /**
-      * List of model IDs to try in order if the primary model fails, e.g. ["openai/gpt-4","anthropic/claude-2"]
-      */
-     models?: string[];
-     includeReasoning?: boolean;
- }
+ } & OpenRouterSharedSettings;

  type OpenRouterCompletionConfig = {
      provider: string;
@@ -144,9 +155,9 @@ declare class OpenRouterCompletionLanguageModel implements LanguageModelV1 {
  }

  interface OpenRouterProvider {
-     (modelId:
+     (modelId: OpenRouterChatModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
      (modelId: OpenRouterChatModelId, settings?: OpenRouterChatSettings): OpenRouterChatLanguageModel;
-     languageModel(modelId:
+     languageModel(modelId: OpenRouterChatModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
      languageModel(modelId: OpenRouterChatModelId, settings?: OpenRouterChatSettings): OpenRouterChatLanguageModel;
      /**
      Creates an OpenRouter chat model for text generation.
@@ -226,6 +237,4 @@ declare class OpenRouter {
      completion(modelId: OpenRouterCompletionModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
  }

- type OpenRouterLanguageModel
-
- export { OpenRouter, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderSettings, createOpenRouter, openrouter };
+ export { OpenRouter, type OpenRouterCompletionSettings, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderSettings, type OpenRouterSharedSettings, createOpenRouter, openrouter };
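The CommonJS typings above carry the same shared settings; the runtime counterpart follows in index.js, where a settings-level extraBody is now spread into the request body after the provider-level extraBody, and the reasoning object is forwarded as-is. A hedged sketch of those two options (the `transforms` key is only an illustrative OpenRouter request field, and `apiKey` is again assumed; neither is guaranteed by this diff):

import { createOpenRouter } from '@openrouter/ai-sdk-provider';

// `apiKey` assumed, as in the previous sketch.
const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const model = openrouter('openai/gpt-4', {
  // The other arm of the reasoning union: a token budget instead of an effort level.
  reasoning: { max_tokens: 512 },
  // Merged into the request body on top of any provider-level extraBody
  // (see the __spreadValues changes in index.js below).
  extraBody: { transforms: ['middle-out'] },
});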
package/dist/index.js
CHANGED
@@ -135,6 +135,10 @@ function convertToOpenRouterChatMessages(prompt) {
        });
        break;
      }
+     // TODO: Handle reasoning and redacted-reasoning
+     case "reasoning":
+     case "redacted-reasoning":
+       break;
      default: {
        const _exhaustiveCheck = part;
        throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
@@ -236,10 +240,13 @@ var OpenRouterChatLanguageModel = class {
      topP,
      frequencyPenalty,
      presencePenalty,
-     seed
+     seed,
+     stopSequences,
+     responseFormat,
+     topK
    }) {
      const type = mode.type;
-     const baseArgs = __spreadValues({
+     const baseArgs = __spreadValues(__spreadValues({
        // model id:
        model: this.modelId,
        models: this.settings.models,
@@ -256,11 +263,15 @@ var OpenRouterChatLanguageModel = class {
        frequency_penalty: frequencyPenalty,
        presence_penalty: presencePenalty,
        seed,
+       stop: stopSequences,
+       response_format: responseFormat,
+       top_k: topK,
        // messages:
        messages: convertToOpenRouterChatMessages(prompt),
-       //
-       include_reasoning: this.settings.includeReasoning
-     }, this.config.extraBody);
+       // OpenRouter specific settings:
+       include_reasoning: this.settings.includeReasoning,
+       reasoning: this.settings.reasoning
+     }, this.config.extraBody), this.settings.extraBody);
      switch (type) {
        case "regular": {
          return __spreadValues(__spreadValues({}, baseArgs), prepareToolsAndToolChoice(mode));
@@ -704,7 +715,9 @@ function convertToOpenRouterCompletionPrompt({
      }
      default: {
        const _exhaustiveCheck = part;
-       throw new Error(
+       throw new Error(
+         `Unsupported content type: ${_exhaustiveCheck}`
+       );
      }
    }
  }).join("");
@@ -725,9 +738,21 @@ ${userMessage}
          functionality: "tool-call messages"
        });
      }
+     case "reasoning": {
+       throw new import_provider2.UnsupportedFunctionalityError({
+         functionality: "reasoning messages"
+       });
+     }
+     case "redacted-reasoning": {
+       throw new import_provider2.UnsupportedFunctionalityError({
+         functionality: "redacted reasoning messages"
+       });
+     }
      default: {
        const _exhaustiveCheck = part;
-       throw new Error(
+       throw new Error(
+         `Unsupported content type: ${_exhaustiveCheck}`
+       );
      }
    }
  }).join("");
@@ -751,9 +776,7 @@ ${assistantMessage}
    text += `${assistant}:
  `;
    return {
-     prompt: text
-     stopSequences: [`
- ${user}:`]
+     prompt: text
    };
  }

@@ -795,17 +818,22 @@ var OpenRouterCompletionLanguageModel = class {
      topP,
      frequencyPenalty,
      presencePenalty,
-     seed
+     seed,
+     responseFormat,
+     topK,
+     stopSequences
    }) {
      var _a;
      const type = mode.type;
-     const { prompt: completionPrompt
-     const baseArgs = __spreadValues({
+     const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
+       prompt,
+       inputFormat
+     });
+     const baseArgs = __spreadValues(__spreadValues({
        // model id:
        model: this.modelId,
        models: this.settings.models,
        // model specific settings:
-       echo: this.settings.echo,
        logit_bias: this.settings.logitBias,
        logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
        suffix: this.settings.suffix,
@@ -817,13 +845,15 @@ var OpenRouterCompletionLanguageModel = class {
        frequency_penalty: frequencyPenalty,
        presence_penalty: presencePenalty,
        seed,
+       stop: stopSequences,
+       response_format: responseFormat,
+       top_k: topK,
        // prompt:
        prompt: completionPrompt,
-       //
-
-
-
-     }, this.config.extraBody);
+       // OpenRouter specific settings:
+       include_reasoning: this.settings.includeReasoning,
+       reasoning: this.settings.reasoning
+     }, this.config.extraBody), this.settings.extraBody);
      switch (type) {
        case "regular": {
          if ((_a = mode.tools) == null ? void 0 : _a.length) {