@openrouter/ai-sdk-provider 0.4.6 → 0.6.0
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +29 -2
- package/dist/index.d.mts +28 -2
- package/dist/index.d.ts +28 -2
- package/dist/index.js +103 -43
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +103 -43
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +28 -2
- package/dist/internal/index.d.ts +28 -2
- package/dist/internal/index.js +102 -40
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +102 -40
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +18 -13
package/README.md
CHANGED
@@ -37,7 +37,7 @@ const { text } = await generateText({
 
 ## Supported models
 
-This list is not a definitive list of models supported by OpenRouter, as it constantly changes as we add new models (and deprecate old ones) to our system.
+This list is not a definitive list of models supported by OpenRouter, as it constantly changes as we add new models (and deprecate old ones) to our system.
 You can find the latest list of models supported by OpenRouter [here](https://openrouter.ai/models).
 
 You can find the latest list of tool-supported models supported by OpenRouter [here](https://openrouter.ai/models?order=newest&supported_parameters=tools). (Note: This list may contain models that are not compatible with the AI SDK.)
@@ -147,10 +147,37 @@ await streamText({
         },
         {
           type: 'text',
-          text: '
+          text: 'List the speakers?',
         },
       ],
     },
   ],
 });
 ```
+
+## Use Cases
+
+### Usage Accounting
+
+The provider supports [OpenRouter usage accounting](https://openrouter.ai/docs/use-cases/usage-accounting), which allows you to track token usage details directly in your API responses, without making additional API calls.
+
+```typescript
+// Enable usage accounting
+const model = openrouter('openai/gpt-3.5-turbo', {
+  usage: {
+    include: true,
+  }
+});
+
+// Access usage accounting data
+const result = await generateText({
+  model,
+  prompt: 'Hello, how are you today?',
+});
+
+// Provider-specific usage details (available in providerMetadata)
+if (result.providerMetadata?.openrouter?.usage) {
+  console.log('Cost:', result.providerMetadata.openrouter.usage.cost);
+  console.log('Total Tokens:', result.providerMetadata.openrouter.usage.totalTokens);
+}
+```
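Beyond `cost` and `totalTokens`, the accounting object the README reads from `providerMetadata` also carries cached-prompt and reasoning token counts. A minimal sketch in the same style as the README snippet above, continuing from its `result` (field names follow the `OpenRouterUsageAccounting` type declared in the typings below; under strict TypeScript you may need to cast the loosely typed `providerMetadata` record first):

```typescript
// Sketch only -- extends the README example above with the nested detail fields.
const usage = result.providerMetadata?.openrouter?.usage;
if (usage) {
  console.log('Prompt tokens:', usage.promptTokens);
  console.log('Cached prompt tokens:', usage.promptTokensDetails?.cachedTokens);
  console.log('Completion tokens:', usage.completionTokens);
  console.log('Reasoning tokens:', usage.completionTokensDetails?.reasoningTokens);
}
```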
package/dist/index.d.mts
CHANGED
@@ -27,7 +27,33 @@ type OpenRouterSharedSettings = OpenRouterProviderOptions & {
      * @deprecated use `reasoning` instead
      */
     includeReasoning?: boolean;
-    extraBody?: Record<string,
+    extraBody?: Record<string, unknown>;
+    /**
+     * Enable usage accounting to get detailed token usage information.
+     * https://openrouter.ai/docs/use-cases/usage-accounting
+     */
+    usage?: {
+        /**
+         * When true, includes token usage information in the response.
+         */
+        include: boolean;
+    };
+};
+/**
+ * Usage accounting response
+ * @see https://openrouter.ai/docs/use-cases/usage-accounting
+ */
+type OpenRouterUsageAccounting = {
+    promptTokens: number;
+    promptTokensDetails?: {
+        cachedTokens: number;
+    };
+    completionTokens: number;
+    completionTokensDetails?: {
+        reasoningTokens: number;
+    };
+    totalTokens: number;
+    cost?: number;
 };
 
 type OpenRouterChatModelId = string;
@@ -236,4 +262,4 @@ declare class OpenRouter {
     completion(modelId: OpenRouterCompletionModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
 }
 
-export { OpenRouter, type OpenRouterCompletionSettings, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderOptions, type OpenRouterProviderSettings, type OpenRouterSharedSettings, createOpenRouter, openrouter };
+export { OpenRouter, type OpenRouterCompletionSettings, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderOptions, type OpenRouterProviderSettings, type OpenRouterSharedSettings, type OpenRouterUsageAccounting, createOpenRouter, openrouter };
package/dist/index.d.ts
CHANGED
@@ -27,7 +27,33 @@ type OpenRouterSharedSettings = OpenRouterProviderOptions & {
      * @deprecated use `reasoning` instead
      */
     includeReasoning?: boolean;
-    extraBody?: Record<string,
+    extraBody?: Record<string, unknown>;
+    /**
+     * Enable usage accounting to get detailed token usage information.
+     * https://openrouter.ai/docs/use-cases/usage-accounting
+     */
+    usage?: {
+        /**
+         * When true, includes token usage information in the response.
+         */
+        include: boolean;
+    };
+};
+/**
+ * Usage accounting response
+ * @see https://openrouter.ai/docs/use-cases/usage-accounting
+ */
+type OpenRouterUsageAccounting = {
+    promptTokens: number;
+    promptTokensDetails?: {
+        cachedTokens: number;
+    };
+    completionTokens: number;
+    completionTokensDetails?: {
+        reasoningTokens: number;
+    };
+    totalTokens: number;
+    cost?: number;
 };
 
 type OpenRouterChatModelId = string;
@@ -236,4 +262,4 @@ declare class OpenRouter {
     completion(modelId: OpenRouterCompletionModelId, settings?: OpenRouterCompletionSettings): OpenRouterCompletionLanguageModel;
 }
 
-export { OpenRouter, type OpenRouterCompletionSettings, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderOptions, type OpenRouterProviderSettings, type OpenRouterSharedSettings, createOpenRouter, openrouter };
+export { OpenRouter, type OpenRouterCompletionSettings, type OpenRouterLanguageModel, type OpenRouterProvider, type OpenRouterProviderOptions, type OpenRouterProviderSettings, type OpenRouterSharedSettings, type OpenRouterUsageAccounting, createOpenRouter, openrouter };
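Both declaration files above ship the same additions: the `usage` setting on `OpenRouterSharedSettings` and the newly exported `OpenRouterUsageAccounting` type. A minimal sketch of using that type to narrow the loosely typed `providerMetadata` record (assumes the AI SDK's `generateText` and an `OPENROUTER_API_KEY` environment variable; the cast is illustrative, not required by the package):

```typescript
import { createOpenRouter, type OpenRouterUsageAccounting } from '@openrouter/ai-sdk-provider';
import { generateText } from 'ai';

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = await generateText({
  model: openrouter('openai/gpt-3.5-turbo', { usage: { include: true } }),
  prompt: 'Hello, how are you today?',
});

// providerMetadata values are plain JSON records, so cast to the exported type for typed access.
const usage = result.providerMetadata?.openrouter?.usage as OpenRouterUsageAccounting | undefined;
console.log(usage?.totalTokens, usage?.cost, usage?.completionTokensDetails?.reasoningTokens);
```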
package/dist/index.js
CHANGED
@@ -96,14 +96,15 @@ function convertToOpenRouterChatMessages(prompt) {
         const messageCacheControl = getCacheControl(providerMetadata);
         const contentParts = content.map(
           (part) => {
-            var _a2, _b2, _c2, _d
+            var _a2, _b2, _c2, _d;
+            const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
             switch (part.type) {
               case "text":
                 return {
                   type: "text",
                   text: part.text,
                   // For text parts, only use part-specific cache control
-                  cache_control:
+                  cache_control: cacheControl
                 };
               case "image":
                 return {
@@ -114,18 +115,18 @@ function convertToOpenRouterChatMessages(prompt) {
                     )}`
                   },
                   // For image parts, use part-specific or message-level cache control
-                  cache_control:
+                  cache_control: cacheControl
                 };
               case "file":
                 return {
                   type: "file",
                   file: {
                     filename: String(
-                      (
+                      (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
                     ),
                     file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.data)}` : `data:${part.mimeType};base64,${part.data}`
                   },
-                  cache_control:
+                  cache_control: cacheControl
                 };
               default: {
                 const _exhaustiveCheck = part;
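For orientation: the refactor above computes each content part's `cache_control` once (part-level provider metadata, falling back to the message-level value) instead of repeating the lookup in every branch, and the file branch now reads an optional `filename` from `providerMetadata.openrouter`. A rough sketch of the file part shape this mapping consumes, inferred from the compiled code rather than from documented API (`pdfBytes` is a placeholder):

```typescript
// Inferred from convertToOpenRouterChatMessages above; not an official API reference.
declare const pdfBytes: Uint8Array;

const filePart = {
  type: 'file' as const,
  // Uint8Array and base64 string data are both turned into a data: URL by the provider.
  data: pdfBytes,
  mimeType: 'application/pdf',
  providerMetadata: {
    // Optional display filename; getCacheControl(...) reads cache hints from these same records.
    openrouter: { filename: 'slides.pdf' },
  },
};
```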
@@ -162,6 +163,7 @@ function convertToOpenRouterChatMessages(prompt) {
               });
               break;
             }
+            case "file":
             // TODO: Handle reasoning and redacted-reasoning
             case "reasoning":
             case "redacted-reasoning":
@@ -277,7 +279,7 @@ var OpenRouterChatLanguageModel = class {
   }) {
     var _a;
     const type = mode.type;
-    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata
+    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
     const baseArgs = __spreadValues(__spreadValues(__spreadValues({
       // model id:
       model: this.modelId,
@@ -302,7 +304,8 @@ var OpenRouterChatLanguageModel = class {
       messages: convertToOpenRouterChatMessages(prompt),
       // OpenRouter specific settings:
       include_reasoning: this.settings.includeReasoning,
-      reasoning: this.settings.reasoning
+      reasoning: this.settings.reasoning,
+      usage: this.settings.usage
     }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
     switch (type) {
       case "regular": {
@@ -338,7 +341,7 @@ var OpenRouterChatLanguageModel = class {
     }
   }
   async doGenerate(options) {
-    var _b, _c, _d, _e, _f, _g, _h;
+    var _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
@@ -359,14 +362,39 @@ var OpenRouterChatLanguageModel = class {
     if (!choice) {
       throw new Error("No choice in response");
     }
-
+    const usageInfo = response.usage ? {
+      promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
+      completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
+    } : {
+      promptTokens: 0,
+      completionTokens: 0
+    };
+    const providerMetadata = {};
+    if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
+      providerMetadata.openrouter = {
+        usage: {
+          promptTokens: response.usage.prompt_tokens,
+          promptTokensDetails: response.usage.prompt_tokens_details ? {
+            cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
+          } : void 0,
+          completionTokens: response.usage.completion_tokens,
+          completionTokensDetails: response.usage.completion_tokens_details ? {
+            reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
+          } : void 0,
+          cost: response.usage.cost,
+          totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
+        }
+      };
+    }
+    const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
+    return __spreadValues({
       response: {
         id: response.id,
         modelId: response.model
       },
-      text: (
-      reasoning: (
-      toolCalls: (
+      text: (_h = choice.message.content) != null ? _h : void 0,
+      reasoning: (_i = choice.message.reasoning) != null ? _i : void 0,
+      toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
        var _a2;
        return {
          toolCallType: "function",
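The hunk above is compiled output, so the intent is easier to see rewritten plainly: `doGenerate` now builds the AI SDK usage object from OpenRouter's snake_case `usage` payload and, only when `usage.include` was requested, attaches the full accounting object as `providerMetadata.openrouter.usage`. A readable sketch of that mapping (the `RawUsage` type and `toUsageAccounting` helper are illustrative names, not exports of this package):

```typescript
type RawUsage = {
  prompt_tokens: number;
  prompt_tokens_details?: { cached_tokens?: number };
  completion_tokens: number;
  completion_tokens_details?: { reasoning_tokens?: number };
  total_tokens?: number;
  cost?: number;
};

// Illustrative helper mirroring the compiled mapping above.
function toUsageAccounting(usage: RawUsage) {
  return {
    promptTokens: usage.prompt_tokens,
    promptTokensDetails: usage.prompt_tokens_details
      ? { cachedTokens: usage.prompt_tokens_details.cached_tokens ?? 0 }
      : undefined,
    completionTokens: usage.completion_tokens,
    completionTokensDetails: usage.completion_tokens_details
      ? { reasoningTokens: usage.completion_tokens_details.reasoning_tokens ?? 0 }
      : undefined,
    cost: usage.cost,
    totalTokens: usage.total_tokens ?? 0,
  };
}
```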
@@ -376,17 +404,15 @@ var OpenRouterChatLanguageModel = class {
         };
       }),
       finishReason: mapOpenRouterFinishReason(choice.finish_reason),
-      usage:
-        promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : 0,
-        completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : 0
-      },
+      usage: usageInfo,
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       warnings: [],
       logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
-    };
+    }, hasProviderMetadata ? { providerMetadata } : {});
   }
   async doStream(options) {
+    var _a, _c;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
@@ -397,7 +423,9 @@ var OpenRouterChatLanguageModel = class {
       body: __spreadProps(__spreadValues({}, args), {
         stream: true,
         // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? {
+        stream_options: this.config.compatibility === "strict" ? __spreadValues({
+          include_usage: true
+        }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
       }),
       failedResponseHandler: openrouterFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
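Combined with the `usage: this.settings.usage` field that `getArgs` now forwards (see the earlier hunk), the widened `stream_options` above means a streaming request in strict compatibility mode carries both flags. A sketch of the resulting request body, with illustrative values:

```typescript
// Illustrative request body; include_usage is always set in strict mode, and the
// top-level usage field is only present when settings.usage was configured.
const body = {
  model: 'openai/gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Hello, how are you today?' }],
  stream: true,
  stream_options: { include_usage: true },
  usage: { include: true },
};
```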
@@ -406,7 +434,7 @@ var OpenRouterChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const
+    const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
     const toolCalls = [];
     let finishReason = "other";
     let usage = {
@@ -414,11 +442,13 @@ var OpenRouterChatLanguageModel = class {
       completionTokens: Number.NaN
     };
     let logprobs;
+    const openrouterUsage = {};
+    const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
     return {
       stream: response.pipeThrough(
         new TransformStream({
           transform(chunk, controller) {
-            var _a2,
+            var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
             if (!chunk.success) {
               finishReason = "error";
               controller.enqueue({ type: "error", error: chunk.error });
@@ -447,6 +477,20 @@ var OpenRouterChatLanguageModel = class {
                 promptTokens: value.usage.prompt_tokens,
                 completionTokens: value.usage.completion_tokens
               };
+              openrouterUsage.promptTokens = value.usage.prompt_tokens;
+              if (value.usage.prompt_tokens_details) {
+                openrouterUsage.promptTokensDetails = {
+                  cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
+                };
+              }
+              openrouterUsage.completionTokens = value.usage.completion_tokens;
+              if (value.usage.completion_tokens_details) {
+                openrouterUsage.completionTokensDetails = {
+                  reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
+                };
+              }
+              openrouterUsage.cost = value.usage.cost;
+              openrouterUsage.totalTokens = value.usage.total_tokens;
             }
             const choice = value.choices[0];
             if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -491,7 +535,7 @@ var OpenRouterChatLanguageModel = class {
                   message: `Expected 'id' to be a string.`
                 });
               }
-              if (((
+              if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
                 throw new import_provider.InvalidResponseDataError({
                   data: toolCallDelta,
                   message: `Expected 'function.name' to be a string.`
@@ -502,7 +546,7 @@ var OpenRouterChatLanguageModel = class {
                 type: "function",
                 function: {
                   name: toolCallDelta.function.name,
-                  arguments: (
+                  arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
                 },
                 sent: false
               };
@@ -510,7 +554,7 @@ var OpenRouterChatLanguageModel = class {
               if (toolCall2 == null) {
                 throw new Error("Tool call is missing");
               }
-              if (((
+              if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call-delta",
                   toolCallType: "function",
@@ -521,7 +565,7 @@ var OpenRouterChatLanguageModel = class {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (
+                  toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
                   toolName: toolCall2.function.name,
                   args: toolCall2.function.arguments
                 });
@@ -533,21 +577,21 @@ var OpenRouterChatLanguageModel = class {
               if (toolCall == null) {
                 throw new Error("Tool call is missing");
               }
-              if (((
-                toolCall.function.arguments += (
+              if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+                toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
               }
               controller.enqueue({
                 type: "tool-call-delta",
                 toolCallType: "function",
                 toolCallId: toolCall.id,
                 toolName: toolCall.function.name,
-                argsTextDelta: (
+                argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
               });
-              if (((
+              if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (
+                  toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
                   toolName: toolCall.function.name,
                   args: toolCall.function.arguments
                 });
@@ -573,12 +617,19 @@ var OpenRouterChatLanguageModel = class {
                }
              }
            }
-
+            const providerMetadata = {};
+            if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
+              providerMetadata.openrouter = {
+                usage: openrouterUsage
+              };
+            }
+            const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
+            controller.enqueue(__spreadValues({
              type: "finish",
              finishReason,
              logprobs,
              usage
-            });
+            }, hasProviderMetadata ? { providerMetadata } : {}));
           }
         })
       ),
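Because the `finish` stream part above now carries `providerMetadata`, usage accounting also surfaces for streaming calls. A hedged sketch using the AI SDK's `streamText`, assuming an `ai` version whose `onFinish` callback exposes `providerMetadata` (older releases surfaced it as `experimental_providerMetadata`):

```typescript
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { streamText } from 'ai';

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

const result = streamText({
  model: openrouter('openai/gpt-3.5-turbo', { usage: { include: true } }),
  prompt: 'Hello, how are you today?',
  onFinish({ providerMetadata }) {
    // Populated from the finish stream part enqueued above, when usage accounting is enabled.
    console.log(providerMetadata?.openrouter?.usage);
  },
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}
```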
@@ -593,8 +644,15 @@ var OpenRouterChatCompletionBaseResponseSchema = import_zod2.z.object({
   model: import_zod2.z.string().optional(),
   usage: import_zod2.z.object({
     prompt_tokens: import_zod2.z.number(),
+    prompt_tokens_details: import_zod2.z.object({
+      cached_tokens: import_zod2.z.number()
+    }).optional(),
     completion_tokens: import_zod2.z.number(),
-
+    completion_tokens_details: import_zod2.z.object({
+      reasoning_tokens: import_zod2.z.number()
+    }).optional(),
+    total_tokens: import_zod2.z.number(),
+    cost: import_zod2.z.number().optional()
   }).nullish()
 });
 var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
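The schema extension above matches the richer `usage` block OpenRouter returns when usage accounting is enabled. An illustrative payload that the extended schema now accepts (all numbers invented):

```typescript
// Field names follow the zod schema above; the values are made up for illustration.
const exampleUsage = {
  prompt_tokens: 1280,
  prompt_tokens_details: { cached_tokens: 1024 },
  completion_tokens: 96,
  completion_tokens_details: { reasoning_tokens: 32 },
  total_tokens: 1376,
  cost: 0.00042,
};
```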
@@ -691,14 +749,13 @@ function prepareToolsAndToolChoice(mode) {
           parameters: tool.parameters
         }
       };
-    } else {
-      return {
-        type: "function",
-        function: {
-          name: tool.name
-        }
-      };
     }
+    return {
+      type: "function",
+      function: {
+        name: tool.name
+      }
+    };
   });
   const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
@@ -809,6 +866,11 @@ ${userMessage}
           functionality: "redacted reasoning messages"
         });
       }
+      case "file": {
+        throw new import_provider2.UnsupportedFunctionalityError({
+          functionality: "file attachments"
+        });
+      }
       default: {
         const _exhaustiveCheck = part;
         throw new Error(
@@ -887,7 +949,7 @@ var OpenRouterCompletionLanguageModel = class {
   }) {
     var _a, _b;
     const type = mode.type;
-    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata
+    const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
     const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
       prompt,
       inputFormat
@@ -1184,9 +1246,7 @@ function createOpenRouter(options = {}) {
     }
     return createChatModel(modelId, settings);
   };
-  const provider =
-    return createLanguageModel(modelId, settings);
-  };
+  const provider = (modelId, settings) => createLanguageModel(modelId, settings);
   provider.languageModel = createLanguageModel;
   provider.chat = createChatModel;
   provider.completion = createCompletionModel;