@openrouter/ai-sdk-provider 0.7.1 → 1.0.0-beta.0

This diff shows the contents of two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -17,24 +17,878 @@ var __spreadValues = (a, b) => {
17
17
  return a;
18
18
  };
19
19
  var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
20
- var __objRest = (source, exclude) => {
21
- var target = {};
22
- for (var prop in source)
23
- if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
24
- target[prop] = source[prop];
25
- if (source != null && __getOwnPropSymbols)
26
- for (var prop of __getOwnPropSymbols(source)) {
27
- if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
28
- target[prop] = source[prop];
20
+
21
+ // node_modules/.pnpm/@ai-sdk+provider@2.0.0-beta.1/node_modules/@ai-sdk/provider/dist/index.mjs
22
+ var marker = "vercel.ai.error";
23
+ var symbol = Symbol.for(marker);
24
+ var _a;
25
+ var _AISDKError = class _AISDKError2 extends Error {
26
+ /**
27
+ * Creates an AI SDK Error.
28
+ *
29
+ * @param {Object} params - The parameters for creating the error.
30
+ * @param {string} params.name - The name of the error.
31
+ * @param {string} params.message - The error message.
32
+ * @param {unknown} [params.cause] - The underlying cause of the error.
33
+ */
34
+ constructor({
35
+ name: name14,
36
+ message,
37
+ cause
38
+ }) {
39
+ super(message);
40
+ this[_a] = true;
41
+ this.name = name14;
42
+ this.cause = cause;
43
+ }
44
+ /**
45
+ * Checks if the given error is an AI SDK Error.
46
+ * @param {unknown} error - The error to check.
47
+ * @returns {boolean} True if the error is an AI SDK Error, false otherwise.
48
+ */
49
+ static isInstance(error) {
50
+ return _AISDKError2.hasMarker(error, marker);
51
+ }
52
+ static hasMarker(error, marker15) {
53
+ const markerSymbol = Symbol.for(marker15);
54
+ return error != null && typeof error === "object" && markerSymbol in error && typeof error[markerSymbol] === "boolean" && error[markerSymbol] === true;
55
+ }
56
+ };
57
+ _a = symbol;
58
+ var AISDKError = _AISDKError;
59
+ var name = "AI_APICallError";
60
+ var marker2 = `vercel.ai.error.${name}`;
61
+ var symbol2 = Symbol.for(marker2);
62
+ var _a2;
63
+ var APICallError = class extends AISDKError {
64
+ constructor({
65
+ message,
66
+ url,
67
+ requestBodyValues,
68
+ statusCode,
69
+ responseHeaders,
70
+ responseBody,
71
+ cause,
72
+ isRetryable = statusCode != null && (statusCode === 408 || // request timeout
73
+ statusCode === 409 || // conflict
74
+ statusCode === 429 || // too many requests
75
+ statusCode >= 500),
76
+ // server error
77
+ data
78
+ }) {
79
+ super({ name, message, cause });
80
+ this[_a2] = true;
81
+ this.url = url;
82
+ this.requestBodyValues = requestBodyValues;
83
+ this.statusCode = statusCode;
84
+ this.responseHeaders = responseHeaders;
85
+ this.responseBody = responseBody;
86
+ this.isRetryable = isRetryable;
87
+ this.data = data;
88
+ }
89
+ static isInstance(error) {
90
+ return AISDKError.hasMarker(error, marker2);
91
+ }
92
+ };
93
+ _a2 = symbol2;
94
+ var name2 = "AI_EmptyResponseBodyError";
95
+ var marker3 = `vercel.ai.error.${name2}`;
96
+ var symbol3 = Symbol.for(marker3);
97
+ var _a3;
98
+ var EmptyResponseBodyError = class extends AISDKError {
99
+ // used in isInstance
100
+ constructor({ message = "Empty response body" } = {}) {
101
+ super({ name: name2, message });
102
+ this[_a3] = true;
103
+ }
104
+ static isInstance(error) {
105
+ return AISDKError.hasMarker(error, marker3);
106
+ }
107
+ };
108
+ _a3 = symbol3;
109
+ function getErrorMessage(error) {
110
+ if (error == null) {
111
+ return "unknown error";
112
+ }
113
+ if (typeof error === "string") {
114
+ return error;
115
+ }
116
+ if (error instanceof Error) {
117
+ return error.message;
118
+ }
119
+ return JSON.stringify(error);
120
+ }
121
+ var name3 = "AI_InvalidArgumentError";
122
+ var marker4 = `vercel.ai.error.${name3}`;
123
+ var symbol4 = Symbol.for(marker4);
124
+ var _a4;
125
+ var InvalidArgumentError = class extends AISDKError {
126
+ constructor({
127
+ message,
128
+ cause,
129
+ argument
130
+ }) {
131
+ super({ name: name3, message, cause });
132
+ this[_a4] = true;
133
+ this.argument = argument;
134
+ }
135
+ static isInstance(error) {
136
+ return AISDKError.hasMarker(error, marker4);
137
+ }
138
+ };
139
+ _a4 = symbol4;
140
+ var name4 = "AI_InvalidPromptError";
141
+ var marker5 = `vercel.ai.error.${name4}`;
142
+ var symbol5 = Symbol.for(marker5);
143
+ var _a5;
144
+ var InvalidPromptError = class extends AISDKError {
145
+ constructor({
146
+ prompt,
147
+ message,
148
+ cause
149
+ }) {
150
+ super({ name: name4, message: `Invalid prompt: ${message}`, cause });
151
+ this[_a5] = true;
152
+ this.prompt = prompt;
153
+ }
154
+ static isInstance(error) {
155
+ return AISDKError.hasMarker(error, marker5);
156
+ }
157
+ };
158
+ _a5 = symbol5;
159
+ var name5 = "AI_InvalidResponseDataError";
160
+ var marker6 = `vercel.ai.error.${name5}`;
161
+ var symbol6 = Symbol.for(marker6);
162
+ var _a6;
163
+ var InvalidResponseDataError = class extends AISDKError {
164
+ constructor({
165
+ data,
166
+ message = `Invalid response data: ${JSON.stringify(data)}.`
167
+ }) {
168
+ super({ name: name5, message });
169
+ this[_a6] = true;
170
+ this.data = data;
171
+ }
172
+ static isInstance(error) {
173
+ return AISDKError.hasMarker(error, marker6);
174
+ }
175
+ };
176
+ _a6 = symbol6;
177
+ var name6 = "AI_JSONParseError";
178
+ var marker7 = `vercel.ai.error.${name6}`;
179
+ var symbol7 = Symbol.for(marker7);
180
+ var _a7;
181
+ var JSONParseError = class extends AISDKError {
182
+ constructor({ text, cause }) {
183
+ super({
184
+ name: name6,
185
+ message: `JSON parsing failed: Text: ${text}.
186
+ Error message: ${getErrorMessage(cause)}`,
187
+ cause
188
+ });
189
+ this[_a7] = true;
190
+ this.text = text;
191
+ }
192
+ static isInstance(error) {
193
+ return AISDKError.hasMarker(error, marker7);
194
+ }
195
+ };
196
+ _a7 = symbol7;
197
+ var name7 = "AI_LoadAPIKeyError";
198
+ var marker8 = `vercel.ai.error.${name7}`;
199
+ var symbol8 = Symbol.for(marker8);
200
+ var _a8;
201
+ var LoadAPIKeyError = class extends AISDKError {
202
+ // used in isInstance
203
+ constructor({ message }) {
204
+ super({ name: name7, message });
205
+ this[_a8] = true;
206
+ }
207
+ static isInstance(error) {
208
+ return AISDKError.hasMarker(error, marker8);
209
+ }
210
+ };
211
+ _a8 = symbol8;
212
+ var name8 = "AI_LoadSettingError";
213
+ var marker9 = `vercel.ai.error.${name8}`;
214
+ var symbol9 = Symbol.for(marker9);
215
+ var _a9;
216
+ _a9 = symbol9;
217
+ var name9 = "AI_NoContentGeneratedError";
218
+ var marker10 = `vercel.ai.error.${name9}`;
219
+ var symbol10 = Symbol.for(marker10);
220
+ var _a10;
221
+ _a10 = symbol10;
222
+ var name10 = "AI_NoSuchModelError";
223
+ var marker11 = `vercel.ai.error.${name10}`;
224
+ var symbol11 = Symbol.for(marker11);
225
+ var _a11;
226
+ _a11 = symbol11;
227
+ var name11 = "AI_TooManyEmbeddingValuesForCallError";
228
+ var marker12 = `vercel.ai.error.${name11}`;
229
+ var symbol12 = Symbol.for(marker12);
230
+ var _a12;
231
+ _a12 = symbol12;
232
+ var name12 = "AI_TypeValidationError";
233
+ var marker13 = `vercel.ai.error.${name12}`;
234
+ var symbol13 = Symbol.for(marker13);
235
+ var _a13;
236
+ var _TypeValidationError = class _TypeValidationError2 extends AISDKError {
237
+ constructor({ value, cause }) {
238
+ super({
239
+ name: name12,
240
+ message: `Type validation failed: Value: ${JSON.stringify(value)}.
241
+ Error message: ${getErrorMessage(cause)}`,
242
+ cause
243
+ });
244
+ this[_a13] = true;
245
+ this.value = value;
246
+ }
247
+ static isInstance(error) {
248
+ return AISDKError.hasMarker(error, marker13);
249
+ }
250
+ /**
251
+ * Wraps an error into a TypeValidationError.
252
+ * If the cause is already a TypeValidationError with the same value, it returns the cause.
253
+ * Otherwise, it creates a new TypeValidationError.
254
+ *
255
+ * @param {Object} params - The parameters for wrapping the error.
256
+ * @param {unknown} params.value - The value that failed validation.
257
+ * @param {unknown} params.cause - The original error or cause of the validation failure.
258
+ * @returns {TypeValidationError} A TypeValidationError instance.
259
+ */
260
+ static wrap({
261
+ value,
262
+ cause
263
+ }) {
264
+ return _TypeValidationError2.isInstance(cause) && cause.value === value ? cause : new _TypeValidationError2({ value, cause });
265
+ }
266
+ };
267
+ _a13 = symbol13;
268
+ var TypeValidationError = _TypeValidationError;
269
+ var name13 = "AI_UnsupportedFunctionalityError";
270
+ var marker14 = `vercel.ai.error.${name13}`;
271
+ var symbol14 = Symbol.for(marker14);
272
+ var _a14;
273
+ var UnsupportedFunctionalityError = class extends AISDKError {
274
+ constructor({
275
+ functionality,
276
+ message = `'${functionality}' functionality not supported.`
277
+ }) {
278
+ super({ name: name13, message });
279
+ this[_a14] = true;
280
+ this.functionality = functionality;
281
+ }
282
+ static isInstance(error) {
283
+ return AISDKError.hasMarker(error, marker14);
284
+ }
285
+ };
286
+ _a14 = symbol14;
287
+
288
+ // node_modules/.pnpm/eventsource-parser@3.0.3/node_modules/eventsource-parser/dist/index.js
289
+ var ParseError = class extends Error {
290
+ constructor(message, options) {
291
+ super(message), this.name = "ParseError", this.type = options.type, this.field = options.field, this.value = options.value, this.line = options.line;
292
+ }
293
+ };
294
+ function noop(_arg) {
295
+ }
296
+ function createParser(callbacks) {
297
+ if (typeof callbacks == "function")
298
+ throw new TypeError(
299
+ "`callbacks` must be an object, got a function instead. Did you mean `{onEvent: fn}`?"
300
+ );
301
+ const { onEvent = noop, onError = noop, onRetry = noop, onComment } = callbacks;
302
+ let incompleteLine = "", isFirstChunk = true, id, data = "", eventType = "";
303
+ function feed(newChunk) {
304
+ const chunk = isFirstChunk ? newChunk.replace(/^\xEF\xBB\xBF/, "") : newChunk, [complete, incomplete] = splitLines(`${incompleteLine}${chunk}`);
305
+ for (const line of complete)
306
+ parseLine(line);
307
+ incompleteLine = incomplete, isFirstChunk = false;
308
+ }
309
+ function parseLine(line) {
310
+ if (line === "") {
311
+ dispatchEvent();
312
+ return;
313
+ }
314
+ if (line.startsWith(":")) {
315
+ onComment && onComment(line.slice(line.startsWith(": ") ? 2 : 1));
316
+ return;
317
+ }
318
+ const fieldSeparatorIndex = line.indexOf(":");
319
+ if (fieldSeparatorIndex !== -1) {
320
+ const field = line.slice(0, fieldSeparatorIndex), offset = line[fieldSeparatorIndex + 1] === " " ? 2 : 1, value = line.slice(fieldSeparatorIndex + offset);
321
+ processField(field, value, line);
322
+ return;
29
323
  }
30
- return target;
324
+ processField(line, "", line);
325
+ }
326
+ function processField(field, value, line) {
327
+ switch (field) {
328
+ case "event":
329
+ eventType = value;
330
+ break;
331
+ case "data":
332
+ data = `${data}${value}
333
+ `;
334
+ break;
335
+ case "id":
336
+ id = value.includes("\0") ? void 0 : value;
337
+ break;
338
+ case "retry":
339
+ /^\d+$/.test(value) ? onRetry(parseInt(value, 10)) : onError(
340
+ new ParseError(`Invalid \`retry\` value: "${value}"`, {
341
+ type: "invalid-retry",
342
+ value,
343
+ line
344
+ })
345
+ );
346
+ break;
347
+ default:
348
+ onError(
349
+ new ParseError(
350
+ `Unknown field "${field.length > 20 ? `${field.slice(0, 20)}\u2026` : field}"`,
351
+ { type: "unknown-field", field, value, line }
352
+ )
353
+ );
354
+ break;
355
+ }
356
+ }
357
+ function dispatchEvent() {
358
+ data.length > 0 && onEvent({
359
+ id,
360
+ event: eventType || void 0,
361
+ // If the data buffer's last character is a U+000A LINE FEED (LF) character,
362
+ // then remove the last character from the data buffer.
363
+ data: data.endsWith(`
364
+ `) ? data.slice(0, -1) : data
365
+ }), id = void 0, data = "", eventType = "";
366
+ }
367
+ function reset(options = {}) {
368
+ incompleteLine && options.consume && parseLine(incompleteLine), isFirstChunk = true, id = void 0, data = "", eventType = "", incompleteLine = "";
369
+ }
370
+ return { feed, reset };
371
+ }
372
+ function splitLines(chunk) {
373
+ const lines = [];
374
+ let incompleteLine = "", searchIndex = 0;
375
+ for (; searchIndex < chunk.length; ) {
376
+ const crIndex = chunk.indexOf("\r", searchIndex), lfIndex = chunk.indexOf(`
377
+ `, searchIndex);
378
+ let lineEnd = -1;
379
+ if (crIndex !== -1 && lfIndex !== -1 ? lineEnd = Math.min(crIndex, lfIndex) : crIndex !== -1 ? lineEnd = crIndex : lfIndex !== -1 && (lineEnd = lfIndex), lineEnd === -1) {
380
+ incompleteLine = chunk.slice(searchIndex);
381
+ break;
382
+ } else {
383
+ const line = chunk.slice(searchIndex, lineEnd);
384
+ lines.push(line), searchIndex = lineEnd + 1, chunk[searchIndex - 1] === "\r" && chunk[searchIndex] === `
385
+ ` && searchIndex++;
386
+ }
387
+ }
388
+ return [lines, incompleteLine];
389
+ }
390
+
391
+ // node_modules/.pnpm/eventsource-parser@3.0.3/node_modules/eventsource-parser/dist/stream.js
392
+ var EventSourceParserStream = class extends TransformStream {
393
+ constructor({ onError, onRetry, onComment } = {}) {
394
+ let parser;
395
+ super({
396
+ start(controller) {
397
+ parser = createParser({
398
+ onEvent: (event) => {
399
+ controller.enqueue(event);
400
+ },
401
+ onError(error) {
402
+ onError === "terminate" ? controller.error(error) : typeof onError == "function" && onError(error);
403
+ },
404
+ onRetry,
405
+ onComment
406
+ });
407
+ },
408
+ transform(chunk) {
409
+ parser.feed(chunk);
410
+ }
411
+ });
412
+ }
31
413
  };
32
414
 
33
- // src/openrouter-facade.ts
34
- import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
415
+ // node_modules/.pnpm/@ai-sdk+provider-utils@3.0.0-beta.2_zod@3.25.74/node_modules/@ai-sdk/provider-utils/dist/index.mjs
416
+ import * as z4 from "zod/v4";
417
+
418
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/Options.js
419
+ var ignoreOverride = Symbol("Let zodToJsonSchema decide on which parser to use");
420
+
421
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/selectParser.js
422
+ import { ZodFirstPartyTypeKind as ZodFirstPartyTypeKind3 } from "zod";
423
+
424
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/array.js
425
+ import { ZodFirstPartyTypeKind } from "zod";
426
+
427
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/record.js
428
+ import { ZodFirstPartyTypeKind as ZodFirstPartyTypeKind2 } from "zod";
429
+
430
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/string.js
431
+ var ALPHA_NUMERIC = new Set("ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789");
432
+
433
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/object.js
434
+ import { ZodOptional } from "zod";
435
+
436
+ // node_modules/.pnpm/@ai-sdk+provider-utils@3.0.0-beta.2_zod@3.25.74/node_modules/@ai-sdk/provider-utils/dist/index.mjs
437
+ function combineHeaders(...headers) {
438
+ return headers.reduce(
439
+ (combinedHeaders, currentHeaders) => __spreadValues(__spreadValues({}, combinedHeaders), currentHeaders != null ? currentHeaders : {}),
440
+ {}
441
+ );
442
+ }
443
+ function extractResponseHeaders(response) {
444
+ return Object.fromEntries([...response.headers]);
445
+ }
446
+ var createIdGenerator = ({
447
+ prefix,
448
+ size = 16,
449
+ alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
450
+ separator = "-"
451
+ } = {}) => {
452
+ const generator = () => {
453
+ const alphabetLength = alphabet.length;
454
+ const chars = new Array(size);
455
+ for (let i = 0; i < size; i++) {
456
+ chars[i] = alphabet[Math.random() * alphabetLength | 0];
457
+ }
458
+ return chars.join("");
459
+ };
460
+ if (prefix == null) {
461
+ return generator;
462
+ }
463
+ if (alphabet.includes(separator)) {
464
+ throw new InvalidArgumentError({
465
+ argument: "separator",
466
+ message: `The separator "${separator}" must not be part of the alphabet "${alphabet}".`
467
+ });
468
+ }
469
+ return () => `${prefix}${separator}${generator()}`;
470
+ };
471
+ var generateId = createIdGenerator();
472
+ function isAbortError(error) {
473
+ return error instanceof Error && (error.name === "AbortError" || error.name === "TimeoutError");
474
+ }
475
+ var FETCH_FAILED_ERROR_MESSAGES = ["fetch failed", "failed to fetch"];
476
+ function handleFetchError({
477
+ error,
478
+ url,
479
+ requestBodyValues
480
+ }) {
481
+ if (isAbortError(error)) {
482
+ return error;
483
+ }
484
+ if (error instanceof TypeError && FETCH_FAILED_ERROR_MESSAGES.includes(error.message.toLowerCase())) {
485
+ const cause = error.cause;
486
+ if (cause != null) {
487
+ return new APICallError({
488
+ message: `Cannot connect to API: ${cause.message}`,
489
+ cause,
490
+ url,
491
+ requestBodyValues,
492
+ isRetryable: true
493
+ // retry when network error
494
+ });
495
+ }
496
+ }
497
+ return error;
498
+ }
499
+ function removeUndefinedEntries(record) {
500
+ return Object.fromEntries(
501
+ Object.entries(record).filter(([_key, value]) => value != null)
502
+ );
503
+ }
504
+ function loadApiKey({
505
+ apiKey,
506
+ environmentVariableName,
507
+ apiKeyParameterName = "apiKey",
508
+ description
509
+ }) {
510
+ if (typeof apiKey === "string") {
511
+ return apiKey;
512
+ }
513
+ if (apiKey != null) {
514
+ throw new LoadAPIKeyError({
515
+ message: `${description} API key must be a string.`
516
+ });
517
+ }
518
+ if (typeof process === "undefined") {
519
+ throw new LoadAPIKeyError({
520
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
521
+ });
522
+ }
523
+ apiKey = process.env[environmentVariableName];
524
+ if (apiKey == null) {
525
+ throw new LoadAPIKeyError({
526
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
527
+ });
528
+ }
529
+ if (typeof apiKey !== "string") {
530
+ throw new LoadAPIKeyError({
531
+ message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
532
+ });
533
+ }
534
+ return apiKey;
535
+ }
536
+ var suspectProtoRx = /"__proto__"\s*:/;
537
+ var suspectConstructorRx = /"constructor"\s*:/;
538
+ function _parse(text) {
539
+ const obj = JSON.parse(text);
540
+ if (obj === null || typeof obj !== "object") {
541
+ return obj;
542
+ }
543
+ if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {
544
+ return obj;
545
+ }
546
+ return filter(obj);
547
+ }
548
+ function filter(obj) {
549
+ let next = [obj];
550
+ while (next.length) {
551
+ const nodes = next;
552
+ next = [];
553
+ for (const node of nodes) {
554
+ if (Object.prototype.hasOwnProperty.call(node, "__proto__")) {
555
+ throw new SyntaxError("Object contains forbidden prototype property");
556
+ }
557
+ if (Object.prototype.hasOwnProperty.call(node, "constructor") && Object.prototype.hasOwnProperty.call(node.constructor, "prototype")) {
558
+ throw new SyntaxError("Object contains forbidden prototype property");
559
+ }
560
+ for (const key in node) {
561
+ const value = node[key];
562
+ if (value && typeof value === "object") {
563
+ next.push(value);
564
+ }
565
+ }
566
+ }
567
+ }
568
+ return obj;
569
+ }
570
+ function secureJsonParse(text) {
571
+ const { stackTraceLimit } = Error;
572
+ Error.stackTraceLimit = 0;
573
+ try {
574
+ return _parse(text);
575
+ } finally {
576
+ Error.stackTraceLimit = stackTraceLimit;
577
+ }
578
+ }
579
+ var validatorSymbol = Symbol.for("vercel.ai.validator");
580
+ function validator(validate) {
581
+ return { [validatorSymbol]: true, validate };
582
+ }
583
+ function isValidator(value) {
584
+ return typeof value === "object" && value !== null && validatorSymbol in value && value[validatorSymbol] === true && "validate" in value;
585
+ }
586
+ function asValidator(value) {
587
+ return isValidator(value) ? value : standardSchemaValidator(value);
588
+ }
589
+ function standardSchemaValidator(standardSchema) {
590
+ return validator(async (value) => {
591
+ const result = await standardSchema["~standard"].validate(value);
592
+ return result.issues == null ? { success: true, value: result.value } : {
593
+ success: false,
594
+ error: new TypeValidationError({
595
+ value,
596
+ cause: result.issues
597
+ })
598
+ };
599
+ });
600
+ }
601
+ async function validateTypes({
602
+ value,
603
+ schema
604
+ }) {
605
+ const result = await safeValidateTypes({ value, schema });
606
+ if (!result.success) {
607
+ throw TypeValidationError.wrap({ value, cause: result.error });
608
+ }
609
+ return result.value;
610
+ }
611
+ async function safeValidateTypes({
612
+ value,
613
+ schema
614
+ }) {
615
+ const validator2 = asValidator(schema);
616
+ try {
617
+ if (validator2.validate == null) {
618
+ return { success: true, value, rawValue: value };
619
+ }
620
+ const result = await validator2.validate(value);
621
+ if (result.success) {
622
+ return { success: true, value: result.value, rawValue: value };
623
+ }
624
+ return {
625
+ success: false,
626
+ error: TypeValidationError.wrap({ value, cause: result.error }),
627
+ rawValue: value
628
+ };
629
+ } catch (error) {
630
+ return {
631
+ success: false,
632
+ error: TypeValidationError.wrap({ value, cause: error }),
633
+ rawValue: value
634
+ };
635
+ }
636
+ }
637
+ async function parseJSON({
638
+ text,
639
+ schema
640
+ }) {
641
+ try {
642
+ const value = secureJsonParse(text);
643
+ if (schema == null) {
644
+ return value;
645
+ }
646
+ return validateTypes({ value, schema });
647
+ } catch (error) {
648
+ if (JSONParseError.isInstance(error) || TypeValidationError.isInstance(error)) {
649
+ throw error;
650
+ }
651
+ throw new JSONParseError({ text, cause: error });
652
+ }
653
+ }
654
+ async function safeParseJSON({
655
+ text,
656
+ schema
657
+ }) {
658
+ try {
659
+ const value = secureJsonParse(text);
660
+ if (schema == null) {
661
+ return { success: true, value, rawValue: value };
662
+ }
663
+ return await safeValidateTypes({ value, schema });
664
+ } catch (error) {
665
+ return {
666
+ success: false,
667
+ error: JSONParseError.isInstance(error) ? error : new JSONParseError({ text, cause: error }),
668
+ rawValue: void 0
669
+ };
670
+ }
671
+ }
672
+ function isParsableJson(input) {
673
+ try {
674
+ secureJsonParse(input);
675
+ return true;
676
+ } catch (e) {
677
+ return false;
678
+ }
679
+ }
680
+ function parseJsonEventStream({
681
+ stream,
682
+ schema
683
+ }) {
684
+ return stream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(
685
+ new TransformStream({
686
+ async transform({ data }, controller) {
687
+ if (data === "[DONE]") {
688
+ return;
689
+ }
690
+ controller.enqueue(await safeParseJSON({ text: data, schema }));
691
+ }
692
+ })
693
+ );
694
+ }
695
+ var getOriginalFetch2 = () => globalThis.fetch;
696
+ var postJsonToApi = async ({
697
+ url,
698
+ headers,
699
+ body,
700
+ failedResponseHandler,
701
+ successfulResponseHandler,
702
+ abortSignal,
703
+ fetch
704
+ }) => postToApi({
705
+ url,
706
+ headers: __spreadValues({
707
+ "Content-Type": "application/json"
708
+ }, headers),
709
+ body: {
710
+ content: JSON.stringify(body),
711
+ values: body
712
+ },
713
+ failedResponseHandler,
714
+ successfulResponseHandler,
715
+ abortSignal,
716
+ fetch
717
+ });
718
+ var postToApi = async ({
719
+ url,
720
+ headers = {},
721
+ body,
722
+ successfulResponseHandler,
723
+ failedResponseHandler,
724
+ abortSignal,
725
+ fetch = getOriginalFetch2()
726
+ }) => {
727
+ try {
728
+ const response = await fetch(url, {
729
+ method: "POST",
730
+ headers: removeUndefinedEntries(headers),
731
+ body: body.content,
732
+ signal: abortSignal
733
+ });
734
+ const responseHeaders = extractResponseHeaders(response);
735
+ if (!response.ok) {
736
+ let errorInformation;
737
+ try {
738
+ errorInformation = await failedResponseHandler({
739
+ response,
740
+ url,
741
+ requestBodyValues: body.values
742
+ });
743
+ } catch (error) {
744
+ if (isAbortError(error) || APICallError.isInstance(error)) {
745
+ throw error;
746
+ }
747
+ throw new APICallError({
748
+ message: "Failed to process error response",
749
+ cause: error,
750
+ statusCode: response.status,
751
+ url,
752
+ responseHeaders,
753
+ requestBodyValues: body.values
754
+ });
755
+ }
756
+ throw errorInformation.value;
757
+ }
758
+ try {
759
+ return await successfulResponseHandler({
760
+ response,
761
+ url,
762
+ requestBodyValues: body.values
763
+ });
764
+ } catch (error) {
765
+ if (error instanceof Error) {
766
+ if (isAbortError(error) || APICallError.isInstance(error)) {
767
+ throw error;
768
+ }
769
+ }
770
+ throw new APICallError({
771
+ message: "Failed to process successful response",
772
+ cause: error,
773
+ statusCode: response.status,
774
+ url,
775
+ responseHeaders,
776
+ requestBodyValues: body.values
777
+ });
778
+ }
779
+ } catch (error) {
780
+ throw handleFetchError({ error, url, requestBodyValues: body.values });
781
+ }
782
+ };
783
+ var createJsonErrorResponseHandler = ({
784
+ errorSchema,
785
+ errorToMessage,
786
+ isRetryable
787
+ }) => async ({ response, url, requestBodyValues }) => {
788
+ const responseBody = await response.text();
789
+ const responseHeaders = extractResponseHeaders(response);
790
+ if (responseBody.trim() === "") {
791
+ return {
792
+ responseHeaders,
793
+ value: new APICallError({
794
+ message: response.statusText,
795
+ url,
796
+ requestBodyValues,
797
+ statusCode: response.status,
798
+ responseHeaders,
799
+ responseBody,
800
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
801
+ })
802
+ };
803
+ }
804
+ try {
805
+ const parsedError = await parseJSON({
806
+ text: responseBody,
807
+ schema: errorSchema
808
+ });
809
+ return {
810
+ responseHeaders,
811
+ value: new APICallError({
812
+ message: errorToMessage(parsedError),
813
+ url,
814
+ requestBodyValues,
815
+ statusCode: response.status,
816
+ responseHeaders,
817
+ responseBody,
818
+ data: parsedError,
819
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
820
+ })
821
+ };
822
+ } catch (parseError) {
823
+ return {
824
+ responseHeaders,
825
+ value: new APICallError({
826
+ message: response.statusText,
827
+ url,
828
+ requestBodyValues,
829
+ statusCode: response.status,
830
+ responseHeaders,
831
+ responseBody,
832
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
833
+ })
834
+ };
835
+ }
836
+ };
837
+ var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
838
+ const responseHeaders = extractResponseHeaders(response);
839
+ if (response.body == null) {
840
+ throw new EmptyResponseBodyError({});
841
+ }
842
+ return {
843
+ responseHeaders,
844
+ value: parseJsonEventStream({
845
+ stream: response.body,
846
+ schema: chunkSchema
847
+ })
848
+ };
849
+ };
850
+ var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
851
+ const responseBody = await response.text();
852
+ const parsedResult = await safeParseJSON({
853
+ text: responseBody,
854
+ schema: responseSchema
855
+ });
856
+ const responseHeaders = extractResponseHeaders(response);
857
+ if (!parsedResult.success) {
858
+ throw new APICallError({
859
+ message: "Invalid JSON response",
860
+ cause: parsedResult.error,
861
+ statusCode: response.status,
862
+ responseHeaders,
863
+ responseBody,
864
+ url,
865
+ requestBodyValues
866
+ });
867
+ }
868
+ return {
869
+ responseHeaders,
870
+ value: parsedResult.value,
871
+ rawValue: parsedResult.rawValue
872
+ };
873
+ };
874
+ var schemaSymbol = Symbol.for("vercel.ai.schema");
875
+ var { btoa, atob } = globalThis;
876
+ function convertUint8ArrayToBase64(array) {
877
+ let latin1string = "";
878
+ for (let i = 0; i < array.length; i++) {
879
+ latin1string += String.fromCodePoint(array[i]);
880
+ }
881
+ return btoa(latin1string);
882
+ }
883
+ function withoutTrailingSlash(url) {
884
+ return url == null ? void 0 : url.replace(/\/$/, "");
885
+ }
886
+
887
+ // src/chat/index.ts
888
+ import { z as z5 } from "zod/v4";
35
889
 
36
890
  // src/schemas/reasoning-details.ts
37
- import { z } from "zod";
891
+ import { z } from "zod/v4";
38
892
  var ReasoningDetailSummarySchema = z.object({
39
893
  type: z.literal("reasoning.summary" /* Summary */),
40
894
  summary: z.string()
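
The first hunk above inlines the AI SDK error hierarchy into the bundle. Its `isInstance` checks deliberately avoid `instanceof` and instead look for a `Symbol.for`-based marker, so detection can keep working even when two bundles each carry their own copy of a class. A minimal standalone sketch of that pattern (an illustration, not the package's exports):

```ts
// Minimal sketch of the Symbol.for marker pattern used by the inlined AISDKError.
// `DemoError` is a local example class, not part of the package.
const marker = "vercel.ai.error";
const markerSymbol = Symbol.for(marker);

class DemoError extends Error {
  constructor(message: string) {
    super(message);
    // Brand the instance; Symbol.for returns the same symbol in every bundle
    // copy, so the brand survives duplicated class definitions.
    Object.defineProperty(this, markerSymbol, { value: true });
  }

  static isInstance(error: unknown): error is DemoError {
    return (
      error != null &&
      typeof error === "object" &&
      markerSymbol in error &&
      (error as { [key: symbol]: unknown })[markerSymbol] === true
    );
  }
}

console.log(DemoError.isInstance(new DemoError("boom"))); // true
console.log(DemoError.isInstance(new Error("boom"))); // false
```
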
@@ -59,56 +913,79 @@ var ReasoningDetailsWithUnknownSchema = z.union([
59
913
  ]);
60
914
  var ReasoningDetailArraySchema = z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));
61
915
 
62
- // src/openrouter-chat-language-model.ts
63
- import {
64
- InvalidResponseDataError,
65
- UnsupportedFunctionalityError
66
- } from "@ai-sdk/provider";
67
- import {
68
- combineHeaders,
69
- createEventSourceResponseHandler,
70
- createJsonResponseHandler,
71
- generateId,
72
- isParsableJson,
73
- postJsonToApi
74
- } from "@ai-sdk/provider-utils";
75
- import { z as z3 } from "zod";
916
+ // src/schemas/error-response.ts
917
+ import { z as z2 } from "zod/v4";
918
+ var OpenRouterErrorResponseSchema = z2.object({
919
+ error: z2.object({
920
+ code: z2.union([z2.string(), z2.number()]).nullable(),
921
+ message: z2.string(),
922
+ type: z2.string().nullable(),
923
+ param: z2.any().nullable()
924
+ })
925
+ });
926
+ var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
927
+ errorSchema: OpenRouterErrorResponseSchema,
928
+ errorToMessage: (data) => data.error.message
929
+ });
76
930
 
77
- // src/convert-to-openrouter-chat-messages.ts
78
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
931
+ // src/utils/map-finish-reason.ts
932
+ function mapOpenRouterFinishReason(finishReason) {
933
+ switch (finishReason) {
934
+ case "stop":
935
+ return "stop";
936
+ case "length":
937
+ return "length";
938
+ case "content_filter":
939
+ return "content-filter";
940
+ case "function_call":
941
+ case "tool_calls":
942
+ return "tool-calls";
943
+ default:
944
+ return "unknown";
945
+ }
946
+ }
947
+
948
+ // src/chat/convert-to-openrouter-chat-messages.ts
79
949
  function getCacheControl(providerMetadata) {
80
- var _a, _b, _c;
950
+ var _a15, _b, _c;
81
951
  const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
82
952
  const openrouter2 = providerMetadata == null ? void 0 : providerMetadata.openrouter;
83
- return (_c = (_b = (_a = openrouter2 == null ? void 0 : openrouter2.cacheControl) != null ? _a : openrouter2 == null ? void 0 : openrouter2.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
953
+ return (_c = (_b = (_a15 = openrouter2 == null ? void 0 : openrouter2.cacheControl) != null ? _a15 : openrouter2 == null ? void 0 : openrouter2.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
84
954
  }
85
955
  function convertToOpenRouterChatMessages(prompt) {
86
- var _a, _b, _c;
956
+ var _a15, _b, _c;
87
957
  const messages = [];
88
- for (const { role, content, providerMetadata } of prompt) {
958
+ for (const { role, content, providerOptions } of prompt) {
89
959
  switch (role) {
90
960
  case "system": {
91
961
  messages.push({
92
962
  role: "system",
93
963
  content,
94
- cache_control: getCacheControl(providerMetadata)
964
+ cache_control: getCacheControl(providerOptions)
95
965
  });
96
966
  break;
97
967
  }
98
968
  case "user": {
99
- if (content.length === 1 && ((_a = content[0]) == null ? void 0 : _a.type) === "text") {
969
+ if (content.length === 1 && ((_a15 = content[0]) == null ? void 0 : _a15.type) === "text") {
970
+ const cacheControl = (_b = getCacheControl(providerOptions)) != null ? _b : getCacheControl(content[0].providerOptions);
971
+ const contentWithCacheControl = cacheControl ? [
972
+ {
973
+ type: "text",
974
+ text: content[0].text,
975
+ cache_control: cacheControl
976
+ }
977
+ ] : content[0].text;
100
978
  messages.push({
101
979
  role: "user",
102
- content: content[0].text,
103
- cache_control: (_b = getCacheControl(providerMetadata)) != null ? _b : getCacheControl(content[0].providerMetadata)
980
+ content: contentWithCacheControl
104
981
  });
105
982
  break;
106
983
  }
107
- const messageCacheControl = getCacheControl(providerMetadata);
984
+ const messageCacheControl = getCacheControl(providerOptions);
108
985
  const contentParts = content.map(
109
986
  (part) => {
110
- var _a2, _b2, _c2, _d;
111
- const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
987
+ var _a16, _b2, _c2, _d, _e, _f, _g;
988
+ const cacheControl = (_a16 = getCacheControl(part.providerOptions)) != null ? _a16 : messageCacheControl;
112
989
  switch (part.type) {
113
990
  case "text":
114
991
  return {
@@ -117,33 +994,35 @@ function convertToOpenRouterChatMessages(prompt) {
117
994
  // For text parts, only use part-specific cache control
118
995
  cache_control: cacheControl
119
996
  };
120
- case "image":
121
- return {
122
- type: "image_url",
123
- image_url: {
124
- url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
125
- part.image
126
- )}`
127
- },
128
- // For image parts, use part-specific or message-level cache control
129
- cache_control: cacheControl
130
- };
131
997
  case "file":
998
+ if ((_b2 = part.mediaType) == null ? void 0 : _b2.startsWith("image/")) {
999
+ return {
1000
+ type: "image_url",
1001
+ image_url: {
1002
+ url: part.data instanceof URL ? part.data.toString() : `data:${(_c2 = part.mediaType) != null ? _c2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
1003
+ part.data instanceof Uint8Array ? part.data : new Uint8Array()
1004
+ )}`
1005
+ },
1006
+ // For image parts, use part-specific or message-level cache control
1007
+ cache_control: cacheControl
1008
+ };
1009
+ }
132
1010
  return {
133
1011
  type: "file",
134
1012
  file: {
135
1013
  filename: String(
136
- (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
1014
+ (_g = (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.filename) != null ? _f : part.filename) != null ? _g : ""
137
1015
  ),
138
- file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
1016
+ file_data: part.data instanceof Uint8Array ? `data:${part.mediaType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mediaType};base64,${part.data}`
139
1017
  },
140
1018
  cache_control: cacheControl
141
1019
  };
142
1020
  default: {
143
- const _exhaustiveCheck = part;
144
- throw new Error(
145
- `Unsupported content part type: ${_exhaustiveCheck}`
146
- );
1021
+ return {
1022
+ type: "text",
1023
+ text: "",
1024
+ cache_control: cacheControl
1025
+ };
147
1026
  }
148
1027
  }
149
1028
  }
@@ -171,7 +1050,7 @@ function convertToOpenRouterChatMessages(prompt) {
171
1050
  type: "function",
172
1051
  function: {
173
1052
  name: part.toolName,
174
- arguments: JSON.stringify(part.args)
1053
+ arguments: JSON.stringify(part.input)
175
1054
  }
176
1055
  });
177
1056
  break;
@@ -180,23 +1059,14 @@ function convertToOpenRouterChatMessages(prompt) {
180
1059
  reasoning += part.text;
181
1060
  reasoningDetails.push({
182
1061
  type: "reasoning.text" /* Text */,
183
- text: part.text,
184
- signature: part.signature
185
- });
186
- break;
187
- }
188
- case "redacted-reasoning": {
189
- reasoningDetails.push({
190
- type: "reasoning.encrypted" /* Encrypted */,
191
- data: part.data
1062
+ text: part.text
192
1063
  });
193
1064
  break;
194
1065
  }
195
1066
  case "file":
196
1067
  break;
197
1068
  default: {
198
- const _exhaustiveCheck = part;
199
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
1069
+ break;
200
1070
  }
201
1071
  }
202
1072
  }
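
The conversion hunks above switch from the v1 `providerMetadata` field to v2's `providerOptions`, and `getCacheControl` reads either camelCase or snake_case keys under the `openrouter` or `anthropic` namespaces. A hedged sketch of the input shape and the message the conversion should produce, based on the code above (the concrete values are illustrative):

```ts
// Illustrative prompt entry in the shape convertToOpenRouterChatMessages reads.
// Field names follow the hunks above; the values are made up.
const systemMessage = {
  role: "system" as const,
  content: "You are a terse assistant.",
  providerOptions: {
    // Also accepted per getCacheControl: openrouter.cache_control,
    // anthropic.cacheControl, anthropic.cache_control.
    openrouter: { cacheControl: { type: "ephemeral" } },
  },
};

// Expected OpenRouter chat message produced by the conversion:
// { role: "system", content: "You are a terse assistant.",
//   cache_control: { type: "ephemeral" } }
```
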
@@ -206,95 +1076,86 @@ function convertToOpenRouterChatMessages(prompt) {
206
1076
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
207
1077
  reasoning: reasoning || void 0,
208
1078
  reasoning_details: reasoningDetails.length > 0 ? reasoningDetails : void 0,
209
- cache_control: getCacheControl(providerMetadata)
1079
+ cache_control: getCacheControl(providerOptions)
210
1080
  });
211
1081
  break;
212
1082
  }
213
1083
  case "tool": {
214
1084
  for (const toolResponse of content) {
1085
+ const content2 = getToolResultContent(toolResponse);
215
1086
  messages.push({
216
1087
  role: "tool",
217
1088
  tool_call_id: toolResponse.toolCallId,
218
- content: JSON.stringify(toolResponse.result),
219
- cache_control: (_c = getCacheControl(providerMetadata)) != null ? _c : getCacheControl(toolResponse.providerMetadata)
1089
+ content: content2,
1090
+ cache_control: (_c = getCacheControl(providerOptions)) != null ? _c : getCacheControl(toolResponse.providerOptions)
220
1091
  });
221
1092
  }
222
1093
  break;
223
1094
  }
224
1095
  default: {
225
- const _exhaustiveCheck = role;
226
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1096
+ break;
227
1097
  }
228
1098
  }
229
1099
  }
230
1100
  return messages;
231
1101
  }
232
-
233
- // src/map-openrouter-chat-logprobs.ts
234
- function mapOpenRouterChatLogProbsOutput(logprobs) {
235
- var _a, _b;
236
- return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
237
- token,
238
- logprob,
239
- topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
240
- token: token2,
241
- logprob: logprob2
242
- })) : []
243
- }))) != null ? _b : void 0;
1102
+ function getToolResultContent(input) {
1103
+ return input.output.type === "text" ? input.output.value : JSON.stringify(input.output.value);
244
1104
  }
245
1105
 
246
- // src/map-openrouter-finish-reason.ts
247
- function mapOpenRouterFinishReason(finishReason) {
248
- switch (finishReason) {
249
- case "stop":
250
- return "stop";
251
- case "length":
252
- return "length";
253
- case "content_filter":
254
- return "content-filter";
255
- case "function_call":
256
- case "tool_calls":
257
- return "tool-calls";
258
- default:
259
- return "unknown";
1106
+ // src/chat/get-tool-choice.ts
1107
+ import { z as z3 } from "zod/v4";
1108
+ var ChatCompletionToolChoiceSchema = z3.union([
1109
+ z3.literal("auto"),
1110
+ z3.literal("none"),
1111
+ z3.literal("required"),
1112
+ z3.object({
1113
+ type: z3.literal("function"),
1114
+ function: z3.object({
1115
+ name: z3.string()
1116
+ })
1117
+ })
1118
+ ]);
1119
+ function getChatCompletionToolChoice(toolChoice) {
1120
+ switch (toolChoice.type) {
1121
+ case "auto":
1122
+ case "none":
1123
+ case "required":
1124
+ return toolChoice.type;
1125
+ case "tool": {
1126
+ return {
1127
+ type: "function",
1128
+ function: { name: toolChoice.toolName }
1129
+ };
1130
+ }
1131
+ default: {
1132
+ toolChoice;
1133
+ throw new Error(`Invalid tool choice type: ${toolChoice}`);
1134
+ }
260
1135
  }
261
1136
  }
262
1137
 
263
- // src/openrouter-error.ts
264
- import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
265
- import { z as z2 } from "zod";
266
- var OpenRouterErrorResponseSchema = z2.object({
267
- error: z2.object({
268
- message: z2.string(),
269
- type: z2.string(),
270
- param: z2.any().nullable(),
271
- code: z2.string().nullable()
272
- })
273
- });
274
- var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
275
- errorSchema: OpenRouterErrorResponseSchema,
276
- errorToMessage: (data) => data.error.message
277
- });
278
-
279
- // src/openrouter-chat-language-model.ts
280
- function isFunctionTool(tool) {
281
- return "parameters" in tool;
282
- }
1138
+ // src/chat/index.ts
283
1139
  var OpenRouterChatLanguageModel = class {
284
1140
  constructor(modelId, settings, config) {
285
- this.specificationVersion = "v1";
1141
+ this.specificationVersion = "v2";
1142
+ this.provider = "openrouter";
286
1143
  this.defaultObjectGenerationMode = "tool";
1144
+ this.supportedUrls = {
1145
+ "image/*": [
1146
+ /^data:image\/[a-zA-Z]+;base64,/,
1147
+ /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i
1148
+ ],
1149
+ // 'text/*': [/^data:text\//, /^https?:\/\/.+$/],
1150
+ "application/*": [/^data:application\//, /^https?:\/\/.+$/]
1151
+ };
287
1152
  this.modelId = modelId;
288
1153
  this.settings = settings;
289
1154
  this.config = config;
290
1155
  }
291
- get provider() {
292
- return this.config.provider;
293
- }
294
1156
  getArgs({
295
- mode,
296
1157
  prompt,
297
- maxTokens,
1158
+ maxOutputTokens,
298
1159
  temperature,
299
1160
  topP,
300
1161
  frequencyPenalty,
@@ -303,12 +1164,10 @@ var OpenRouterChatLanguageModel = class {
303
1164
  stopSequences,
304
1165
  responseFormat,
305
1166
  topK,
306
- providerMetadata
1167
+ tools,
1168
+ toolChoice
307
1169
  }) {
308
- var _a;
309
- const type = mode.type;
310
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
311
- const baseArgs = __spreadValues(__spreadValues(__spreadValues({
1170
+ const baseArgs = __spreadValues(__spreadValues({
312
1171
  // model id:
313
1172
  model: this.modelId,
314
1173
  models: this.settings.models,
@@ -319,7 +1178,7 @@ var OpenRouterChatLanguageModel = class {
319
1178
  user: this.settings.user,
320
1179
  parallel_tool_calls: this.settings.parallelToolCalls,
321
1180
  // standardized settings:
322
- max_tokens: maxTokens,
1181
+ max_tokens: maxOutputTokens,
323
1182
  temperature,
324
1183
  top_p: topP,
325
1184
  frequency_penalty: frequencyPenalty,
@@ -334,44 +1193,34 @@ var OpenRouterChatLanguageModel = class {
334
1193
  include_reasoning: this.settings.includeReasoning,
335
1194
  reasoning: this.settings.reasoning,
336
1195
  usage: this.settings.usage
337
- }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
338
- switch (type) {
339
- case "regular": {
340
- return __spreadValues(__spreadValues({}, baseArgs), prepareToolsAndToolChoice(mode));
341
- }
342
- case "object-json": {
343
- return __spreadProps(__spreadValues({}, baseArgs), {
344
- response_format: { type: "json_object" }
345
- });
346
- }
347
- case "object-tool": {
348
- return __spreadProps(__spreadValues({}, baseArgs), {
349
- tool_choice: { type: "function", function: { name: mode.tool.name } },
350
- tools: [
351
- {
352
- type: "function",
353
- function: {
354
- name: mode.tool.name,
355
- description: mode.tool.description,
356
- parameters: mode.tool.parameters
357
- }
358
- }
359
- ]
360
- });
361
- }
362
- // Handle all non-text types with a single default case
363
- default: {
364
- const _exhaustiveCheck = type;
365
- throw new UnsupportedFunctionalityError({
366
- functionality: `${_exhaustiveCheck} mode`
367
- });
368
- }
1196
+ }, this.config.extraBody), this.settings.extraBody);
1197
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
1198
+ return __spreadProps(__spreadValues({}, baseArgs), {
1199
+ response_format: { type: "json_object" }
1200
+ });
369
1201
  }
1202
+ if (tools && tools.length > 0) {
1203
+ const mappedTools = tools.filter((tool) => tool.type === "function").map((tool) => ({
1204
+ type: "function",
1205
+ function: {
1206
+ name: tool.name,
1207
+ description: tool.type,
1208
+ parameters: tool.inputSchema
1209
+ }
1210
+ }));
1211
+ return __spreadProps(__spreadValues({}, baseArgs), {
1212
+ tools: mappedTools,
1213
+ tool_choice: toolChoice ? getChatCompletionToolChoice(toolChoice) : void 0
1214
+ });
1215
+ }
1216
+ return baseArgs;
370
1217
  }
371
1218
  async doGenerate(options) {
372
- var _b, _c, _d, _e, _f, _g, _h, _i, _j;
373
- const args = this.getArgs(options);
374
- const { responseHeaders, value: response } = await postJsonToApi({
1219
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t;
1220
+ const providerOptions = options.providerOptions || {};
1221
+ const openrouterOptions = providerOptions.openrouter || {};
1222
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1223
+ const { value: response, responseHeaders } = await postJsonToApi({
375
1224
  url: this.config.url({
376
1225
  path: "/chat/completions",
377
1226
  modelId: this.modelId
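
With the v2 spec, `getArgs` receives `tools` and `toolChoice` directly instead of a `mode` object; function tools are mapped onto OpenRouter's `tools` array and the tool choice goes through `getChatCompletionToolChoice`. A small sketch of that mapping as shown above (standalone, using a local type rather than the SDK's):

```ts
// Sketch of the toolChoice mapping from the hunks above. `ToolChoice` is a
// local approximation of the v2 type, not imported from the SDK.
type ToolChoice =
  | { type: "auto" }
  | { type: "none" }
  | { type: "required" }
  | { type: "tool"; toolName: string };

function toOpenRouterToolChoice(toolChoice: ToolChoice) {
  switch (toolChoice.type) {
    case "auto":
    case "none":
    case "required":
      return toolChoice.type;
    case "tool":
      // Named tools become an OpenAI-style function selector.
      return { type: "function" as const, function: { name: toolChoice.toolName } };
  }
}

// toOpenRouterToolChoice({ type: "tool", toolName: "getWeather" })
// → { type: "function", function: { name: "getWeather" } }
```
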
@@ -385,46 +1234,33 @@ var OpenRouterChatLanguageModel = class {
385
1234
  abortSignal: options.abortSignal,
386
1235
  fetch: this.config.fetch
387
1236
  });
388
- const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
389
1237
  const choice = response.choices[0];
390
1238
  if (!choice) {
391
1239
  throw new Error("No choice in response");
392
1240
  }
393
1241
  const usageInfo = response.usage ? {
394
- promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
395
- completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
1242
+ inputTokens: (_a15 = response.usage.prompt_tokens) != null ? _a15 : 0,
1243
+ outputTokens: (_b = response.usage.completion_tokens) != null ? _b : 0,
1244
+ totalTokens: ((_c = response.usage.prompt_tokens) != null ? _c : 0) + ((_d = response.usage.completion_tokens) != null ? _d : 0),
1245
+ reasoningTokens: (_f = (_e = response.usage.completion_tokens_details) == null ? void 0 : _e.reasoning_tokens) != null ? _f : 0,
1246
+ cachedInputTokens: (_h = (_g = response.usage.prompt_tokens_details) == null ? void 0 : _g.cached_tokens) != null ? _h : 0
396
1247
  } : {
397
- promptTokens: 0,
398
- completionTokens: 0
1248
+ inputTokens: 0,
1249
+ outputTokens: 0,
1250
+ totalTokens: 0,
1251
+ reasoningTokens: 0,
1252
+ cachedInputTokens: 0
399
1253
  };
400
- const providerMetadata = {};
401
- if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
402
- providerMetadata.openrouter = {
403
- usage: {
404
- promptTokens: response.usage.prompt_tokens,
405
- promptTokensDetails: response.usage.prompt_tokens_details ? {
406
- cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
407
- } : void 0,
408
- completionTokens: response.usage.completion_tokens,
409
- completionTokensDetails: response.usage.completion_tokens_details ? {
410
- reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
411
- } : void 0,
412
- cost: response.usage.cost,
413
- totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
414
- }
415
- };
416
- }
417
- const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
418
- const reasoningDetails = (_h = choice.message.reasoning_details) != null ? _h : [];
419
- const reasoning = reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
420
- var _a2;
1254
+ const reasoningDetails = (_i = choice.message.reasoning_details) != null ? _i : [];
1255
+ reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
1256
+ var _a16;
421
1257
  switch (detail.type) {
422
1258
  case "reasoning.text" /* Text */: {
423
1259
  if (detail.text) {
424
1260
  return {
425
1261
  type: "text",
426
1262
  text: detail.text,
427
- signature: (_a2 = detail.signature) != null ? _a2 : void 0
1263
+ signature: (_a16 = detail.signature) != null ? _a16 : void 0
428
1264
  };
429
1265
  }
430
1266
  break;
@@ -458,34 +1294,58 @@ var OpenRouterChatLanguageModel = class {
458
1294
  text: choice.message.reasoning
459
1295
  }
460
1296
  ] : [];
461
- return __spreadValues({
462
- response: {
463
- id: response.id,
464
- modelId: response.model
465
- },
466
- text: (_i = choice.message.content) != null ? _i : void 0,
467
- reasoning,
468
- toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
469
- var _a2;
470
- return {
471
- toolCallType: "function",
472
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
1297
+ const content = [];
1298
+ if (choice.message.content) {
1299
+ content.push({
1300
+ type: "text",
1301
+ text: choice.message.content
1302
+ });
1303
+ }
1304
+ if (choice.message.tool_calls) {
1305
+ for (const toolCall of choice.message.tool_calls) {
1306
+ content.push({
1307
+ type: "tool-call",
1308
+ toolCallId: (_j = toolCall.id) != null ? _j : generateId(),
473
1309
  toolName: toolCall.function.name,
474
- args: toolCall.function.arguments
475
- };
476
- }),
1310
+ input: toolCall.function.arguments
1311
+ });
1312
+ }
1313
+ }
1314
+ return {
1315
+ content,
477
1316
  finishReason: mapOpenRouterFinishReason(choice.finish_reason),
478
1317
  usage: usageInfo,
479
- rawCall: { rawPrompt, rawSettings },
480
- rawResponse: { headers: responseHeaders },
481
1318
  warnings: [],
482
- logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
483
- }, hasProviderMetadata ? { providerMetadata } : {});
1319
+ providerMetadata: {
1320
+ openrouter: {
1321
+ usage: {
1322
+ promptTokens: (_k = usageInfo.inputTokens) != null ? _k : 0,
1323
+ completionTokens: (_l = usageInfo.outputTokens) != null ? _l : 0,
1324
+ totalTokens: (_m = usageInfo.totalTokens) != null ? _m : 0,
1325
+ cost: (_n = response.usage) == null ? void 0 : _n.cost,
1326
+ promptTokensDetails: {
1327
+ cachedTokens: (_q = (_p = (_o = response.usage) == null ? void 0 : _o.prompt_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : 0
1328
+ },
1329
+ completionTokensDetails: {
1330
+ reasoningTokens: (_t = (_s = (_r = response.usage) == null ? void 0 : _r.completion_tokens_details) == null ? void 0 : _s.reasoning_tokens) != null ? _t : 0
1331
+ }
1332
+ }
1333
+ }
1334
+ },
1335
+ request: { body: args },
1336
+ response: {
1337
+ id: response.id,
1338
+ modelId: response.model,
1339
+ headers: responseHeaders
1340
+ }
1341
+ };
484
1342
  }
485
1343
  async doStream(options) {
486
- var _a, _c;
487
- const args = this.getArgs(options);
488
- const { responseHeaders, value: response } = await postJsonToApi({
1344
+ var _a15;
1345
+ const providerOptions = options.providerOptions || {};
1346
+ const openrouterOptions = providerOptions.openrouter || {};
1347
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1348
+ const { value: response, responseHeaders } = await postJsonToApi({
489
1349
  url: this.config.url({
490
1350
  path: "/chat/completions",
491
1351
  modelId: this.modelId
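
The `doGenerate` hunk above replaces the v1 `text`/`toolCalls` fields with a single `content` array and widens `usage` to the v2 shape, while OpenRouter-specific accounting moves under `providerMetadata.openrouter.usage`. A hedged example of what a result may look like (every value is a placeholder; only the field names come from the diff):

```ts
// Illustrative v2 generate result in the shape assembled above.
const exampleResult = {
  content: [
    { type: "text", text: "Hello!" },
    {
      type: "tool-call",
      toolCallId: "call_1",
      toolName: "getWeather",
      input: '{"city":"Paris"}', // JSON string, formerly `args`
    },
  ],
  finishReason: "tool-calls",
  usage: {
    inputTokens: 42,
    outputTokens: 17,
    totalTokens: 59,
    reasoningTokens: 0,
    cachedInputTokens: 0,
  },
  providerMetadata: {
    openrouter: {
      usage: {
        promptTokens: 42,
        completionTokens: 17,
        totalTokens: 59,
        cost: 0.00021, // placeholder; populated from response.usage.cost when OpenRouter returns it
        promptTokensDetails: { cachedTokens: 0 },
        completionTokensDetails: { reasoningTokens: 0 },
      },
    },
  },
};
```
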
@@ -496,7 +1356,7 @@ var OpenRouterChatLanguageModel = class {
496
1356
  // only include stream_options when in strict compatibility mode:
497
1357
  stream_options: this.config.compatibility === "strict" ? __spreadValues({
498
1358
  include_usage: true
499
- }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
1359
+ }, ((_a15 = this.settings.usage) == null ? void 0 : _a15.include) ? { include_usage: true } : {}) : void 0
500
1360
  }),
501
1361
  failedResponseHandler: openrouterFailedResponseHandler,
502
1362
  successfulResponseHandler: createEventSourceResponseHandler(
@@ -505,21 +1365,21 @@ var OpenRouterChatLanguageModel = class {
505
1365
  abortSignal: options.abortSignal,
506
1366
  fetch: this.config.fetch
507
1367
  });
508
- const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
509
1368
  const toolCalls = [];
510
1369
  let finishReason = "other";
511
- let usage = {
512
- promptTokens: Number.NaN,
513
- completionTokens: Number.NaN
1370
+ const usage = {
1371
+ inputTokens: Number.NaN,
1372
+ outputTokens: Number.NaN,
1373
+ totalTokens: Number.NaN,
1374
+ reasoningTokens: Number.NaN,
1375
+ cachedInputTokens: Number.NaN
514
1376
  };
515
- let logprobs;
516
1377
  const openrouterUsage = {};
517
- const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
518
1378
  return {
519
1379
  stream: response.pipeThrough(
520
1380
  new TransformStream({
521
1381
  transform(chunk, controller) {
522
- var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
1382
+ var _a16, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
523
1383
  if (!chunk.success) {
524
1384
  finishReason = "error";
525
1385
  controller.enqueue({ type: "error", error: chunk.error });
@@ -544,20 +1404,23 @@ var OpenRouterChatLanguageModel = class {
544
1404
  });
545
1405
  }
546
1406
  if (value.usage != null) {
547
- usage = {
548
- promptTokens: value.usage.prompt_tokens,
549
- completionTokens: value.usage.completion_tokens
550
- };
1407
+ usage.inputTokens = value.usage.prompt_tokens;
1408
+ usage.outputTokens = value.usage.completion_tokens;
1409
+ usage.totalTokens = value.usage.prompt_tokens + value.usage.completion_tokens;
551
1410
  openrouterUsage.promptTokens = value.usage.prompt_tokens;
552
1411
  if (value.usage.prompt_tokens_details) {
1412
+ const cachedInputTokens = (_a16 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a16 : 0;
1413
+ usage.cachedInputTokens = cachedInputTokens;
553
1414
  openrouterUsage.promptTokensDetails = {
554
- cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
1415
+ cachedTokens: cachedInputTokens
555
1416
  };
556
1417
  }
557
1418
  openrouterUsage.completionTokens = value.usage.completion_tokens;
558
1419
  if (value.usage.completion_tokens_details) {
1420
+ const reasoningTokens = (_b = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b : 0;
1421
+ usage.reasoningTokens = reasoningTokens;
559
1422
  openrouterUsage.completionTokensDetails = {
560
- reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
1423
+ reasoningTokens
561
1424
  };
562
1425
  }
563
1426
  openrouterUsage.cost = value.usage.cost;
@@ -574,13 +1437,15 @@ var OpenRouterChatLanguageModel = class {
574
1437
  if (delta.content != null) {
575
1438
  controller.enqueue({
576
1439
  type: "text-delta",
577
- textDelta: delta.content
1440
+ delta: delta.content,
1441
+ id: generateId()
578
1442
  });
579
1443
  }
580
1444
  if (delta.reasoning != null) {
581
1445
  controller.enqueue({
582
- type: "reasoning",
583
- textDelta: delta.reasoning
1446
+ type: "reasoning-delta",
1447
+ delta: delta.reasoning,
1448
+ id: generateId()
584
1449
  });
585
1450
  }
586
1451
  if (delta.reasoning_details && delta.reasoning_details.length > 0) {
@@ -589,14 +1454,15 @@ var OpenRouterChatLanguageModel = class {
589
1454
  case "reasoning.text" /* Text */: {
590
1455
  if (detail.text) {
591
1456
  controller.enqueue({
592
- type: "reasoning",
593
- textDelta: detail.text
1457
+ type: "reasoning-delta",
1458
+ delta: detail.text,
1459
+ id: generateId()
594
1460
  });
595
1461
  }
596
1462
  if (detail.signature) {
597
1463
  controller.enqueue({
598
- type: "reasoning-signature",
599
- signature: detail.signature
1464
+ type: "reasoning-end",
1465
+ id: generateId()
600
1466
  });
601
1467
  }
602
1468
  break;
@@ -604,8 +1470,9 @@ var OpenRouterChatLanguageModel = class {
604
1470
  case "reasoning.encrypted" /* Encrypted */: {
605
1471
  if (detail.data) {
606
1472
  controller.enqueue({
607
- type: "redacted-reasoning",
608
- data: detail.data
1473
+ type: "reasoning-delta",
1474
+ delta: "[REDACTED]",
1475
+ id: generateId()
609
1476
  });
610
1477
  }
611
1478
  break;
@@ -613,8 +1480,9 @@ var OpenRouterChatLanguageModel = class {
613
1480
  case "reasoning.summary" /* Summary */: {
614
1481
  if (detail.summary) {
615
1482
  controller.enqueue({
616
- type: "reasoning",
617
- textDelta: detail.summary
1483
+ type: "reasoning-delta",
1484
+ delta: detail.summary,
1485
+ id: generateId()
618
1486
  });
619
1487
  }
620
1488
  break;
@@ -626,15 +1494,6 @@ var OpenRouterChatLanguageModel = class {
626
1494
  }
627
1495
  }
628
1496
  }
629
- const mappedLogprobs = mapOpenRouterChatLogProbsOutput(
630
- choice == null ? void 0 : choice.logprobs
631
- );
632
- if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
633
- if (logprobs === void 0) {
634
- logprobs = [];
635
- }
636
- logprobs.push(...mappedLogprobs);
637
- }
638
1497
  if (delta.tool_calls != null) {
639
1498
  for (const toolCallDelta of delta.tool_calls) {
640
1499
  const index = toolCallDelta.index;
@@ -651,7 +1510,7 @@ var OpenRouterChatLanguageModel = class {
651
1510
  message: `Expected 'id' to be a string.`
652
1511
  });
653
1512
  }
654
- if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
1513
+ if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
655
1514
  throw new InvalidResponseDataError({
656
1515
  data: toolCallDelta,
657
1516
  message: `Expected 'function.name' to be a string.`
@@ -672,18 +1531,24 @@ var OpenRouterChatLanguageModel = class {
672
1531
  }
673
1532
  if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
674
1533
  controller.enqueue({
675
- type: "tool-call-delta",
676
- toolCallType: "function",
677
- toolCallId: toolCall2.id,
678
- toolName: toolCall2.function.name,
679
- argsTextDelta: toolCall2.function.arguments
1534
+ type: "tool-input-start",
1535
+ id: toolCall2.id,
1536
+ toolName: toolCall2.function.name
1537
+ });
1538
+ controller.enqueue({
1539
+ type: "tool-input-delta",
1540
+ id: toolCall2.id,
1541
+ delta: toolCall2.function.arguments
1542
+ });
1543
+ controller.enqueue({
1544
+ type: "tool-input-end",
1545
+ id: toolCall2.id
680
1546
  });
681
1547
  controller.enqueue({
682
1548
  type: "tool-call",
683
- toolCallType: "function",
684
- toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
1549
+ toolCallId: toolCall2.id,
685
1550
  toolName: toolCall2.function.name,
686
- args: toolCall2.function.arguments
1551
+ input: toolCall2.function.arguments
687
1552
  });
688
1553
  toolCall2.sent = true;
689
1554
  }
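Per the hunk above, a tool call that arrives complete in one chunk is now announced as a tool-input-start / tool-input-delta / tool-input-end sequence followed by a tool-call part whose JSON arguments live in input (formerly args; toolCallType is gone). A sketch of collecting those parts on the consumer side; the part union and the pending map are illustrative.

// Sketch: accumulate streamed tool input by id, parse it when "tool-call" arrives.
type ToolStreamPart =
  | { type: "tool-input-start"; id: string; toolName: string }
  | { type: "tool-input-delta"; id: string; delta: string }
  | { type: "tool-input-end"; id: string }
  | { type: "tool-call"; toolCallId: string; toolName: string; input: string };

const pendingInput = new Map<string, string>();

function onToolPart(part: ToolStreamPart): unknown {
  switch (part.type) {
    case "tool-input-start":
      pendingInput.set(part.id, "");
      return undefined;
    case "tool-input-delta":
      pendingInput.set(part.id, (pendingInput.get(part.id) ?? "") + part.delta);
      return undefined;
    case "tool-input-end":
      return undefined;
    case "tool-call":
      // `input` holds the complete JSON argument string (named `args` in 0.7.x).
      return JSON.parse(part.input);
  }
}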
@@ -693,23 +1558,27 @@ var OpenRouterChatLanguageModel = class {
693
1558
  if (toolCall == null) {
694
1559
  throw new Error("Tool call is missing");
695
1560
  }
1561
+ if (((_g = toolCallDelta.function) == null ? void 0 : _g.name) != null) {
1562
+ controller.enqueue({
1563
+ type: "tool-input-start",
1564
+ id: toolCall.id,
1565
+ toolName: toolCall.function.name
1566
+ });
1567
+ }
696
1568
  if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
697
1569
  toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
698
1570
  }
699
1571
  controller.enqueue({
700
- type: "tool-call-delta",
701
- toolCallType: "function",
702
- toolCallId: toolCall.id,
703
- toolName: toolCall.function.name,
704
- argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
1572
+ type: "tool-input-delta",
1573
+ id: toolCall.id,
1574
+ delta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
705
1575
  });
706
1576
  if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
707
1577
  controller.enqueue({
708
1578
  type: "tool-call",
709
- toolCallType: "function",
710
1579
  toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
711
1580
  toolName: toolCall.function.name,
712
- args: toolCall.function.arguments
1581
+ input: toolCall.function.arguments
713
1582
  });
714
1583
  toolCall.sent = true;
715
1584
  }
@@ -717,206 +1586,141 @@ var OpenRouterChatLanguageModel = class {
717
1586
  }
718
1587
  },
719
1588
  flush(controller) {
720
- var _a2;
1589
+ var _a16;
721
1590
  if (finishReason === "tool-calls") {
722
1591
  for (const toolCall of toolCalls) {
723
1592
  if (!toolCall.sent) {
724
1593
  controller.enqueue({
725
1594
  type: "tool-call",
726
- toolCallType: "function",
727
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
1595
+ toolCallId: (_a16 = toolCall.id) != null ? _a16 : generateId(),
728
1596
  toolName: toolCall.function.name,
729
1597
  // Coerce invalid arguments to an empty JSON object
730
- args: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
1598
+ input: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
731
1599
  });
732
1600
  toolCall.sent = true;
733
1601
  }
734
1602
  }
735
1603
  }
736
- const providerMetadata = {};
737
- if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
738
- providerMetadata.openrouter = {
739
- usage: openrouterUsage
740
- };
741
- }
742
- const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
743
- controller.enqueue(__spreadValues({
1604
+ controller.enqueue({
744
1605
  type: "finish",
745
1606
  finishReason,
746
- logprobs,
747
- usage
748
- }, hasProviderMetadata ? { providerMetadata } : {}));
1607
+ usage,
1608
+ providerMetadata: {
1609
+ openrouter: {
1610
+ usage: openrouterUsage
1611
+ }
1612
+ }
1613
+ });
749
1614
  }
750
1615
  })
751
1616
  ),
752
- rawCall: { rawPrompt, rawSettings },
753
- rawResponse: { headers: responseHeaders },
754
- warnings: []
1617
+ warnings: [],
1618
+ request: { body: args },
1619
+ response: { headers: responseHeaders }
755
1620
  };
756
1621
  }
757
1622
  };
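As the flush handler above shows, the beta's finish part always attaches OpenRouter accounting under providerMetadata.openrouter.usage (0.7.x only did so when usage accounting was enabled), drops logprobs from the finish part, and replaces rawCall/rawResponse on the stream result with request/response. A sketch of reading that metadata from a finish part; the FinishPart interface is narrowed to the fields visible in this diff.

// Sketch: pull OpenRouter cost and token accounting off the finish stream part.
interface FinishPart {
  type: "finish";
  finishReason: string;
  usage: { inputTokens: number; outputTokens: number; totalTokens: number };
  providerMetadata?: {
    openrouter?: {
      usage?: { cost?: number; totalTokens?: number };
    };
  };
}

function logOpenRouterAccounting(part: FinishPart): void {
  const usage = part.providerMetadata?.openrouter?.usage;
  if (usage?.cost !== undefined) {
    console.log(`OpenRouter cost: ${usage.cost}, total tokens: ${usage.totalTokens}`);
  }
}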
758
- var OpenRouterChatCompletionBaseResponseSchema = z3.object({
759
- id: z3.string().optional(),
760
- model: z3.string().optional(),
761
- usage: z3.object({
762
- prompt_tokens: z3.number(),
763
- prompt_tokens_details: z3.object({
764
- cached_tokens: z3.number()
1623
+ var OpenRouterChatCompletionBaseResponseSchema = z5.object({
1624
+ id: z5.string().optional(),
1625
+ model: z5.string().optional(),
1626
+ usage: z5.object({
1627
+ prompt_tokens: z5.number(),
1628
+ prompt_tokens_details: z5.object({
1629
+ cached_tokens: z5.number()
765
1630
  }).nullish(),
766
- completion_tokens: z3.number(),
767
- completion_tokens_details: z3.object({
768
- reasoning_tokens: z3.number()
1631
+ completion_tokens: z5.number(),
1632
+ completion_tokens_details: z5.object({
1633
+ reasoning_tokens: z5.number()
769
1634
  }).nullish(),
770
- total_tokens: z3.number(),
771
- cost: z3.number().optional()
1635
+ total_tokens: z5.number(),
1636
+ cost: z5.number().optional()
772
1637
  }).nullish()
773
1638
  });
774
1639
  var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
775
- choices: z3.array(
776
- z3.object({
777
- message: z3.object({
778
- role: z3.literal("assistant"),
779
- content: z3.string().nullable().optional(),
780
- reasoning: z3.string().nullable().optional(),
1640
+ choices: z5.array(
1641
+ z5.object({
1642
+ message: z5.object({
1643
+ role: z5.literal("assistant"),
1644
+ content: z5.string().nullable().optional(),
1645
+ reasoning: z5.string().nullable().optional(),
781
1646
  reasoning_details: ReasoningDetailArraySchema.nullish(),
782
- tool_calls: z3.array(
783
- z3.object({
784
- id: z3.string().optional().nullable(),
785
- type: z3.literal("function"),
786
- function: z3.object({
787
- name: z3.string(),
788
- arguments: z3.string()
1647
+ tool_calls: z5.array(
1648
+ z5.object({
1649
+ id: z5.string().optional().nullable(),
1650
+ type: z5.literal("function"),
1651
+ function: z5.object({
1652
+ name: z5.string(),
1653
+ arguments: z5.string()
789
1654
  })
790
1655
  })
791
1656
  ).optional()
792
1657
  }),
793
- index: z3.number(),
794
- logprobs: z3.object({
795
- content: z3.array(
796
- z3.object({
797
- token: z3.string(),
798
- logprob: z3.number(),
799
- top_logprobs: z3.array(
800
- z3.object({
801
- token: z3.string(),
802
- logprob: z3.number()
1658
+ index: z5.number(),
1659
+ logprobs: z5.object({
1660
+ content: z5.array(
1661
+ z5.object({
1662
+ token: z5.string(),
1663
+ logprob: z5.number(),
1664
+ top_logprobs: z5.array(
1665
+ z5.object({
1666
+ token: z5.string(),
1667
+ logprob: z5.number()
803
1668
  })
804
1669
  )
805
1670
  })
806
1671
  ).nullable()
807
1672
  }).nullable().optional(),
808
- finish_reason: z3.string().optional().nullable()
1673
+ finish_reason: z5.string().optional().nullable()
809
1674
  })
810
1675
  )
811
1676
  });
812
- var OpenRouterStreamChatCompletionChunkSchema = z3.union([
1677
+ var OpenRouterStreamChatCompletionChunkSchema = z5.union([
813
1678
  OpenRouterChatCompletionBaseResponseSchema.extend({
814
- choices: z3.array(
815
- z3.object({
816
- delta: z3.object({
817
- role: z3.enum(["assistant"]).optional(),
818
- content: z3.string().nullish(),
819
- reasoning: z3.string().nullish().optional(),
1679
+ choices: z5.array(
1680
+ z5.object({
1681
+ delta: z5.object({
1682
+ role: z5.enum(["assistant"]).optional(),
1683
+ content: z5.string().nullish(),
1684
+ reasoning: z5.string().nullish().optional(),
820
1685
  reasoning_details: ReasoningDetailArraySchema.nullish(),
821
- tool_calls: z3.array(
822
- z3.object({
823
- index: z3.number(),
824
- id: z3.string().nullish(),
825
- type: z3.literal("function").optional(),
826
- function: z3.object({
827
- name: z3.string().nullish(),
828
- arguments: z3.string().nullish()
1686
+ tool_calls: z5.array(
1687
+ z5.object({
1688
+ index: z5.number(),
1689
+ id: z5.string().nullish(),
1690
+ type: z5.literal("function").optional(),
1691
+ function: z5.object({
1692
+ name: z5.string().nullish(),
1693
+ arguments: z5.string().nullish()
829
1694
  })
830
1695
  })
831
1696
  ).nullish()
832
1697
  }).nullish(),
833
- logprobs: z3.object({
834
- content: z3.array(
835
- z3.object({
836
- token: z3.string(),
837
- logprob: z3.number(),
838
- top_logprobs: z3.array(
839
- z3.object({
840
- token: z3.string(),
841
- logprob: z3.number()
1698
+ logprobs: z5.object({
1699
+ content: z5.array(
1700
+ z5.object({
1701
+ token: z5.string(),
1702
+ logprob: z5.number(),
1703
+ top_logprobs: z5.array(
1704
+ z5.object({
1705
+ token: z5.string(),
1706
+ logprob: z5.number()
842
1707
  })
843
1708
  )
844
1709
  })
845
1710
  ).nullable()
846
1711
  }).nullish(),
847
- finish_reason: z3.string().nullable().optional(),
848
- index: z3.number()
1712
+ finish_reason: z5.string().nullable().optional(),
1713
+ index: z5.number()
849
1714
  })
850
1715
  )
851
1716
  }),
852
1717
  OpenRouterErrorResponseSchema
853
1718
  ]);
854
- function prepareToolsAndToolChoice(mode) {
855
- var _a;
856
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
857
- if (tools == null) {
858
- return { tools: void 0, tool_choice: void 0 };
859
- }
860
- const mappedTools = tools.map((tool) => {
861
- if (isFunctionTool(tool)) {
862
- return {
863
- type: "function",
864
- function: {
865
- name: tool.name,
866
- description: tool.description,
867
- parameters: tool.parameters
868
- }
869
- };
870
- }
871
- return {
872
- type: "function",
873
- function: {
874
- name: tool.name
875
- }
876
- };
877
- });
878
- const toolChoice = mode.toolChoice;
879
- if (toolChoice == null) {
880
- return { tools: mappedTools, tool_choice: void 0 };
881
- }
882
- const type = toolChoice.type;
883
- switch (type) {
884
- case "auto":
885
- case "none":
886
- case "required":
887
- return { tools: mappedTools, tool_choice: type };
888
- case "tool":
889
- return {
890
- tools: mappedTools,
891
- tool_choice: {
892
- type: "function",
893
- function: {
894
- name: toolChoice.toolName
895
- }
896
- }
897
- };
898
- default: {
899
- const _exhaustiveCheck = type;
900
- throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
901
- }
902
- }
903
- }
904
1719
 
905
- // src/openrouter-completion-language-model.ts
906
- import { UnsupportedFunctionalityError as UnsupportedFunctionalityError3 } from "@ai-sdk/provider";
907
- import {
908
- combineHeaders as combineHeaders2,
909
- createEventSourceResponseHandler as createEventSourceResponseHandler2,
910
- createJsonResponseHandler as createJsonResponseHandler2,
911
- postJsonToApi as postJsonToApi2
912
- } from "@ai-sdk/provider-utils";
913
- import { z as z4 } from "zod";
1720
+ // src/completion/index.ts
1721
+ import { z as z6 } from "zod/v4";
914
1722
 
915
- // src/convert-to-openrouter-completion-prompt.ts
916
- import {
917
- InvalidPromptError,
918
- UnsupportedFunctionalityError as UnsupportedFunctionalityError2
919
- } from "@ai-sdk/provider";
1723
+ // src/completion/convert-to-openrouter-completion-prompt.ts
920
1724
  function convertToOpenRouterCompletionPrompt({
921
1725
  prompt,
922
1726
  inputFormat,
@@ -937,7 +1741,7 @@ function convertToOpenRouterCompletionPrompt({
937
1741
  switch (role) {
938
1742
  case "system": {
939
1743
  throw new InvalidPromptError({
940
- message: "Unexpected system message in prompt: ${content}",
1744
+ message: `Unexpected system message in prompt: ${content}`,
941
1745
  prompt
942
1746
  });
943
1747
  }
@@ -947,21 +1751,13 @@ function convertToOpenRouterCompletionPrompt({
947
1751
  case "text": {
948
1752
  return part.text;
949
1753
  }
950
- case "image": {
951
- throw new UnsupportedFunctionalityError2({
952
- functionality: "images"
953
- });
954
- }
955
1754
  case "file": {
956
- throw new UnsupportedFunctionalityError2({
1755
+ throw new UnsupportedFunctionalityError({
957
1756
  functionality: "file attachments"
958
1757
  });
959
1758
  }
960
1759
  default: {
961
- const _exhaustiveCheck = part;
962
- throw new Error(
963
- `Unsupported content type: ${_exhaustiveCheck}`
964
- );
1760
+ return "";
965
1761
  }
966
1762
  }
967
1763
  }).join("");
@@ -972,39 +1768,38 @@ ${userMessage}
972
1768
  break;
973
1769
  }
974
1770
  case "assistant": {
975
- const assistantMessage = content.map((part) => {
976
- switch (part.type) {
977
- case "text": {
978
- return part.text;
979
- }
980
- case "tool-call": {
981
- throw new UnsupportedFunctionalityError2({
982
- functionality: "tool-call messages"
983
- });
984
- }
985
- case "reasoning": {
986
- throw new UnsupportedFunctionalityError2({
987
- functionality: "reasoning messages"
988
- });
989
- }
990
- case "redacted-reasoning": {
991
- throw new UnsupportedFunctionalityError2({
992
- functionality: "redacted reasoning messages"
993
- });
994
- }
995
- case "file": {
996
- throw new UnsupportedFunctionalityError2({
997
- functionality: "file attachments"
998
- });
999
- }
1000
- default: {
1001
- const _exhaustiveCheck = part;
1002
- throw new Error(
1003
- `Unsupported content type: ${_exhaustiveCheck}`
1004
- );
1771
+ const assistantMessage = content.map(
1772
+ (part) => {
1773
+ switch (part.type) {
1774
+ case "text": {
1775
+ return part.text;
1776
+ }
1777
+ case "tool-call": {
1778
+ throw new UnsupportedFunctionalityError({
1779
+ functionality: "tool-call messages"
1780
+ });
1781
+ }
1782
+ case "tool-result": {
1783
+ throw new UnsupportedFunctionalityError({
1784
+ functionality: "tool-result messages"
1785
+ });
1786
+ }
1787
+ case "reasoning": {
1788
+ throw new UnsupportedFunctionalityError({
1789
+ functionality: "reasoning messages"
1790
+ });
1791
+ }
1792
+ case "file": {
1793
+ throw new UnsupportedFunctionalityError({
1794
+ functionality: "file attachments"
1795
+ });
1796
+ }
1797
+ default: {
1798
+ return "";
1799
+ }
1005
1800
  }
1006
1801
  }
1007
- }).join("");
1802
+ ).join("");
1008
1803
  text += `${assistant}:
1009
1804
  ${assistantMessage}
1010
1805
 
@@ -1012,13 +1807,12 @@ ${assistantMessage}
1012
1807
  break;
1013
1808
  }
1014
1809
  case "tool": {
1015
- throw new UnsupportedFunctionalityError2({
1810
+ throw new UnsupportedFunctionalityError({
1016
1811
  functionality: "tool messages"
1017
1812
  });
1018
1813
  }
1019
1814
  default: {
1020
- const _exhaustiveCheck = role;
1021
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1815
+ break;
1022
1816
  }
1023
1817
  }
1024
1818
  }
@@ -1029,40 +1823,60 @@ ${assistantMessage}
1029
1823
  };
1030
1824
  }
1031
1825
 
1032
- // src/map-openrouter-completion-logprobs.ts
1033
- function mapOpenRouterCompletionLogProbs(logprobs) {
1034
- return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => {
1035
- var _a, _b;
1036
- return {
1037
- token,
1038
- logprob: (_a = logprobs.token_logprobs[index]) != null ? _a : 0,
1039
- topLogprobs: logprobs.top_logprobs ? Object.entries((_b = logprobs.top_logprobs[index]) != null ? _b : {}).map(
1040
- ([token2, logprob]) => ({
1041
- token: token2,
1042
- logprob
1043
- })
1044
- ) : []
1045
- };
1046
- });
1047
- }
1048
-
1049
- // src/openrouter-completion-language-model.ts
1826
+ // src/completion/index.ts
1827
+ var OpenRouterCompletionChunkSchema = z6.union([
1828
+ z6.object({
1829
+ id: z6.string().optional(),
1830
+ model: z6.string().optional(),
1831
+ choices: z6.array(
1832
+ z6.object({
1833
+ text: z6.string(),
1834
+ reasoning: z6.string().nullish().optional(),
1835
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1836
+ finish_reason: z6.string().nullish(),
1837
+ index: z6.number(),
1838
+ logprobs: z6.object({
1839
+ tokens: z6.array(z6.string()),
1840
+ token_logprobs: z6.array(z6.number()),
1841
+ top_logprobs: z6.array(z6.record(z6.string(), z6.number())).nullable()
1842
+ }).nullable().optional()
1843
+ })
1844
+ ),
1845
+ usage: z6.object({
1846
+ prompt_tokens: z6.number(),
1847
+ prompt_tokens_details: z6.object({
1848
+ cached_tokens: z6.number()
1849
+ }).nullish(),
1850
+ completion_tokens: z6.number(),
1851
+ completion_tokens_details: z6.object({
1852
+ reasoning_tokens: z6.number()
1853
+ }).nullish(),
1854
+ total_tokens: z6.number(),
1855
+ cost: z6.number().optional()
1856
+ }).nullish()
1857
+ }),
1858
+ OpenRouterErrorResponseSchema
1859
+ ]);
1050
1860
  var OpenRouterCompletionLanguageModel = class {
1051
1861
  constructor(modelId, settings, config) {
1052
- this.specificationVersion = "v1";
1862
+ this.specificationVersion = "v2";
1863
+ this.provider = "openrouter";
1864
+ this.supportedUrls = {
1865
+ "image/*": [
1866
+ /^data:image\/[a-zA-Z]+;base64,/,
1867
+ /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i
1868
+ ],
1869
+ "text/*": [/^data:text\//, /^https?:\/\/.+$/],
1870
+ "application/*": [/^data:application\//, /^https?:\/\/.+$/]
1871
+ };
1053
1872
  this.defaultObjectGenerationMode = void 0;
1054
1873
  this.modelId = modelId;
1055
1874
  this.settings = settings;
1056
1875
  this.config = config;
1057
1876
  }
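The supportedUrls map added in the constructor above appears to advertise which URL shapes can be passed through to OpenRouter directly rather than being downloaded by the SDK first. A quick sketch exercising the image/* patterns copied from the constructor; the helper name is illustrative.

// Sketch: the image/* patterns from the constructor above, tried on sample URLs.
const imageUrlPatterns = [
  /^data:image\/[a-zA-Z]+;base64,/,
  /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i,
];

const isSupportedImageUrl = (url: string): boolean =>
  imageUrlPatterns.some((pattern) => pattern.test(url));

console.log(isSupportedImageUrl("https://example.com/photo.JPG"));      // true, extension match is case-insensitive
console.log(isSupportedImageUrl("data:image/png;base64,iVBORw0KGgo=")); // true, base64 data URL
console.log(isSupportedImageUrl("https://example.com/diagram.svg"));    // false, extension not in the list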
1058
- get provider() {
1059
- return this.config.provider;
1060
- }
1061
1877
  getArgs({
1062
- mode,
1063
- inputFormat,
1064
1878
  prompt,
1065
- maxTokens,
1879
+ maxOutputTokens,
1066
1880
  temperature,
1067
1881
  topP,
1068
1882
  frequencyPenalty,
@@ -1071,16 +1885,24 @@ var OpenRouterCompletionLanguageModel = class {
1071
1885
  responseFormat,
1072
1886
  topK,
1073
1887
  stopSequences,
1074
- providerMetadata
1888
+ tools,
1889
+ toolChoice
1075
1890
  }) {
1076
- var _a, _b;
1077
- const type = mode.type;
1078
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
1079
1891
  const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
1080
1892
  prompt,
1081
- inputFormat
1893
+ inputFormat: "prompt"
1082
1894
  });
1083
- const baseArgs = __spreadValues(__spreadValues(__spreadValues({
1895
+ if (tools == null ? void 0 : tools.length) {
1896
+ throw new UnsupportedFunctionalityError({
1897
+ functionality: "tools"
1898
+ });
1899
+ }
1900
+ if (toolChoice) {
1901
+ throw new UnsupportedFunctionalityError({
1902
+ functionality: "toolChoice"
1903
+ });
1904
+ }
1905
+ return __spreadValues(__spreadValues({
1084
1906
  // model id:
1085
1907
  model: this.modelId,
1086
1908
  models: this.settings.models,
@@ -1090,7 +1912,7 @@ var OpenRouterCompletionLanguageModel = class {
1090
1912
  suffix: this.settings.suffix,
1091
1913
  user: this.settings.user,
1092
1914
  // standardized settings:
1093
- max_tokens: maxTokens,
1915
+ max_tokens: maxOutputTokens,
1094
1916
  temperature,
1095
1917
  top_p: topP,
1096
1918
  frequency_penalty: frequencyPenalty,
@@ -1104,58 +1926,27 @@ var OpenRouterCompletionLanguageModel = class {
1104
1926
  // OpenRouter specific settings:
1105
1927
  include_reasoning: this.settings.includeReasoning,
1106
1928
  reasoning: this.settings.reasoning
1107
- }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
1108
- switch (type) {
1109
- case "regular": {
1110
- if ((_b = mode.tools) == null ? void 0 : _b.length) {
1111
- throw new UnsupportedFunctionalityError3({
1112
- functionality: "tools"
1113
- });
1114
- }
1115
- if (mode.toolChoice) {
1116
- throw new UnsupportedFunctionalityError3({
1117
- functionality: "toolChoice"
1118
- });
1119
- }
1120
- return baseArgs;
1121
- }
1122
- case "object-json": {
1123
- throw new UnsupportedFunctionalityError3({
1124
- functionality: "object-json mode"
1125
- });
1126
- }
1127
- case "object-tool": {
1128
- throw new UnsupportedFunctionalityError3({
1129
- functionality: "object-tool mode"
1130
- });
1131
- }
1132
- // Handle all non-text types with a single default case
1133
- default: {
1134
- const _exhaustiveCheck = type;
1135
- throw new UnsupportedFunctionalityError3({
1136
- functionality: `${_exhaustiveCheck} mode`
1137
- });
1138
- }
1139
- }
1929
+ }, this.config.extraBody), this.settings.extraBody);
1140
1930
  }
1141
1931
  async doGenerate(options) {
1142
- var _b, _c, _d, _e, _f;
1143
- const args = this.getArgs(options);
1144
- const { responseHeaders, value: response } = await postJsonToApi2({
1932
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
1933
+ const providerOptions = options.providerOptions || {};
1934
+ const openrouterOptions = providerOptions.openrouter || {};
1935
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1936
+ const { value: response, responseHeaders } = await postJsonToApi({
1145
1937
  url: this.config.url({
1146
1938
  path: "/completions",
1147
1939
  modelId: this.modelId
1148
1940
  }),
1149
- headers: combineHeaders2(this.config.headers(), options.headers),
1941
+ headers: combineHeaders(this.config.headers(), options.headers),
1150
1942
  body: args,
1151
1943
  failedResponseHandler: openrouterFailedResponseHandler,
1152
- successfulResponseHandler: createJsonResponseHandler2(
1944
+ successfulResponseHandler: createJsonResponseHandler(
1153
1945
  OpenRouterCompletionChunkSchema
1154
1946
  ),
1155
1947
  abortSignal: options.abortSignal,
1156
1948
  fetch: this.config.fetch
1157
1949
  });
1158
- const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
1159
1950
  if ("error" in response) {
1160
1951
  throw new Error(`${response.error.message}`);
1161
1952
  }
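Note in doGenerate above that per-call provider extras now come from options.providerOptions.openrouter and are spread over the request body; 0.7.x read them from providerMetadata.openrouter inside getArgs instead. A sketch of the caller side, assuming the AI SDK v5 beta generateText entry point from the ai package; the model id and the user field are examples only.

// Sketch: per-call OpenRouter request fields travel via providerOptions.openrouter.
import { generateText } from "ai";
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

// apiKey is omitted here; loadApiKey falls back to the OPENROUTER_API_KEY env var.
const openrouter = createOpenRouter();

async function main(): Promise<void> {
  const { text } = await generateText({
    model: openrouter("openai/gpt-4o-mini"), // example model id
    prompt: "Hello!",
    providerOptions: {
      // merged into the OpenRouter request body by doGenerate/doStream above
      openrouter: { user: "example-user-id" },
    },
  });
  console.log(text);
}

main().catch(console.error);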
@@ -1164,54 +1955,62 @@ var OpenRouterCompletionLanguageModel = class {
1164
1955
  throw new Error("No choice in OpenRouter completion response");
1165
1956
  }
1166
1957
  return {
1167
- response: {
1168
- id: response.id,
1169
- modelId: response.model
1170
- },
1171
- text: (_b = choice.text) != null ? _b : "",
1172
- reasoning: choice.reasoning || void 0,
1958
+ content: [
1959
+ {
1960
+ type: "text",
1961
+ text: (_a15 = choice.text) != null ? _a15 : ""
1962
+ }
1963
+ ],
1964
+ finishReason: mapOpenRouterFinishReason(choice.finish_reason),
1173
1965
  usage: {
1174
- promptTokens: (_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens) != null ? _d : 0,
1175
- completionTokens: (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens) != null ? _f : 0
1966
+ inputTokens: (_c = (_b = response.usage) == null ? void 0 : _b.prompt_tokens) != null ? _c : 0,
1967
+ outputTokens: (_e = (_d = response.usage) == null ? void 0 : _d.completion_tokens) != null ? _e : 0,
1968
+ totalTokens: ((_g = (_f = response.usage) == null ? void 0 : _f.prompt_tokens) != null ? _g : 0) + ((_i = (_h = response.usage) == null ? void 0 : _h.completion_tokens) != null ? _i : 0),
1969
+ reasoningTokens: (_l = (_k = (_j = response.usage) == null ? void 0 : _j.completion_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : 0,
1970
+ cachedInputTokens: (_o = (_n = (_m = response.usage) == null ? void 0 : _m.prompt_tokens_details) == null ? void 0 : _n.cached_tokens) != null ? _o : 0
1176
1971
  },
1177
- finishReason: mapOpenRouterFinishReason(choice.finish_reason),
1178
- logprobs: mapOpenRouterCompletionLogProbs(choice.logprobs),
1179
- rawCall: { rawPrompt, rawSettings },
1180
- rawResponse: { headers: responseHeaders },
1181
- warnings: []
1972
+ warnings: [],
1973
+ response: {
1974
+ headers: responseHeaders
1975
+ }
1182
1976
  };
1183
1977
  }
1184
1978
  async doStream(options) {
1185
- const args = this.getArgs(options);
1186
- const { responseHeaders, value: response } = await postJsonToApi2({
1979
+ const providerOptions = options.providerOptions || {};
1980
+ const openrouterOptions = providerOptions.openrouter || {};
1981
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1982
+ const { value: response, responseHeaders } = await postJsonToApi({
1187
1983
  url: this.config.url({
1188
1984
  path: "/completions",
1189
1985
  modelId: this.modelId
1190
1986
  }),
1191
- headers: combineHeaders2(this.config.headers(), options.headers),
1192
- body: __spreadProps(__spreadValues({}, this.getArgs(options)), {
1987
+ headers: combineHeaders(this.config.headers(), options.headers),
1988
+ body: __spreadProps(__spreadValues({}, args), {
1193
1989
  stream: true,
1194
1990
  // only include stream_options when in strict compatibility mode:
1195
1991
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
1196
1992
  }),
1197
1993
  failedResponseHandler: openrouterFailedResponseHandler,
1198
- successfulResponseHandler: createEventSourceResponseHandler2(
1994
+ successfulResponseHandler: createEventSourceResponseHandler(
1199
1995
  OpenRouterCompletionChunkSchema
1200
1996
  ),
1201
1997
  abortSignal: options.abortSignal,
1202
1998
  fetch: this.config.fetch
1203
1999
  });
1204
- const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
1205
2000
  let finishReason = "other";
1206
- let usage = {
1207
- promptTokens: Number.NaN,
1208
- completionTokens: Number.NaN
2001
+ const usage = {
2002
+ inputTokens: Number.NaN,
2003
+ outputTokens: Number.NaN,
2004
+ totalTokens: Number.NaN,
2005
+ reasoningTokens: Number.NaN,
2006
+ cachedInputTokens: Number.NaN
1209
2007
  };
1210
- let logprobs;
2008
+ const openrouterUsage = {};
1211
2009
  return {
1212
2010
  stream: response.pipeThrough(
1213
2011
  new TransformStream({
1214
2012
  transform(chunk, controller) {
2013
+ var _a15, _b;
1215
2014
  if (!chunk.success) {
1216
2015
  finishReason = "error";
1217
2016
  controller.enqueue({ type: "error", error: chunk.error });
@@ -1224,10 +2023,27 @@ var OpenRouterCompletionLanguageModel = class {
1224
2023
  return;
1225
2024
  }
1226
2025
  if (value.usage != null) {
1227
- usage = {
1228
- promptTokens: value.usage.prompt_tokens,
1229
- completionTokens: value.usage.completion_tokens
1230
- };
2026
+ usage.inputTokens = value.usage.prompt_tokens;
2027
+ usage.outputTokens = value.usage.completion_tokens;
2028
+ usage.totalTokens = value.usage.prompt_tokens + value.usage.completion_tokens;
2029
+ openrouterUsage.promptTokens = value.usage.prompt_tokens;
2030
+ if (value.usage.prompt_tokens_details) {
2031
+ const cachedInputTokens = (_a15 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a15 : 0;
2032
+ usage.cachedInputTokens = cachedInputTokens;
2033
+ openrouterUsage.promptTokensDetails = {
2034
+ cachedTokens: cachedInputTokens
2035
+ };
2036
+ }
2037
+ openrouterUsage.completionTokens = value.usage.completion_tokens;
2038
+ if (value.usage.completion_tokens_details) {
2039
+ const reasoningTokens = (_b = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b : 0;
2040
+ usage.reasoningTokens = reasoningTokens;
2041
+ openrouterUsage.completionTokensDetails = {
2042
+ reasoningTokens
2043
+ };
2044
+ }
2045
+ openrouterUsage.cost = value.usage.cost;
2046
+ openrouterUsage.totalTokens = value.usage.total_tokens;
1231
2047
  }
1232
2048
  const choice = value.choices[0];
1233
2049
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -1236,69 +2052,40 @@ var OpenRouterCompletionLanguageModel = class {
1236
2052
  if ((choice == null ? void 0 : choice.text) != null) {
1237
2053
  controller.enqueue({
1238
2054
  type: "text-delta",
1239
- textDelta: choice.text
2055
+ delta: choice.text,
2056
+ id: generateId()
1240
2057
  });
1241
2058
  }
1242
- const mappedLogprobs = mapOpenRouterCompletionLogProbs(
1243
- choice == null ? void 0 : choice.logprobs
1244
- );
1245
- if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
1246
- if (logprobs === void 0) {
1247
- logprobs = [];
1248
- }
1249
- logprobs.push(...mappedLogprobs);
1250
- }
1251
2059
  },
1252
2060
  flush(controller) {
1253
2061
  controller.enqueue({
1254
2062
  type: "finish",
1255
2063
  finishReason,
1256
- logprobs,
1257
- usage
2064
+ usage,
2065
+ providerMetadata: {
2066
+ openrouter: {
2067
+ usage: openrouterUsage
2068
+ }
2069
+ }
1258
2070
  });
1259
2071
  }
1260
2072
  })
1261
2073
  ),
1262
- rawCall: { rawPrompt, rawSettings },
1263
- rawResponse: { headers: responseHeaders },
1264
- warnings: []
2074
+ response: {
2075
+ headers: responseHeaders
2076
+ }
1265
2077
  };
1266
2078
  }
1267
2079
  };
1268
- var OpenRouterCompletionChunkSchema = z4.union([
1269
- z4.object({
1270
- id: z4.string().optional(),
1271
- model: z4.string().optional(),
1272
- choices: z4.array(
1273
- z4.object({
1274
- text: z4.string(),
1275
- reasoning: z4.string().nullish().optional(),
1276
- reasoning_details: ReasoningDetailArraySchema.nullish(),
1277
- finish_reason: z4.string().nullish(),
1278
- index: z4.number(),
1279
- logprobs: z4.object({
1280
- tokens: z4.array(z4.string()),
1281
- token_logprobs: z4.array(z4.number()),
1282
- top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullable()
1283
- }).nullable().optional()
1284
- })
1285
- ),
1286
- usage: z4.object({
1287
- prompt_tokens: z4.number(),
1288
- completion_tokens: z4.number()
1289
- }).optional().nullable()
1290
- }),
1291
- OpenRouterErrorResponseSchema
1292
- ]);
1293
2080
 
1294
- // src/openrouter-facade.ts
2081
+ // src/facade.ts
1295
2082
  var OpenRouter = class {
1296
2083
  /**
1297
2084
  * Creates a new OpenRouter provider instance.
1298
2085
  */
1299
2086
  constructor(options = {}) {
1300
- var _a, _b;
1301
- this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
2087
+ var _a15, _b;
2088
+ this.baseURL = (_b = withoutTrailingSlash((_a15 = options.baseURL) != null ? _a15 : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
1302
2089
  this.apiKey = options.apiKey;
1303
2090
  this.headers = options.headers;
1304
2091
  }
@@ -1332,14 +2119,13 @@ var OpenRouter = class {
1332
2119
  }
1333
2120
  };
1334
2121
 
1335
- // src/openrouter-provider.ts
1336
- import { loadApiKey as loadApiKey2, withoutTrailingSlash as withoutTrailingSlash2 } from "@ai-sdk/provider-utils";
2122
+ // src/provider.ts
1337
2123
  function createOpenRouter(options = {}) {
1338
- var _a, _b, _c;
1339
- const baseURL = (_b = withoutTrailingSlash2((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
2124
+ var _a15, _b, _c;
2125
+ const baseURL = (_b = withoutTrailingSlash((_a15 = options.baseURL) != null ? _a15 : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
1340
2126
  const compatibility = (_c = options.compatibility) != null ? _c : "compatible";
1341
2127
  const getHeaders = () => __spreadValues({
1342
- Authorization: `Bearer ${loadApiKey2({
2128
+ Authorization: `Bearer ${loadApiKey({
1343
2129
  apiKey: options.apiKey,
1344
2130
  environmentVariableName: "OPENROUTER_API_KEY",
1345
2131
  description: "OpenRouter"