@openrouter/ai-sdk-provider 0.7.2 → 1.0.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -17,24 +17,875 @@ var __spreadValues = (a, b) => {
17
17
  return a;
18
18
  };
19
19
  var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
20
- var __objRest = (source, exclude) => {
21
- var target = {};
22
- for (var prop in source)
23
- if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
24
- target[prop] = source[prop];
25
- if (source != null && __getOwnPropSymbols)
26
- for (var prop of __getOwnPropSymbols(source)) {
27
- if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
28
- target[prop] = source[prop];
20
+
21
+ // node_modules/.pnpm/@ai-sdk+provider@2.0.0-beta.1/node_modules/@ai-sdk/provider/dist/index.mjs
22
+ var marker = "vercel.ai.error";
23
+ var symbol = Symbol.for(marker);
24
+ var _a;
25
+ var _AISDKError = class _AISDKError2 extends Error {
26
+ /**
27
+ * Creates an AI SDK Error.
28
+ *
29
+ * @param {Object} params - The parameters for creating the error.
30
+ * @param {string} params.name - The name of the error.
31
+ * @param {string} params.message - The error message.
32
+ * @param {unknown} [params.cause] - The underlying cause of the error.
33
+ */
34
+ constructor({
35
+ name: name14,
36
+ message,
37
+ cause
38
+ }) {
39
+ super(message);
40
+ this[_a] = true;
41
+ this.name = name14;
42
+ this.cause = cause;
43
+ }
44
+ /**
45
+ * Checks if the given error is an AI SDK Error.
46
+ * @param {unknown} error - The error to check.
47
+ * @returns {boolean} True if the error is an AI SDK Error, false otherwise.
48
+ */
49
+ static isInstance(error) {
50
+ return _AISDKError2.hasMarker(error, marker);
51
+ }
52
+ static hasMarker(error, marker15) {
53
+ const markerSymbol = Symbol.for(marker15);
54
+ return error != null && typeof error === "object" && markerSymbol in error && typeof error[markerSymbol] === "boolean" && error[markerSymbol] === true;
55
+ }
56
+ };
57
+ _a = symbol;
58
+ var AISDKError = _AISDKError;
59
+ var name = "AI_APICallError";
60
+ var marker2 = `vercel.ai.error.${name}`;
61
+ var symbol2 = Symbol.for(marker2);
62
+ var _a2;
63
+ var APICallError = class extends AISDKError {
64
+ constructor({
65
+ message,
66
+ url,
67
+ requestBodyValues,
68
+ statusCode,
69
+ responseHeaders,
70
+ responseBody,
71
+ cause,
72
+ isRetryable = statusCode != null && (statusCode === 408 || // request timeout
73
+ statusCode === 409 || // conflict
74
+ statusCode === 429 || // too many requests
75
+ statusCode >= 500),
76
+ // server error
77
+ data
78
+ }) {
79
+ super({ name, message, cause });
80
+ this[_a2] = true;
81
+ this.url = url;
82
+ this.requestBodyValues = requestBodyValues;
83
+ this.statusCode = statusCode;
84
+ this.responseHeaders = responseHeaders;
85
+ this.responseBody = responseBody;
86
+ this.isRetryable = isRetryable;
87
+ this.data = data;
88
+ }
89
+ static isInstance(error) {
90
+ return AISDKError.hasMarker(error, marker2);
91
+ }
92
+ };
93
+ _a2 = symbol2;
94
+ var name2 = "AI_EmptyResponseBodyError";
95
+ var marker3 = `vercel.ai.error.${name2}`;
96
+ var symbol3 = Symbol.for(marker3);
97
+ var _a3;
98
+ var EmptyResponseBodyError = class extends AISDKError {
99
+ // used in isInstance
100
+ constructor({ message = "Empty response body" } = {}) {
101
+ super({ name: name2, message });
102
+ this[_a3] = true;
103
+ }
104
+ static isInstance(error) {
105
+ return AISDKError.hasMarker(error, marker3);
106
+ }
107
+ };
108
+ _a3 = symbol3;
109
+ function getErrorMessage(error) {
110
+ if (error == null) {
111
+ return "unknown error";
112
+ }
113
+ if (typeof error === "string") {
114
+ return error;
115
+ }
116
+ if (error instanceof Error) {
117
+ return error.message;
118
+ }
119
+ return JSON.stringify(error);
120
+ }
121
+ var name3 = "AI_InvalidArgumentError";
122
+ var marker4 = `vercel.ai.error.${name3}`;
123
+ var symbol4 = Symbol.for(marker4);
124
+ var _a4;
125
+ var InvalidArgumentError = class extends AISDKError {
126
+ constructor({
127
+ message,
128
+ cause,
129
+ argument
130
+ }) {
131
+ super({ name: name3, message, cause });
132
+ this[_a4] = true;
133
+ this.argument = argument;
134
+ }
135
+ static isInstance(error) {
136
+ return AISDKError.hasMarker(error, marker4);
137
+ }
138
+ };
139
+ _a4 = symbol4;
140
+ var name4 = "AI_InvalidPromptError";
141
+ var marker5 = `vercel.ai.error.${name4}`;
142
+ var symbol5 = Symbol.for(marker5);
143
+ var _a5;
144
+ var InvalidPromptError = class extends AISDKError {
145
+ constructor({
146
+ prompt,
147
+ message,
148
+ cause
149
+ }) {
150
+ super({ name: name4, message: `Invalid prompt: ${message}`, cause });
151
+ this[_a5] = true;
152
+ this.prompt = prompt;
153
+ }
154
+ static isInstance(error) {
155
+ return AISDKError.hasMarker(error, marker5);
156
+ }
157
+ };
158
+ _a5 = symbol5;
159
+ var name5 = "AI_InvalidResponseDataError";
160
+ var marker6 = `vercel.ai.error.${name5}`;
161
+ var symbol6 = Symbol.for(marker6);
162
+ var _a6;
163
+ var InvalidResponseDataError = class extends AISDKError {
164
+ constructor({
165
+ data,
166
+ message = `Invalid response data: ${JSON.stringify(data)}.`
167
+ }) {
168
+ super({ name: name5, message });
169
+ this[_a6] = true;
170
+ this.data = data;
171
+ }
172
+ static isInstance(error) {
173
+ return AISDKError.hasMarker(error, marker6);
174
+ }
175
+ };
176
+ _a6 = symbol6;
177
+ var name6 = "AI_JSONParseError";
178
+ var marker7 = `vercel.ai.error.${name6}`;
179
+ var symbol7 = Symbol.for(marker7);
180
+ var _a7;
181
+ var JSONParseError = class extends AISDKError {
182
+ constructor({ text, cause }) {
183
+ super({
184
+ name: name6,
185
+ message: `JSON parsing failed: Text: ${text}.
186
+ Error message: ${getErrorMessage(cause)}`,
187
+ cause
188
+ });
189
+ this[_a7] = true;
190
+ this.text = text;
191
+ }
192
+ static isInstance(error) {
193
+ return AISDKError.hasMarker(error, marker7);
194
+ }
195
+ };
196
+ _a7 = symbol7;
197
+ var name7 = "AI_LoadAPIKeyError";
198
+ var marker8 = `vercel.ai.error.${name7}`;
199
+ var symbol8 = Symbol.for(marker8);
200
+ var _a8;
201
+ var LoadAPIKeyError = class extends AISDKError {
202
+ // used in isInstance
203
+ constructor({ message }) {
204
+ super({ name: name7, message });
205
+ this[_a8] = true;
206
+ }
207
+ static isInstance(error) {
208
+ return AISDKError.hasMarker(error, marker8);
209
+ }
210
+ };
211
+ _a8 = symbol8;
212
+ var name8 = "AI_LoadSettingError";
213
+ var marker9 = `vercel.ai.error.${name8}`;
214
+ var symbol9 = Symbol.for(marker9);
215
+ var _a9;
216
+ _a9 = symbol9;
217
+ var name9 = "AI_NoContentGeneratedError";
218
+ var marker10 = `vercel.ai.error.${name9}`;
219
+ var symbol10 = Symbol.for(marker10);
220
+ var _a10;
221
+ _a10 = symbol10;
222
+ var name10 = "AI_NoSuchModelError";
223
+ var marker11 = `vercel.ai.error.${name10}`;
224
+ var symbol11 = Symbol.for(marker11);
225
+ var _a11;
226
+ _a11 = symbol11;
227
+ var name11 = "AI_TooManyEmbeddingValuesForCallError";
228
+ var marker12 = `vercel.ai.error.${name11}`;
229
+ var symbol12 = Symbol.for(marker12);
230
+ var _a12;
231
+ _a12 = symbol12;
232
+ var name12 = "AI_TypeValidationError";
233
+ var marker13 = `vercel.ai.error.${name12}`;
234
+ var symbol13 = Symbol.for(marker13);
235
+ var _a13;
236
+ var _TypeValidationError = class _TypeValidationError2 extends AISDKError {
237
+ constructor({ value, cause }) {
238
+ super({
239
+ name: name12,
240
+ message: `Type validation failed: Value: ${JSON.stringify(value)}.
241
+ Error message: ${getErrorMessage(cause)}`,
242
+ cause
243
+ });
244
+ this[_a13] = true;
245
+ this.value = value;
246
+ }
247
+ static isInstance(error) {
248
+ return AISDKError.hasMarker(error, marker13);
249
+ }
250
+ /**
251
+ * Wraps an error into a TypeValidationError.
252
+ * If the cause is already a TypeValidationError with the same value, it returns the cause.
253
+ * Otherwise, it creates a new TypeValidationError.
254
+ *
255
+ * @param {Object} params - The parameters for wrapping the error.
256
+ * @param {unknown} params.value - The value that failed validation.
257
+ * @param {unknown} params.cause - The original error or cause of the validation failure.
258
+ * @returns {TypeValidationError} A TypeValidationError instance.
259
+ */
260
+ static wrap({
261
+ value,
262
+ cause
263
+ }) {
264
+ return _TypeValidationError2.isInstance(cause) && cause.value === value ? cause : new _TypeValidationError2({ value, cause });
265
+ }
266
+ };
267
+ _a13 = symbol13;
268
+ var TypeValidationError = _TypeValidationError;
269
+ var name13 = "AI_UnsupportedFunctionalityError";
270
+ var marker14 = `vercel.ai.error.${name13}`;
271
+ var symbol14 = Symbol.for(marker14);
272
+ var _a14;
273
+ var UnsupportedFunctionalityError = class extends AISDKError {
274
+ constructor({
275
+ functionality,
276
+ message = `'${functionality}' functionality not supported.`
277
+ }) {
278
+ super({ name: name13, message });
279
+ this[_a14] = true;
280
+ this.functionality = functionality;
281
+ }
282
+ static isInstance(error) {
283
+ return AISDKError.hasMarker(error, marker14);
284
+ }
285
+ };
286
+ _a14 = symbol14;
287
+
288
+ // node_modules/.pnpm/eventsource-parser@3.0.3/node_modules/eventsource-parser/dist/index.js
289
+ var ParseError = class extends Error {
290
+ constructor(message, options) {
291
+ super(message), this.name = "ParseError", this.type = options.type, this.field = options.field, this.value = options.value, this.line = options.line;
292
+ }
293
+ };
294
+ function noop(_arg) {
295
+ }
296
+ function createParser(callbacks) {
297
+ if (typeof callbacks == "function")
298
+ throw new TypeError(
299
+ "`callbacks` must be an object, got a function instead. Did you mean `{onEvent: fn}`?"
300
+ );
301
+ const { onEvent = noop, onError = noop, onRetry = noop, onComment } = callbacks;
302
+ let incompleteLine = "", isFirstChunk = true, id, data = "", eventType = "";
303
+ function feed(newChunk) {
304
+ const chunk = isFirstChunk ? newChunk.replace(/^\xEF\xBB\xBF/, "") : newChunk, [complete, incomplete] = splitLines(`${incompleteLine}${chunk}`);
305
+ for (const line of complete)
306
+ parseLine(line);
307
+ incompleteLine = incomplete, isFirstChunk = false;
308
+ }
309
+ function parseLine(line) {
310
+ if (line === "") {
311
+ dispatchEvent();
312
+ return;
313
+ }
314
+ if (line.startsWith(":")) {
315
+ onComment && onComment(line.slice(line.startsWith(": ") ? 2 : 1));
316
+ return;
317
+ }
318
+ const fieldSeparatorIndex = line.indexOf(":");
319
+ if (fieldSeparatorIndex !== -1) {
320
+ const field = line.slice(0, fieldSeparatorIndex), offset = line[fieldSeparatorIndex + 1] === " " ? 2 : 1, value = line.slice(fieldSeparatorIndex + offset);
321
+ processField(field, value, line);
322
+ return;
323
+ }
324
+ processField(line, "", line);
325
+ }
326
+ function processField(field, value, line) {
327
+ switch (field) {
328
+ case "event":
329
+ eventType = value;
330
+ break;
331
+ case "data":
332
+ data = `${data}${value}
333
+ `;
334
+ break;
335
+ case "id":
336
+ id = value.includes("\0") ? void 0 : value;
337
+ break;
338
+ case "retry":
339
+ /^\d+$/.test(value) ? onRetry(parseInt(value, 10)) : onError(
340
+ new ParseError(`Invalid \`retry\` value: "${value}"`, {
341
+ type: "invalid-retry",
342
+ value,
343
+ line
344
+ })
345
+ );
346
+ break;
347
+ default:
348
+ onError(
349
+ new ParseError(
350
+ `Unknown field "${field.length > 20 ? `${field.slice(0, 20)}\u2026` : field}"`,
351
+ { type: "unknown-field", field, value, line }
352
+ )
353
+ );
354
+ break;
29
355
  }
30
- return target;
356
+ }
357
+ function dispatchEvent() {
358
+ data.length > 0 && onEvent({
359
+ id,
360
+ event: eventType || void 0,
361
+ // If the data buffer's last character is a U+000A LINE FEED (LF) character,
362
+ // then remove the last character from the data buffer.
363
+ data: data.endsWith(`
364
+ `) ? data.slice(0, -1) : data
365
+ }), id = void 0, data = "", eventType = "";
366
+ }
367
+ function reset(options = {}) {
368
+ incompleteLine && options.consume && parseLine(incompleteLine), isFirstChunk = true, id = void 0, data = "", eventType = "", incompleteLine = "";
369
+ }
370
+ return { feed, reset };
371
+ }
372
+ function splitLines(chunk) {
373
+ const lines = [];
374
+ let incompleteLine = "", searchIndex = 0;
375
+ for (; searchIndex < chunk.length; ) {
376
+ const crIndex = chunk.indexOf("\r", searchIndex), lfIndex = chunk.indexOf(`
377
+ `, searchIndex);
378
+ let lineEnd = -1;
379
+ if (crIndex !== -1 && lfIndex !== -1 ? lineEnd = Math.min(crIndex, lfIndex) : crIndex !== -1 ? lineEnd = crIndex : lfIndex !== -1 && (lineEnd = lfIndex), lineEnd === -1) {
380
+ incompleteLine = chunk.slice(searchIndex);
381
+ break;
382
+ } else {
383
+ const line = chunk.slice(searchIndex, lineEnd);
384
+ lines.push(line), searchIndex = lineEnd + 1, chunk[searchIndex - 1] === "\r" && chunk[searchIndex] === `
385
+ ` && searchIndex++;
386
+ }
387
+ }
388
+ return [lines, incompleteLine];
389
+ }
390
+
391
+ // node_modules/.pnpm/eventsource-parser@3.0.3/node_modules/eventsource-parser/dist/stream.js
392
+ var EventSourceParserStream = class extends TransformStream {
393
+ constructor({ onError, onRetry, onComment } = {}) {
394
+ let parser;
395
+ super({
396
+ start(controller) {
397
+ parser = createParser({
398
+ onEvent: (event) => {
399
+ controller.enqueue(event);
400
+ },
401
+ onError(error) {
402
+ onError === "terminate" ? controller.error(error) : typeof onError == "function" && onError(error);
403
+ },
404
+ onRetry,
405
+ onComment
406
+ });
407
+ },
408
+ transform(chunk) {
409
+ parser.feed(chunk);
410
+ }
411
+ });
412
+ }
31
413
  };
32
414
 
33
- // src/openrouter-facade.ts
34
- import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
415
+ // node_modules/.pnpm/@ai-sdk+provider-utils@3.0.0-beta.2_zod@3.25.74/node_modules/@ai-sdk/provider-utils/dist/index.mjs
416
+ import * as z4 from "zod/v4";
417
+
418
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/Options.js
419
+ var ignoreOverride = Symbol("Let zodToJsonSchema decide on which parser to use");
420
+
421
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/selectParser.js
422
+ import { ZodFirstPartyTypeKind as ZodFirstPartyTypeKind3 } from "zod";
423
+
424
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/array.js
425
+ import { ZodFirstPartyTypeKind } from "zod";
426
+
427
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/record.js
428
+ import { ZodFirstPartyTypeKind as ZodFirstPartyTypeKind2 } from "zod";
429
+
430
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/string.js
431
+ var ALPHA_NUMERIC = new Set("ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789");
432
+
433
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/object.js
434
+ import { ZodOptional } from "zod";
435
+
436
+ // node_modules/.pnpm/@ai-sdk+provider-utils@3.0.0-beta.2_zod@3.25.74/node_modules/@ai-sdk/provider-utils/dist/index.mjs
437
+ function combineHeaders(...headers) {
438
+ return headers.reduce(
439
+ (combinedHeaders, currentHeaders) => __spreadValues(__spreadValues({}, combinedHeaders), currentHeaders != null ? currentHeaders : {}),
440
+ {}
441
+ );
442
+ }
443
+ function extractResponseHeaders(response) {
444
+ return Object.fromEntries([...response.headers]);
445
+ }
446
+ var createIdGenerator = ({
447
+ prefix,
448
+ size = 16,
449
+ alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
450
+ separator = "-"
451
+ } = {}) => {
452
+ const generator = () => {
453
+ const alphabetLength = alphabet.length;
454
+ const chars = new Array(size);
455
+ for (let i = 0; i < size; i++) {
456
+ chars[i] = alphabet[Math.random() * alphabetLength | 0];
457
+ }
458
+ return chars.join("");
459
+ };
460
+ if (prefix == null) {
461
+ return generator;
462
+ }
463
+ if (alphabet.includes(separator)) {
464
+ throw new InvalidArgumentError({
465
+ argument: "separator",
466
+ message: `The separator "${separator}" must not be part of the alphabet "${alphabet}".`
467
+ });
468
+ }
469
+ return () => `${prefix}${separator}${generator()}`;
470
+ };
471
+ var generateId = createIdGenerator();
472
+ function isAbortError(error) {
473
+ return error instanceof Error && (error.name === "AbortError" || error.name === "TimeoutError");
474
+ }
475
+ var FETCH_FAILED_ERROR_MESSAGES = ["fetch failed", "failed to fetch"];
476
+ function handleFetchError({
477
+ error,
478
+ url,
479
+ requestBodyValues
480
+ }) {
481
+ if (isAbortError(error)) {
482
+ return error;
483
+ }
484
+ if (error instanceof TypeError && FETCH_FAILED_ERROR_MESSAGES.includes(error.message.toLowerCase())) {
485
+ const cause = error.cause;
486
+ if (cause != null) {
487
+ return new APICallError({
488
+ message: `Cannot connect to API: ${cause.message}`,
489
+ cause,
490
+ url,
491
+ requestBodyValues,
492
+ isRetryable: true
493
+ // retry when network error
494
+ });
495
+ }
496
+ }
497
+ return error;
498
+ }
499
+ function removeUndefinedEntries(record) {
500
+ return Object.fromEntries(
501
+ Object.entries(record).filter(([_key, value]) => value != null)
502
+ );
503
+ }
504
+ function loadApiKey({
505
+ apiKey,
506
+ environmentVariableName,
507
+ apiKeyParameterName = "apiKey",
508
+ description
509
+ }) {
510
+ if (typeof apiKey === "string") {
511
+ return apiKey;
512
+ }
513
+ if (apiKey != null) {
514
+ throw new LoadAPIKeyError({
515
+ message: `${description} API key must be a string.`
516
+ });
517
+ }
518
+ if (typeof process === "undefined") {
519
+ throw new LoadAPIKeyError({
520
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
521
+ });
522
+ }
523
+ apiKey = process.env[environmentVariableName];
524
+ if (apiKey == null) {
525
+ throw new LoadAPIKeyError({
526
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
527
+ });
528
+ }
529
+ if (typeof apiKey !== "string") {
530
+ throw new LoadAPIKeyError({
531
+ message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
532
+ });
533
+ }
534
+ return apiKey;
535
+ }
536
+ var suspectProtoRx = /"__proto__"\s*:/;
537
+ var suspectConstructorRx = /"constructor"\s*:/;
538
+ function _parse(text) {
539
+ const obj = JSON.parse(text);
540
+ if (obj === null || typeof obj !== "object") {
541
+ return obj;
542
+ }
543
+ if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {
544
+ return obj;
545
+ }
546
+ return filter(obj);
547
+ }
548
+ function filter(obj) {
549
+ let next = [obj];
550
+ while (next.length) {
551
+ const nodes = next;
552
+ next = [];
553
+ for (const node of nodes) {
554
+ if (Object.prototype.hasOwnProperty.call(node, "__proto__")) {
555
+ throw new SyntaxError("Object contains forbidden prototype property");
556
+ }
557
+ if (Object.prototype.hasOwnProperty.call(node, "constructor") && Object.prototype.hasOwnProperty.call(node.constructor, "prototype")) {
558
+ throw new SyntaxError("Object contains forbidden prototype property");
559
+ }
560
+ for (const key in node) {
561
+ const value = node[key];
562
+ if (value && typeof value === "object") {
563
+ next.push(value);
564
+ }
565
+ }
566
+ }
567
+ }
568
+ return obj;
569
+ }
570
+ function secureJsonParse(text) {
571
+ const { stackTraceLimit } = Error;
572
+ Error.stackTraceLimit = 0;
573
+ try {
574
+ return _parse(text);
575
+ } finally {
576
+ Error.stackTraceLimit = stackTraceLimit;
577
+ }
578
+ }
579
+ var validatorSymbol = Symbol.for("vercel.ai.validator");
580
+ function validator(validate) {
581
+ return { [validatorSymbol]: true, validate };
582
+ }
583
+ function isValidator(value) {
584
+ return typeof value === "object" && value !== null && validatorSymbol in value && value[validatorSymbol] === true && "validate" in value;
585
+ }
586
+ function asValidator(value) {
587
+ return isValidator(value) ? value : standardSchemaValidator(value);
588
+ }
589
+ function standardSchemaValidator(standardSchema) {
590
+ return validator(async (value) => {
591
+ const result = await standardSchema["~standard"].validate(value);
592
+ return result.issues == null ? { success: true, value: result.value } : {
593
+ success: false,
594
+ error: new TypeValidationError({
595
+ value,
596
+ cause: result.issues
597
+ })
598
+ };
599
+ });
600
+ }
601
+ async function validateTypes({
602
+ value,
603
+ schema
604
+ }) {
605
+ const result = await safeValidateTypes({ value, schema });
606
+ if (!result.success) {
607
+ throw TypeValidationError.wrap({ value, cause: result.error });
608
+ }
609
+ return result.value;
610
+ }
611
+ async function safeValidateTypes({
612
+ value,
613
+ schema
614
+ }) {
615
+ const validator2 = asValidator(schema);
616
+ try {
617
+ if (validator2.validate == null) {
618
+ return { success: true, value, rawValue: value };
619
+ }
620
+ const result = await validator2.validate(value);
621
+ if (result.success) {
622
+ return { success: true, value: result.value, rawValue: value };
623
+ }
624
+ return {
625
+ success: false,
626
+ error: TypeValidationError.wrap({ value, cause: result.error }),
627
+ rawValue: value
628
+ };
629
+ } catch (error) {
630
+ return {
631
+ success: false,
632
+ error: TypeValidationError.wrap({ value, cause: error }),
633
+ rawValue: value
634
+ };
635
+ }
636
+ }
637
+ async function parseJSON({
638
+ text,
639
+ schema
640
+ }) {
641
+ try {
642
+ const value = secureJsonParse(text);
643
+ if (schema == null) {
644
+ return value;
645
+ }
646
+ return validateTypes({ value, schema });
647
+ } catch (error) {
648
+ if (JSONParseError.isInstance(error) || TypeValidationError.isInstance(error)) {
649
+ throw error;
650
+ }
651
+ throw new JSONParseError({ text, cause: error });
652
+ }
653
+ }
654
+ async function safeParseJSON({
655
+ text,
656
+ schema
657
+ }) {
658
+ try {
659
+ const value = secureJsonParse(text);
660
+ if (schema == null) {
661
+ return { success: true, value, rawValue: value };
662
+ }
663
+ return await safeValidateTypes({ value, schema });
664
+ } catch (error) {
665
+ return {
666
+ success: false,
667
+ error: JSONParseError.isInstance(error) ? error : new JSONParseError({ text, cause: error }),
668
+ rawValue: void 0
669
+ };
670
+ }
671
+ }
672
+ function isParsableJson(input) {
673
+ try {
674
+ secureJsonParse(input);
675
+ return true;
676
+ } catch (e) {
677
+ return false;
678
+ }
679
+ }
680
+ function parseJsonEventStream({
681
+ stream,
682
+ schema
683
+ }) {
684
+ return stream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(
685
+ new TransformStream({
686
+ async transform({ data }, controller) {
687
+ if (data === "[DONE]") {
688
+ return;
689
+ }
690
+ controller.enqueue(await safeParseJSON({ text: data, schema }));
691
+ }
692
+ })
693
+ );
694
+ }
695
+ var getOriginalFetch2 = () => globalThis.fetch;
696
+ var postJsonToApi = async ({
697
+ url,
698
+ headers,
699
+ body,
700
+ failedResponseHandler,
701
+ successfulResponseHandler,
702
+ abortSignal,
703
+ fetch
704
+ }) => postToApi({
705
+ url,
706
+ headers: __spreadValues({
707
+ "Content-Type": "application/json"
708
+ }, headers),
709
+ body: {
710
+ content: JSON.stringify(body),
711
+ values: body
712
+ },
713
+ failedResponseHandler,
714
+ successfulResponseHandler,
715
+ abortSignal,
716
+ fetch
717
+ });
718
+ var postToApi = async ({
719
+ url,
720
+ headers = {},
721
+ body,
722
+ successfulResponseHandler,
723
+ failedResponseHandler,
724
+ abortSignal,
725
+ fetch = getOriginalFetch2()
726
+ }) => {
727
+ try {
728
+ const response = await fetch(url, {
729
+ method: "POST",
730
+ headers: removeUndefinedEntries(headers),
731
+ body: body.content,
732
+ signal: abortSignal
733
+ });
734
+ const responseHeaders = extractResponseHeaders(response);
735
+ if (!response.ok) {
736
+ let errorInformation;
737
+ try {
738
+ errorInformation = await failedResponseHandler({
739
+ response,
740
+ url,
741
+ requestBodyValues: body.values
742
+ });
743
+ } catch (error) {
744
+ if (isAbortError(error) || APICallError.isInstance(error)) {
745
+ throw error;
746
+ }
747
+ throw new APICallError({
748
+ message: "Failed to process error response",
749
+ cause: error,
750
+ statusCode: response.status,
751
+ url,
752
+ responseHeaders,
753
+ requestBodyValues: body.values
754
+ });
755
+ }
756
+ throw errorInformation.value;
757
+ }
758
+ try {
759
+ return await successfulResponseHandler({
760
+ response,
761
+ url,
762
+ requestBodyValues: body.values
763
+ });
764
+ } catch (error) {
765
+ if (error instanceof Error) {
766
+ if (isAbortError(error) || APICallError.isInstance(error)) {
767
+ throw error;
768
+ }
769
+ }
770
+ throw new APICallError({
771
+ message: "Failed to process successful response",
772
+ cause: error,
773
+ statusCode: response.status,
774
+ url,
775
+ responseHeaders,
776
+ requestBodyValues: body.values
777
+ });
778
+ }
779
+ } catch (error) {
780
+ throw handleFetchError({ error, url, requestBodyValues: body.values });
781
+ }
782
+ };
783
+ var createJsonErrorResponseHandler = ({
784
+ errorSchema,
785
+ errorToMessage,
786
+ isRetryable
787
+ }) => async ({ response, url, requestBodyValues }) => {
788
+ const responseBody = await response.text();
789
+ const responseHeaders = extractResponseHeaders(response);
790
+ if (responseBody.trim() === "") {
791
+ return {
792
+ responseHeaders,
793
+ value: new APICallError({
794
+ message: response.statusText,
795
+ url,
796
+ requestBodyValues,
797
+ statusCode: response.status,
798
+ responseHeaders,
799
+ responseBody,
800
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
801
+ })
802
+ };
803
+ }
804
+ try {
805
+ const parsedError = await parseJSON({
806
+ text: responseBody,
807
+ schema: errorSchema
808
+ });
809
+ return {
810
+ responseHeaders,
811
+ value: new APICallError({
812
+ message: errorToMessage(parsedError),
813
+ url,
814
+ requestBodyValues,
815
+ statusCode: response.status,
816
+ responseHeaders,
817
+ responseBody,
818
+ data: parsedError,
819
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
820
+ })
821
+ };
822
+ } catch (parseError) {
823
+ return {
824
+ responseHeaders,
825
+ value: new APICallError({
826
+ message: response.statusText,
827
+ url,
828
+ requestBodyValues,
829
+ statusCode: response.status,
830
+ responseHeaders,
831
+ responseBody,
832
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
833
+ })
834
+ };
835
+ }
836
+ };
837
+ var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
838
+ const responseHeaders = extractResponseHeaders(response);
839
+ if (response.body == null) {
840
+ throw new EmptyResponseBodyError({});
841
+ }
842
+ return {
843
+ responseHeaders,
844
+ value: parseJsonEventStream({
845
+ stream: response.body,
846
+ schema: chunkSchema
847
+ })
848
+ };
849
+ };
850
+ var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
851
+ const responseBody = await response.text();
852
+ const parsedResult = await safeParseJSON({
853
+ text: responseBody,
854
+ schema: responseSchema
855
+ });
856
+ const responseHeaders = extractResponseHeaders(response);
857
+ if (!parsedResult.success) {
858
+ throw new APICallError({
859
+ message: "Invalid JSON response",
860
+ cause: parsedResult.error,
861
+ statusCode: response.status,
862
+ responseHeaders,
863
+ responseBody,
864
+ url,
865
+ requestBodyValues
866
+ });
867
+ }
868
+ return {
869
+ responseHeaders,
870
+ value: parsedResult.value,
871
+ rawValue: parsedResult.rawValue
872
+ };
873
+ };
874
+ var schemaSymbol = Symbol.for("vercel.ai.schema");
875
+ var { btoa, atob } = globalThis;
876
+ function convertUint8ArrayToBase64(array) {
877
+ let latin1string = "";
878
+ for (let i = 0; i < array.length; i++) {
879
+ latin1string += String.fromCodePoint(array[i]);
880
+ }
881
+ return btoa(latin1string);
882
+ }
883
+ function withoutTrailingSlash(url) {
884
+ return url == null ? void 0 : url.replace(/\/$/, "");
885
+ }
35
886
 
36
887
  // src/schemas/reasoning-details.ts
37
- import { z } from "zod";
888
+ import { z } from "zod/v4";
38
889
  var ReasoningDetailSummarySchema = z.object({
39
890
  type: z.literal("reasoning.summary" /* Summary */),
40
891
  summary: z.string()
@@ -59,56 +910,79 @@ var ReasoningDetailsWithUnknownSchema = z.union([
59
910
  ]);
60
911
  var ReasoningDetailArraySchema = z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));
61
912
 
62
- // src/openrouter-chat-language-model.ts
63
- import {
64
- InvalidResponseDataError,
65
- UnsupportedFunctionalityError
66
- } from "@ai-sdk/provider";
67
- import {
68
- combineHeaders,
69
- createEventSourceResponseHandler,
70
- createJsonResponseHandler,
71
- generateId,
72
- isParsableJson,
73
- postJsonToApi
74
- } from "@ai-sdk/provider-utils";
75
- import { z as z3 } from "zod";
913
+ // src/schemas/error-response.ts
914
+ import { z as z2 } from "zod/v4";
915
+ var OpenRouterErrorResponseSchema = z2.object({
916
+ error: z2.object({
917
+ code: z2.union([z2.string(), z2.number()]).nullable(),
918
+ message: z2.string(),
919
+ type: z2.string().nullable(),
920
+ param: z2.any().nullable()
921
+ })
922
+ });
923
+ var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
924
+ errorSchema: OpenRouterErrorResponseSchema,
925
+ errorToMessage: (data) => data.error.message
926
+ });
927
+
928
+ // src/utils/map-finish-reason.ts
929
+ function mapOpenRouterFinishReason(finishReason) {
930
+ switch (finishReason) {
931
+ case "stop":
932
+ return "stop";
933
+ case "length":
934
+ return "length";
935
+ case "content_filter":
936
+ return "content-filter";
937
+ case "function_call":
938
+ case "tool_calls":
939
+ return "tool-calls";
940
+ default:
941
+ return "unknown";
942
+ }
943
+ }
76
944
 
77
- // src/convert-to-openrouter-chat-messages.ts
78
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
945
+ // src/chat/convert-to-openrouter-chat-messages.ts
79
946
  function getCacheControl(providerMetadata) {
80
- var _a, _b, _c;
947
+ var _a15, _b, _c;
81
948
  const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
82
949
  const openrouter2 = providerMetadata == null ? void 0 : providerMetadata.openrouter;
83
- return (_c = (_b = (_a = openrouter2 == null ? void 0 : openrouter2.cacheControl) != null ? _a : openrouter2 == null ? void 0 : openrouter2.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
950
+ return (_c = (_b = (_a15 = openrouter2 == null ? void 0 : openrouter2.cacheControl) != null ? _a15 : openrouter2 == null ? void 0 : openrouter2.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
84
951
  }
85
952
  function convertToOpenRouterChatMessages(prompt) {
86
- var _a, _b, _c;
953
+ var _a15, _b, _c;
87
954
  const messages = [];
88
- for (const { role, content, providerMetadata } of prompt) {
955
+ for (const { role, content, providerOptions } of prompt) {
89
956
  switch (role) {
90
957
  case "system": {
91
958
  messages.push({
92
959
  role: "system",
93
960
  content,
94
- cache_control: getCacheControl(providerMetadata)
961
+ cache_control: getCacheControl(providerOptions)
95
962
  });
96
963
  break;
97
964
  }
98
965
  case "user": {
99
- if (content.length === 1 && ((_a = content[0]) == null ? void 0 : _a.type) === "text") {
966
+ if (content.length === 1 && ((_a15 = content[0]) == null ? void 0 : _a15.type) === "text") {
967
+ const cacheControl = (_b = getCacheControl(providerOptions)) != null ? _b : getCacheControl(content[0].providerOptions);
968
+ const contentWithCacheControl = cacheControl ? [
969
+ {
970
+ type: "text",
971
+ text: content[0].text,
972
+ cache_control: cacheControl
973
+ }
974
+ ] : content[0].text;
100
975
  messages.push({
101
976
  role: "user",
102
- content: content[0].text,
103
- cache_control: (_b = getCacheControl(providerMetadata)) != null ? _b : getCacheControl(content[0].providerMetadata)
977
+ content: contentWithCacheControl
104
978
  });
105
979
  break;
106
980
  }
107
- const messageCacheControl = getCacheControl(providerMetadata);
981
+ const messageCacheControl = getCacheControl(providerOptions);
108
982
  const contentParts = content.map(
109
983
  (part) => {
110
- var _a2, _b2, _c2, _d;
111
- const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
984
+ var _a16, _b2, _c2, _d, _e, _f, _g;
985
+ const cacheControl = (_a16 = getCacheControl(part.providerOptions)) != null ? _a16 : messageCacheControl;
112
986
  switch (part.type) {
113
987
  case "text":
114
988
  return {
@@ -117,33 +991,35 @@ function convertToOpenRouterChatMessages(prompt) {
117
991
  // For text parts, only use part-specific cache control
118
992
  cache_control: cacheControl
119
993
  };
120
- case "image":
121
- return {
122
- type: "image_url",
123
- image_url: {
124
- url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
125
- part.image
126
- )}`
127
- },
128
- // For image parts, use part-specific or message-level cache control
129
- cache_control: cacheControl
130
- };
131
994
  case "file":
995
+ if ((_b2 = part.mediaType) == null ? void 0 : _b2.startsWith("image/")) {
996
+ return {
997
+ type: "image_url",
998
+ image_url: {
999
+ url: part.data instanceof URL ? part.data.toString() : `data:${(_c2 = part.mediaType) != null ? _c2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
1000
+ part.data instanceof Uint8Array ? part.data : new Uint8Array()
1001
+ )}`
1002
+ },
1003
+ // For image parts, use part-specific or message-level cache control
1004
+ cache_control: cacheControl
1005
+ };
1006
+ }
132
1007
  return {
133
1008
  type: "file",
134
1009
  file: {
135
1010
  filename: String(
136
- (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
1011
+ (_g = (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.filename) != null ? _f : part.filename) != null ? _g : ""
137
1012
  ),
138
- file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
1013
+ file_data: part.data instanceof Uint8Array ? `data:${part.mediaType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mediaType};base64,${part.data}`
139
1014
  },
140
1015
  cache_control: cacheControl
141
1016
  };
142
1017
  default: {
143
- const _exhaustiveCheck = part;
144
- throw new Error(
145
- `Unsupported content part type: ${_exhaustiveCheck}`
146
- );
1018
+ return {
1019
+ type: "text",
1020
+ text: "",
1021
+ cache_control: cacheControl
1022
+ };
147
1023
  }
148
1024
  }
149
1025
  }
@@ -171,7 +1047,7 @@ function convertToOpenRouterChatMessages(prompt) {
171
1047
  type: "function",
172
1048
  function: {
173
1049
  name: part.toolName,
174
- arguments: JSON.stringify(part.args)
1050
+ arguments: JSON.stringify(part.input)
175
1051
  }
176
1052
  });
177
1053
  break;
@@ -180,23 +1056,14 @@ function convertToOpenRouterChatMessages(prompt) {
180
1056
  reasoning += part.text;
181
1057
  reasoningDetails.push({
182
1058
  type: "reasoning.text" /* Text */,
183
- text: part.text,
184
- signature: part.signature
185
- });
186
- break;
187
- }
188
- case "redacted-reasoning": {
189
- reasoningDetails.push({
190
- type: "reasoning.encrypted" /* Encrypted */,
191
- data: part.data
1059
+ text: part.text
192
1060
  });
193
1061
  break;
194
1062
  }
195
1063
  case "file":
196
1064
  break;
197
1065
  default: {
198
- const _exhaustiveCheck = part;
199
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
1066
+ break;
200
1067
  }
201
1068
  }
202
1069
  }
@@ -206,95 +1073,185 @@ function convertToOpenRouterChatMessages(prompt) {
206
1073
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
207
1074
  reasoning: reasoning || void 0,
208
1075
  reasoning_details: reasoningDetails.length > 0 ? reasoningDetails : void 0,
209
- cache_control: getCacheControl(providerMetadata)
1076
+ cache_control: getCacheControl(providerOptions)
210
1077
  });
211
1078
  break;
212
1079
  }
213
1080
  case "tool": {
214
1081
  for (const toolResponse of content) {
1082
+ const content2 = getToolResultContent(toolResponse);
215
1083
  messages.push({
216
1084
  role: "tool",
217
1085
  tool_call_id: toolResponse.toolCallId,
218
- content: JSON.stringify(toolResponse.result),
219
- cache_control: (_c = getCacheControl(providerMetadata)) != null ? _c : getCacheControl(toolResponse.providerMetadata)
1086
+ content: content2,
1087
+ cache_control: (_c = getCacheControl(providerOptions)) != null ? _c : getCacheControl(toolResponse.providerOptions)
220
1088
  });
221
1089
  }
222
1090
  break;
223
1091
  }
224
1092
  default: {
225
- const _exhaustiveCheck = role;
226
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1093
+ break;
227
1094
  }
228
1095
  }
229
1096
  }
230
1097
  return messages;
231
1098
  }
232
-
233
- // src/map-openrouter-chat-logprobs.ts
234
- function mapOpenRouterChatLogProbsOutput(logprobs) {
235
- var _a, _b;
236
- return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
237
- token,
238
- logprob,
239
- topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
240
- token: token2,
241
- logprob: logprob2
242
- })) : []
243
- }))) != null ? _b : void 0;
1099
+ function getToolResultContent(input) {
1100
+ return input.output.type === "text" ? input.output.value : JSON.stringify(input.output.value);
244
1101
  }
245
1102
 
246
- // src/map-openrouter-finish-reason.ts
247
- function mapOpenRouterFinishReason(finishReason) {
248
- switch (finishReason) {
249
- case "stop":
250
- return "stop";
251
- case "length":
252
- return "length";
253
- case "content_filter":
254
- return "content-filter";
255
- case "function_call":
256
- case "tool_calls":
257
- return "tool-calls";
258
- default:
259
- return "unknown";
1103
+ // src/chat/get-tool-choice.ts
1104
+ import { z as z3 } from "zod/v4";
1105
+ var ChatCompletionToolChoiceSchema = z3.union([
1106
+ z3.literal("auto"),
1107
+ z3.literal("none"),
1108
+ z3.literal("required"),
1109
+ z3.object({
1110
+ type: z3.literal("function"),
1111
+ function: z3.object({
1112
+ name: z3.string()
1113
+ })
1114
+ })
1115
+ ]);
1116
+ function getChatCompletionToolChoice(toolChoice) {
1117
+ switch (toolChoice.type) {
1118
+ case "auto":
1119
+ case "none":
1120
+ case "required":
1121
+ return toolChoice.type;
1122
+ case "tool": {
1123
+ return {
1124
+ type: "function",
1125
+ function: { name: toolChoice.toolName }
1126
+ };
1127
+ }
1128
+ default: {
1129
+ toolChoice;
1130
+ throw new Error(`Invalid tool choice type: ${toolChoice}`);
1131
+ }
260
1132
  }
261
1133
  }
262
1134
 
263
- // src/openrouter-error.ts
264
- import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
265
- import { z as z2 } from "zod";
266
- var OpenRouterErrorResponseSchema = z2.object({
267
- error: z2.object({
268
- code: z2.union([z2.string(), z2.number()]).nullable(),
269
- message: z2.string(),
270
- type: z2.string().nullable(),
271
- param: z2.any().nullable()
272
- })
1135
+ // src/chat/schemas.ts
1136
+ import { z as z5 } from "zod/v4";
1137
+ var OpenRouterChatCompletionBaseResponseSchema = z5.object({
1138
+ id: z5.string().optional(),
1139
+ model: z5.string().optional(),
1140
+ usage: z5.object({
1141
+ prompt_tokens: z5.number(),
1142
+ prompt_tokens_details: z5.object({
1143
+ cached_tokens: z5.number()
1144
+ }).nullish(),
1145
+ completion_tokens: z5.number(),
1146
+ completion_tokens_details: z5.object({
1147
+ reasoning_tokens: z5.number()
1148
+ }).nullish(),
1149
+ total_tokens: z5.number(),
1150
+ cost: z5.number().optional()
1151
+ }).nullish()
273
1152
  });
274
- var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
275
- errorSchema: OpenRouterErrorResponseSchema,
276
- errorToMessage: (data) => data.error.message
1153
+ var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
1154
+ choices: z5.array(
1155
+ z5.object({
1156
+ message: z5.object({
1157
+ role: z5.literal("assistant"),
1158
+ content: z5.string().nullable().optional(),
1159
+ reasoning: z5.string().nullable().optional(),
1160
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1161
+ tool_calls: z5.array(
1162
+ z5.object({
1163
+ id: z5.string().optional().nullable(),
1164
+ type: z5.literal("function"),
1165
+ function: z5.object({
1166
+ name: z5.string(),
1167
+ arguments: z5.string()
1168
+ })
1169
+ })
1170
+ ).optional()
1171
+ }),
1172
+ index: z5.number().nullish(),
1173
+ logprobs: z5.object({
1174
+ content: z5.array(
1175
+ z5.object({
1176
+ token: z5.string(),
1177
+ logprob: z5.number(),
1178
+ top_logprobs: z5.array(
1179
+ z5.object({
1180
+ token: z5.string(),
1181
+ logprob: z5.number()
1182
+ })
1183
+ )
1184
+ })
1185
+ ).nullable()
1186
+ }).nullable().optional(),
1187
+ finish_reason: z5.string().optional().nullable()
1188
+ })
1189
+ )
277
1190
  });
1191
+ var OpenRouterStreamChatCompletionChunkSchema = z5.union([
1192
+ OpenRouterChatCompletionBaseResponseSchema.extend({
1193
+ choices: z5.array(
1194
+ z5.object({
1195
+ delta: z5.object({
1196
+ role: z5.enum(["assistant"]).optional(),
1197
+ content: z5.string().nullish(),
1198
+ reasoning: z5.string().nullish().optional(),
1199
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1200
+ tool_calls: z5.array(
1201
+ z5.object({
1202
+ index: z5.number().nullish(),
1203
+ id: z5.string().nullish(),
1204
+ type: z5.literal("function").optional(),
1205
+ function: z5.object({
1206
+ name: z5.string().nullish(),
1207
+ arguments: z5.string().nullish()
1208
+ })
1209
+ })
1210
+ ).nullish()
1211
+ }).nullish(),
1212
+ logprobs: z5.object({
1213
+ content: z5.array(
1214
+ z5.object({
1215
+ token: z5.string(),
1216
+ logprob: z5.number(),
1217
+ top_logprobs: z5.array(
1218
+ z5.object({
1219
+ token: z5.string(),
1220
+ logprob: z5.number()
1221
+ })
1222
+ )
1223
+ })
1224
+ ).nullable()
1225
+ }).nullish(),
1226
+ finish_reason: z5.string().nullable().optional(),
1227
+ index: z5.number().nullish()
1228
+ })
1229
+ )
1230
+ }),
1231
+ OpenRouterErrorResponseSchema
1232
+ ]);
278
1233
 
279
- // src/openrouter-chat-language-model.ts
280
- function isFunctionTool(tool) {
281
- return "parameters" in tool;
282
- }
1234
+ // src/chat/index.ts
283
1235
  var OpenRouterChatLanguageModel = class {
284
1236
  constructor(modelId, settings, config) {
285
- this.specificationVersion = "v1";
1237
+ this.specificationVersion = "v2";
1238
+ this.provider = "openrouter";
286
1239
  this.defaultObjectGenerationMode = "tool";
1240
+ this.supportedUrls = {
1241
+ "image/*": [
1242
+ /^data:image\/[a-zA-Z]+;base64,/,
1243
+ /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i
1244
+ ],
1245
+ // 'text/*': [/^data:text\//, /^https?:\/\/.+$/],
1246
+ "application/*": [/^data:application\//, /^https?:\/\/.+$/]
1247
+ };
287
1248
  this.modelId = modelId;
288
1249
  this.settings = settings;
289
1250
  this.config = config;
290
1251
  }
291
- get provider() {
292
- return this.config.provider;
293
- }
294
1252
  getArgs({
295
- mode,
296
1253
  prompt,
297
- maxTokens,
1254
+ maxOutputTokens,
298
1255
  temperature,
299
1256
  topP,
300
1257
  frequencyPenalty,
@@ -303,12 +1260,10 @@ var OpenRouterChatLanguageModel = class {
303
1260
  stopSequences,
304
1261
  responseFormat,
305
1262
  topK,
306
- providerMetadata
1263
+ tools,
1264
+ toolChoice
307
1265
  }) {
308
- var _a;
309
- const type = mode.type;
310
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
311
- const baseArgs = __spreadValues(__spreadValues(__spreadValues({
1266
+ const baseArgs = __spreadValues(__spreadValues({
312
1267
  // model id:
313
1268
  model: this.modelId,
314
1269
  models: this.settings.models,
@@ -319,7 +1274,7 @@ var OpenRouterChatLanguageModel = class {
319
1274
  user: this.settings.user,
320
1275
  parallel_tool_calls: this.settings.parallelToolCalls,
321
1276
  // standardized settings:
322
- max_tokens: maxTokens,
1277
+ max_tokens: maxOutputTokens,
323
1278
  temperature,
324
1279
  top_p: topP,
325
1280
  frequency_penalty: frequencyPenalty,
@@ -334,44 +1289,34 @@ var OpenRouterChatLanguageModel = class {
334
1289
  include_reasoning: this.settings.includeReasoning,
335
1290
  reasoning: this.settings.reasoning,
336
1291
  usage: this.settings.usage
337
- }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
338
- switch (type) {
339
- case "regular": {
340
- return __spreadValues(__spreadValues({}, baseArgs), prepareToolsAndToolChoice(mode));
341
- }
342
- case "object-json": {
343
- return __spreadProps(__spreadValues({}, baseArgs), {
344
- response_format: { type: "json_object" }
345
- });
346
- }
347
- case "object-tool": {
348
- return __spreadProps(__spreadValues({}, baseArgs), {
349
- tool_choice: { type: "function", function: { name: mode.tool.name } },
350
- tools: [
351
- {
352
- type: "function",
353
- function: {
354
- name: mode.tool.name,
355
- description: mode.tool.description,
356
- parameters: mode.tool.parameters
357
- }
358
- }
359
- ]
360
- });
361
- }
362
- // Handle all non-text types with a single default case
363
- default: {
364
- const _exhaustiveCheck = type;
365
- throw new UnsupportedFunctionalityError({
366
- functionality: `${_exhaustiveCheck} mode`
367
- });
368
- }
1292
+ }, this.config.extraBody), this.settings.extraBody);
1293
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
1294
+ return __spreadProps(__spreadValues({}, baseArgs), {
1295
+ response_format: { type: "json_object" }
1296
+ });
369
1297
  }
1298
+ if (tools && tools.length > 0) {
1299
+ const mappedTools = tools.filter((tool) => tool.type === "function").map((tool) => ({
1300
+ type: "function",
1301
+ function: {
1302
+ name: tool.name,
1303
+ description: tool.type,
1304
+ parameters: tool.inputSchema
1305
+ }
1306
+ }));
1307
+ return __spreadProps(__spreadValues({}, baseArgs), {
1308
+ tools: mappedTools,
1309
+ tool_choice: toolChoice ? getChatCompletionToolChoice(toolChoice) : void 0
1310
+ });
1311
+ }
1312
+ return baseArgs;
370
1313
  }
371
1314
  async doGenerate(options) {
372
- var _b, _c, _d, _e, _f, _g, _h, _i, _j;
373
- const args = this.getArgs(options);
374
- const { responseHeaders, value: response } = await postJsonToApi({
1315
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t;
1316
+ const providerOptions = options.providerOptions || {};
1317
+ const openrouterOptions = providerOptions.openrouter || {};
1318
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1319
+ const { value: response, responseHeaders } = await postJsonToApi({
375
1320
  url: this.config.url({
376
1321
  path: "/chat/completions",
377
1322
  modelId: this.modelId
@@ -385,46 +1330,33 @@ var OpenRouterChatLanguageModel = class {
385
1330
  abortSignal: options.abortSignal,
386
1331
  fetch: this.config.fetch
387
1332
  });
388
- const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
389
1333
  const choice = response.choices[0];
390
1334
  if (!choice) {
391
1335
  throw new Error("No choice in response");
392
1336
  }
393
1337
  const usageInfo = response.usage ? {
394
- promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
395
- completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
1338
+ inputTokens: (_a15 = response.usage.prompt_tokens) != null ? _a15 : 0,
1339
+ outputTokens: (_b = response.usage.completion_tokens) != null ? _b : 0,
1340
+ totalTokens: ((_c = response.usage.prompt_tokens) != null ? _c : 0) + ((_d = response.usage.completion_tokens) != null ? _d : 0),
1341
+ reasoningTokens: (_f = (_e = response.usage.completion_tokens_details) == null ? void 0 : _e.reasoning_tokens) != null ? _f : 0,
1342
+ cachedInputTokens: (_h = (_g = response.usage.prompt_tokens_details) == null ? void 0 : _g.cached_tokens) != null ? _h : 0
396
1343
  } : {
397
- promptTokens: 0,
398
- completionTokens: 0
1344
+ inputTokens: 0,
1345
+ outputTokens: 0,
1346
+ totalTokens: 0,
1347
+ reasoningTokens: 0,
1348
+ cachedInputTokens: 0
399
1349
  };
400
- const providerMetadata = {};
401
- if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
402
- providerMetadata.openrouter = {
403
- usage: {
404
- promptTokens: response.usage.prompt_tokens,
405
- promptTokensDetails: response.usage.prompt_tokens_details ? {
406
- cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
407
- } : void 0,
408
- completionTokens: response.usage.completion_tokens,
409
- completionTokensDetails: response.usage.completion_tokens_details ? {
410
- reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
411
- } : void 0,
412
- cost: response.usage.cost,
413
- totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
414
- }
415
- };
416
- }
417
- const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
418
- const reasoningDetails = (_h = choice.message.reasoning_details) != null ? _h : [];
419
- const reasoning = reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
420
- var _a2;
1350
+ const reasoningDetails = (_i = choice.message.reasoning_details) != null ? _i : [];
1351
+ reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
1352
+ var _a16;
421
1353
  switch (detail.type) {
422
1354
  case "reasoning.text" /* Text */: {
423
1355
  if (detail.text) {
424
1356
  return {
425
1357
  type: "text",
426
1358
  text: detail.text,
427
- signature: (_a2 = detail.signature) != null ? _a2 : void 0
1359
+ signature: (_a16 = detail.signature) != null ? _a16 : void 0
428
1360
  };
429
1361
  }
430
1362
  break;
@@ -458,34 +1390,58 @@ var OpenRouterChatLanguageModel = class {
458
1390
  text: choice.message.reasoning
459
1391
  }
460
1392
  ] : [];
461
- return __spreadValues({
462
- response: {
463
- id: response.id,
464
- modelId: response.model
465
- },
466
- text: (_i = choice.message.content) != null ? _i : void 0,
467
- reasoning,
468
- toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
469
- var _a2;
470
- return {
471
- toolCallType: "function",
472
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
1393
+ const content = [];
1394
+ if (choice.message.content) {
1395
+ content.push({
1396
+ type: "text",
1397
+ text: choice.message.content
1398
+ });
1399
+ }
1400
+ if (choice.message.tool_calls) {
1401
+ for (const toolCall of choice.message.tool_calls) {
1402
+ content.push({
1403
+ type: "tool-call",
1404
+ toolCallId: (_j = toolCall.id) != null ? _j : generateId(),
473
1405
  toolName: toolCall.function.name,
474
- args: toolCall.function.arguments
475
- };
476
- }),
1406
+ input: toolCall.function.arguments
1407
+ });
1408
+ }
1409
+ }
1410
+ return {
1411
+ content,
477
1412
  finishReason: mapOpenRouterFinishReason(choice.finish_reason),
478
1413
  usage: usageInfo,
479
- rawCall: { rawPrompt, rawSettings },
480
- rawResponse: { headers: responseHeaders },
481
1414
  warnings: [],
482
- logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
483
- }, hasProviderMetadata ? { providerMetadata } : {});
1415
+ providerMetadata: {
1416
+ openrouter: {
1417
+ usage: {
1418
+ promptTokens: (_k = usageInfo.inputTokens) != null ? _k : 0,
1419
+ completionTokens: (_l = usageInfo.outputTokens) != null ? _l : 0,
1420
+ totalTokens: (_m = usageInfo.totalTokens) != null ? _m : 0,
1421
+ cost: (_n = response.usage) == null ? void 0 : _n.cost,
1422
+ promptTokensDetails: {
1423
+ cachedTokens: (_q = (_p = (_o = response.usage) == null ? void 0 : _o.prompt_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : 0
1424
+ },
1425
+ completionTokensDetails: {
1426
+ reasoningTokens: (_t = (_s = (_r = response.usage) == null ? void 0 : _r.completion_tokens_details) == null ? void 0 : _s.reasoning_tokens) != null ? _t : 0
1427
+ }
1428
+ }
1429
+ }
1430
+ },
1431
+ request: { body: args },
1432
+ response: {
1433
+ id: response.id,
1434
+ modelId: response.model,
1435
+ headers: responseHeaders
1436
+ }
1437
+ };
484
1438
  }
485
1439
  async doStream(options) {
486
- var _a, _c;
487
- const args = this.getArgs(options);
488
- const { responseHeaders, value: response } = await postJsonToApi({
1440
+ var _a15;
1441
+ const providerOptions = options.providerOptions || {};
1442
+ const openrouterOptions = providerOptions.openrouter || {};
1443
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1444
+ const { value: response, responseHeaders } = await postJsonToApi({
489
1445
  url: this.config.url({
490
1446
  path: "/chat/completions",
491
1447
  modelId: this.modelId
@@ -496,7 +1452,7 @@ var OpenRouterChatLanguageModel = class {
496
1452
  // only include stream_options when in strict compatibility mode:
497
1453
  stream_options: this.config.compatibility === "strict" ? __spreadValues({
498
1454
  include_usage: true
499
- }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
1455
+ }, ((_a15 = this.settings.usage) == null ? void 0 : _a15.include) ? { include_usage: true } : {}) : void 0
500
1456
  }),
501
1457
  failedResponseHandler: openrouterFailedResponseHandler,
502
1458
  successfulResponseHandler: createEventSourceResponseHandler(
@@ -505,21 +1461,21 @@ var OpenRouterChatLanguageModel = class {
505
1461
  abortSignal: options.abortSignal,
506
1462
  fetch: this.config.fetch
507
1463
  });
508
- const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
509
1464
  const toolCalls = [];
510
1465
  let finishReason = "other";
511
- let usage = {
512
- promptTokens: Number.NaN,
513
- completionTokens: Number.NaN
1466
+ const usage = {
1467
+ inputTokens: Number.NaN,
1468
+ outputTokens: Number.NaN,
1469
+ totalTokens: Number.NaN,
1470
+ reasoningTokens: Number.NaN,
1471
+ cachedInputTokens: Number.NaN
514
1472
  };
515
- let logprobs;
516
1473
  const openrouterUsage = {};
517
- const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
518
1474
  return {
519
1475
  stream: response.pipeThrough(
520
1476
  new TransformStream({
521
1477
  transform(chunk, controller) {
522
- var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
1478
+ var _a16, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
523
1479
  if (!chunk.success) {
524
1480
  finishReason = "error";
525
1481
  controller.enqueue({ type: "error", error: chunk.error });
@@ -544,20 +1500,23 @@ var OpenRouterChatLanguageModel = class {
544
1500
  });
545
1501
  }
546
1502
  if (value.usage != null) {
547
- usage = {
548
- promptTokens: value.usage.prompt_tokens,
549
- completionTokens: value.usage.completion_tokens
550
- };
1503
+ usage.inputTokens = value.usage.prompt_tokens;
1504
+ usage.outputTokens = value.usage.completion_tokens;
1505
+ usage.totalTokens = value.usage.prompt_tokens + value.usage.completion_tokens;
551
1506
  openrouterUsage.promptTokens = value.usage.prompt_tokens;
552
1507
  if (value.usage.prompt_tokens_details) {
1508
+ const cachedInputTokens = (_a16 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a16 : 0;
1509
+ usage.cachedInputTokens = cachedInputTokens;
553
1510
  openrouterUsage.promptTokensDetails = {
554
- cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
1511
+ cachedTokens: cachedInputTokens
555
1512
  };
556
1513
  }
557
1514
  openrouterUsage.completionTokens = value.usage.completion_tokens;
558
1515
  if (value.usage.completion_tokens_details) {
1516
+ const reasoningTokens = (_b = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b : 0;
1517
+ usage.reasoningTokens = reasoningTokens;
559
1518
  openrouterUsage.completionTokensDetails = {
560
- reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
1519
+ reasoningTokens
561
1520
  };
562
1521
  }
563
1522
  openrouterUsage.cost = value.usage.cost;
@@ -574,13 +1533,15 @@ var OpenRouterChatLanguageModel = class {
574
1533
  if (delta.content != null) {
575
1534
  controller.enqueue({
576
1535
  type: "text-delta",
577
- textDelta: delta.content
1536
+ delta: delta.content,
1537
+ id: generateId()
578
1538
  });
579
1539
  }
580
1540
  if (delta.reasoning != null) {
581
1541
  controller.enqueue({
582
- type: "reasoning",
583
- textDelta: delta.reasoning
1542
+ type: "reasoning-delta",
1543
+ delta: delta.reasoning,
1544
+ id: generateId()
584
1545
  });
585
1546
  }
586
1547
  if (delta.reasoning_details && delta.reasoning_details.length > 0) {
@@ -589,14 +1550,15 @@ var OpenRouterChatLanguageModel = class {
589
1550
  case "reasoning.text" /* Text */: {
590
1551
  if (detail.text) {
591
1552
  controller.enqueue({
592
- type: "reasoning",
593
- textDelta: detail.text
1553
+ type: "reasoning-delta",
1554
+ delta: detail.text,
1555
+ id: generateId()
594
1556
  });
595
1557
  }
596
1558
  if (detail.signature) {
597
1559
  controller.enqueue({
598
- type: "reasoning-signature",
599
- signature: detail.signature
1560
+ type: "reasoning-end",
1561
+ id: generateId()
600
1562
  });
601
1563
  }
602
1564
  break;
@@ -604,8 +1566,9 @@ var OpenRouterChatLanguageModel = class {
604
1566
  case "reasoning.encrypted" /* Encrypted */: {
605
1567
  if (detail.data) {
606
1568
  controller.enqueue({
607
- type: "redacted-reasoning",
608
- data: detail.data
1569
+ type: "reasoning-delta",
1570
+ delta: "[REDACTED]",
1571
+ id: generateId()
609
1572
  });
610
1573
  }
611
1574
  break;
@@ -613,8 +1576,9 @@ var OpenRouterChatLanguageModel = class {
613
1576
  case "reasoning.summary" /* Summary */: {
614
1577
  if (detail.summary) {
615
1578
  controller.enqueue({
616
- type: "reasoning",
617
- textDelta: detail.summary
1579
+ type: "reasoning-delta",
1580
+ delta: detail.summary,
1581
+ id: generateId()
618
1582
  });
619
1583
  }
620
1584
  break;
@@ -626,18 +1590,9 @@ var OpenRouterChatLanguageModel = class {
626
1590
  }
627
1591
  }
628
1592
  }
629
- const mappedLogprobs = mapOpenRouterChatLogProbsOutput(
630
- choice == null ? void 0 : choice.logprobs
631
- );
632
- if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
633
- if (logprobs === void 0) {
634
- logprobs = [];
635
- }
636
- logprobs.push(...mappedLogprobs);
637
- }
638
1593
  if (delta.tool_calls != null) {
639
1594
  for (const toolCallDelta of delta.tool_calls) {
640
- const index = toolCallDelta.index;
1595
+ const index = (_c = toolCallDelta.index) != null ? _c : toolCalls.length - 1;
641
1596
  if (toolCalls[index] == null) {
642
1597
  if (toolCallDelta.type !== "function") {
643
1598
  throw new InvalidResponseDataError({
@@ -651,7 +1606,7 @@ var OpenRouterChatLanguageModel = class {
651
1606
  message: `Expected 'id' to be a string.`
652
1607
  });
653
1608
  }
654
- if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
1609
+ if (((_d = toolCallDelta.function) == null ? void 0 : _d.name) == null) {
655
1610
  throw new InvalidResponseDataError({
656
1611
  data: toolCallDelta,
657
1612
  message: `Expected 'function.name' to be a string.`
@@ -662,7 +1617,7 @@ var OpenRouterChatLanguageModel = class {
662
1617
  type: "function",
663
1618
  function: {
664
1619
  name: toolCallDelta.function.name,
665
- arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
1620
+ arguments: (_e = toolCallDelta.function.arguments) != null ? _e : ""
666
1621
  },
667
1622
  sent: false
668
1623
  };
@@ -670,20 +1625,26 @@ var OpenRouterChatLanguageModel = class {
670
1625
  if (toolCall2 == null) {
671
1626
  throw new Error("Tool call is missing");
672
1627
  }
673
- if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
1628
+ if (((_f = toolCall2.function) == null ? void 0 : _f.name) != null && ((_g = toolCall2.function) == null ? void 0 : _g.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
674
1629
  controller.enqueue({
675
- type: "tool-call-delta",
676
- toolCallType: "function",
677
- toolCallId: toolCall2.id,
678
- toolName: toolCall2.function.name,
679
- argsTextDelta: toolCall2.function.arguments
1630
+ type: "tool-input-start",
1631
+ id: toolCall2.id,
1632
+ toolName: toolCall2.function.name
1633
+ });
1634
+ controller.enqueue({
1635
+ type: "tool-input-delta",
1636
+ id: toolCall2.id,
1637
+ delta: toolCall2.function.arguments
1638
+ });
1639
+ controller.enqueue({
1640
+ type: "tool-input-end",
1641
+ id: toolCall2.id
680
1642
  });
681
1643
  controller.enqueue({
682
1644
  type: "tool-call",
683
- toolCallType: "function",
684
- toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
1645
+ toolCallId: toolCall2.id,
685
1646
  toolName: toolCall2.function.name,
686
- args: toolCall2.function.arguments
1647
+ input: toolCall2.function.arguments
687
1648
  });
688
1649
  toolCall2.sent = true;
689
1650
  }
@@ -693,23 +1654,27 @@ var OpenRouterChatLanguageModel = class {
693
1654
  if (toolCall == null) {
694
1655
  throw new Error("Tool call is missing");
695
1656
  }
696
- if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
697
- toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
1657
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.name) != null) {
1658
+ controller.enqueue({
1659
+ type: "tool-input-start",
1660
+ id: toolCall.id,
1661
+ toolName: toolCall.function.name
1662
+ });
1663
+ }
1664
+ if (((_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null) {
1665
+ toolCall.function.arguments += (_k = (_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null ? _k : "";
698
1666
  }
699
1667
  controller.enqueue({
700
- type: "tool-call-delta",
701
- toolCallType: "function",
702
- toolCallId: toolCall.id,
703
- toolName: toolCall.function.name,
704
- argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
1668
+ type: "tool-input-delta",
1669
+ id: toolCall.id,
1670
+ delta: (_l = toolCallDelta.function.arguments) != null ? _l : ""
705
1671
  });
706
- if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
1672
+ if (((_m = toolCall.function) == null ? void 0 : _m.name) != null && ((_n = toolCall.function) == null ? void 0 : _n.arguments) != null && isParsableJson(toolCall.function.arguments)) {
707
1673
  controller.enqueue({
708
1674
  type: "tool-call",
709
- toolCallType: "function",
710
- toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
1675
+ toolCallId: (_o = toolCall.id) != null ? _o : generateId(),
711
1676
  toolName: toolCall.function.name,
712
- args: toolCall.function.arguments
1677
+ input: toolCall.function.arguments
713
1678
  });
714
1679
  toolCall.sent = true;
715
1680
  }
@@ -717,206 +1682,42 @@ var OpenRouterChatLanguageModel = class {
717
1682
  }
718
1683
  },
719
1684
  flush(controller) {
720
- var _a2;
1685
+ var _a16;
721
1686
  if (finishReason === "tool-calls") {
722
1687
  for (const toolCall of toolCalls) {
723
1688
  if (!toolCall.sent) {
724
1689
  controller.enqueue({
725
1690
  type: "tool-call",
726
- toolCallType: "function",
727
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
1691
+ toolCallId: (_a16 = toolCall.id) != null ? _a16 : generateId(),
728
1692
  toolName: toolCall.function.name,
729
1693
  // Coerce invalid arguments to an empty JSON object
730
- args: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
1694
+ input: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
731
1695
  });
732
1696
  toolCall.sent = true;
733
1697
  }
734
1698
  }
735
1699
  }
736
- const providerMetadata = {};
737
- if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
738
- providerMetadata.openrouter = {
739
- usage: openrouterUsage
740
- };
741
- }
742
- const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
743
- controller.enqueue(__spreadValues({
1700
+ controller.enqueue({
744
1701
  type: "finish",
745
1702
  finishReason,
746
- logprobs,
747
- usage
748
- }, hasProviderMetadata ? { providerMetadata } : {}));
1703
+ usage,
1704
+ providerMetadata: {
1705
+ openrouter: {
1706
+ usage: openrouterUsage
1707
+ }
1708
+ }
1709
+ });
749
1710
  }
750
1711
  })
751
1712
  ),
752
- rawCall: { rawPrompt, rawSettings },
753
- rawResponse: { headers: responseHeaders },
754
- warnings: []
1713
+ warnings: [],
1714
+ request: { body: args },
1715
+ response: { headers: responseHeaders }
755
1716
  };
756
1717
  }
757
1718
  };
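Under the v2 specification, the chat model above streams text-delta and reasoning-delta parts that carry a delta string plus an id, replaces tool-call-delta with tool-input-start / tool-input-delta / tool-input-end, renames args to input on tool-call parts, and reports usage as inputTokens / outputTokens / totalTokens together with reasoningTokens and cachedInputTokens; OpenRouter's own accounting rides on the finish part under providerMetadata.openrouter.usage. A rough consumer-side sketch of reading that raw stream follows; the model construction and the exact call options are assumptions rather than taken from this hunk, and a Node-style async-iterable ReadableStream is assumed for the for-await loop.

// Sketch only: iterate the raw stream returned by doStream and handle the
// v2 part types emitted by the TransformStream above.
const { stream } = await model.doStream({
  prompt: [{ role: "user", content: [{ type: "text", text: "Hello" }] }], // assumed prompt shape
});
for await (const part of stream) {
  switch (part.type) {
    case "text-delta":      // payload is now `delta` plus an `id`
    case "reasoning-delta": // replaces the old `reasoning` part
      process.stdout.write(part.delta);
      break;
    case "tool-call":
      console.log(part.toolName, part.input); // `args` was renamed to `input`
      break;
    case "finish":
      console.log(part.usage.inputTokens, part.usage.outputTokens, part.usage.totalTokens);
      console.log(part.providerMetadata.openrouter.usage); // cost, totalTokens, token details
      break;
    default:
      break; // tool-input-start/-delta/-end, reasoning-end, error, ...
  }
}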
758
- var OpenRouterChatCompletionBaseResponseSchema = z3.object({
759
- id: z3.string().optional(),
760
- model: z3.string().optional(),
761
- usage: z3.object({
762
- prompt_tokens: z3.number(),
763
- prompt_tokens_details: z3.object({
764
- cached_tokens: z3.number()
765
- }).nullish(),
766
- completion_tokens: z3.number(),
767
- completion_tokens_details: z3.object({
768
- reasoning_tokens: z3.number()
769
- }).nullish(),
770
- total_tokens: z3.number(),
771
- cost: z3.number().optional()
772
- }).nullish()
773
- });
774
- var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
775
- choices: z3.array(
776
- z3.object({
777
- message: z3.object({
778
- role: z3.literal("assistant"),
779
- content: z3.string().nullable().optional(),
780
- reasoning: z3.string().nullable().optional(),
781
- reasoning_details: ReasoningDetailArraySchema.nullish(),
782
- tool_calls: z3.array(
783
- z3.object({
784
- id: z3.string().optional().nullable(),
785
- type: z3.literal("function"),
786
- function: z3.object({
787
- name: z3.string(),
788
- arguments: z3.string()
789
- })
790
- })
791
- ).optional()
792
- }),
793
- index: z3.number(),
794
- logprobs: z3.object({
795
- content: z3.array(
796
- z3.object({
797
- token: z3.string(),
798
- logprob: z3.number(),
799
- top_logprobs: z3.array(
800
- z3.object({
801
- token: z3.string(),
802
- logprob: z3.number()
803
- })
804
- )
805
- })
806
- ).nullable()
807
- }).nullable().optional(),
808
- finish_reason: z3.string().optional().nullable()
809
- })
810
- )
811
- });
812
- var OpenRouterStreamChatCompletionChunkSchema = z3.union([
813
- OpenRouterChatCompletionBaseResponseSchema.extend({
814
- choices: z3.array(
815
- z3.object({
816
- delta: z3.object({
817
- role: z3.enum(["assistant"]).optional(),
818
- content: z3.string().nullish(),
819
- reasoning: z3.string().nullish().optional(),
820
- reasoning_details: ReasoningDetailArraySchema.nullish(),
821
- tool_calls: z3.array(
822
- z3.object({
823
- index: z3.number(),
824
- id: z3.string().nullish(),
825
- type: z3.literal("function").optional(),
826
- function: z3.object({
827
- name: z3.string().nullish(),
828
- arguments: z3.string().nullish()
829
- })
830
- })
831
- ).nullish()
832
- }).nullish(),
833
- logprobs: z3.object({
834
- content: z3.array(
835
- z3.object({
836
- token: z3.string(),
837
- logprob: z3.number(),
838
- top_logprobs: z3.array(
839
- z3.object({
840
- token: z3.string(),
841
- logprob: z3.number()
842
- })
843
- )
844
- })
845
- ).nullable()
846
- }).nullish(),
847
- finish_reason: z3.string().nullable().optional(),
848
- index: z3.number()
849
- })
850
- )
851
- }),
852
- OpenRouterErrorResponseSchema
853
- ]);
854
- function prepareToolsAndToolChoice(mode) {
855
- var _a;
856
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
857
- if (tools == null) {
858
- return { tools: void 0, tool_choice: void 0 };
859
- }
860
- const mappedTools = tools.map((tool) => {
861
- if (isFunctionTool(tool)) {
862
- return {
863
- type: "function",
864
- function: {
865
- name: tool.name,
866
- description: tool.description,
867
- parameters: tool.parameters
868
- }
869
- };
870
- }
871
- return {
872
- type: "function",
873
- function: {
874
- name: tool.name
875
- }
876
- };
877
- });
878
- const toolChoice = mode.toolChoice;
879
- if (toolChoice == null) {
880
- return { tools: mappedTools, tool_choice: void 0 };
881
- }
882
- const type = toolChoice.type;
883
- switch (type) {
884
- case "auto":
885
- case "none":
886
- case "required":
887
- return { tools: mappedTools, tool_choice: type };
888
- case "tool":
889
- return {
890
- tools: mappedTools,
891
- tool_choice: {
892
- type: "function",
893
- function: {
894
- name: toolChoice.toolName
895
- }
896
- }
897
- };
898
- default: {
899
- const _exhaustiveCheck = type;
900
- throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
901
- }
902
- }
903
- }
904
-
905
- // src/openrouter-completion-language-model.ts
906
- import { UnsupportedFunctionalityError as UnsupportedFunctionalityError3 } from "@ai-sdk/provider";
907
- import {
908
- combineHeaders as combineHeaders2,
909
- createEventSourceResponseHandler as createEventSourceResponseHandler2,
910
- createJsonResponseHandler as createJsonResponseHandler2,
911
- postJsonToApi as postJsonToApi2
912
- } from "@ai-sdk/provider-utils";
913
- import { z as z4 } from "zod";
914
1719
 
915
- // src/convert-to-openrouter-completion-prompt.ts
916
- import {
917
- InvalidPromptError,
918
- UnsupportedFunctionalityError as UnsupportedFunctionalityError2
919
- } from "@ai-sdk/provider";
1720
+ // src/completion/convert-to-openrouter-completion-prompt.ts
920
1721
  function convertToOpenRouterCompletionPrompt({
921
1722
  prompt,
922
1723
  inputFormat,
@@ -937,7 +1738,7 @@ function convertToOpenRouterCompletionPrompt({
937
1738
  switch (role) {
938
1739
  case "system": {
939
1740
  throw new InvalidPromptError({
940
- message: "Unexpected system message in prompt: ${content}",
1741
+ message: `Unexpected system message in prompt: ${content}`,
941
1742
  prompt
942
1743
  });
943
1744
  }
@@ -947,21 +1748,13 @@ function convertToOpenRouterCompletionPrompt({
947
1748
  case "text": {
948
1749
  return part.text;
949
1750
  }
950
- case "image": {
951
- throw new UnsupportedFunctionalityError2({
952
- functionality: "images"
953
- });
954
- }
955
1751
  case "file": {
956
- throw new UnsupportedFunctionalityError2({
1752
+ throw new UnsupportedFunctionalityError({
957
1753
  functionality: "file attachments"
958
1754
  });
959
1755
  }
960
1756
  default: {
961
- const _exhaustiveCheck = part;
962
- throw new Error(
963
- `Unsupported content type: ${_exhaustiveCheck}`
964
- );
1757
+ return "";
965
1758
  }
966
1759
  }
967
1760
  }).join("");
@@ -972,39 +1765,38 @@ ${userMessage}
972
1765
  break;
973
1766
  }
974
1767
  case "assistant": {
975
- const assistantMessage = content.map((part) => {
976
- switch (part.type) {
977
- case "text": {
978
- return part.text;
979
- }
980
- case "tool-call": {
981
- throw new UnsupportedFunctionalityError2({
982
- functionality: "tool-call messages"
983
- });
984
- }
985
- case "reasoning": {
986
- throw new UnsupportedFunctionalityError2({
987
- functionality: "reasoning messages"
988
- });
989
- }
990
- case "redacted-reasoning": {
991
- throw new UnsupportedFunctionalityError2({
992
- functionality: "redacted reasoning messages"
993
- });
994
- }
995
- case "file": {
996
- throw new UnsupportedFunctionalityError2({
997
- functionality: "file attachments"
998
- });
999
- }
1000
- default: {
1001
- const _exhaustiveCheck = part;
1002
- throw new Error(
1003
- `Unsupported content type: ${_exhaustiveCheck}`
1004
- );
1768
+ const assistantMessage = content.map(
1769
+ (part) => {
1770
+ switch (part.type) {
1771
+ case "text": {
1772
+ return part.text;
1773
+ }
1774
+ case "tool-call": {
1775
+ throw new UnsupportedFunctionalityError({
1776
+ functionality: "tool-call messages"
1777
+ });
1778
+ }
1779
+ case "tool-result": {
1780
+ throw new UnsupportedFunctionalityError({
1781
+ functionality: "tool-result messages"
1782
+ });
1783
+ }
1784
+ case "reasoning": {
1785
+ throw new UnsupportedFunctionalityError({
1786
+ functionality: "reasoning messages"
1787
+ });
1788
+ }
1789
+ case "file": {
1790
+ throw new UnsupportedFunctionalityError({
1791
+ functionality: "file attachments"
1792
+ });
1793
+ }
1794
+ default: {
1795
+ return "";
1796
+ }
1005
1797
  }
1006
1798
  }
1007
- }).join("");
1799
+ ).join("");
1008
1800
  text += `${assistant}:
1009
1801
  ${assistantMessage}
1010
1802
 
@@ -1012,13 +1804,12 @@ ${assistantMessage}
1012
1804
  break;
1013
1805
  }
1014
1806
  case "tool": {
1015
- throw new UnsupportedFunctionalityError2({
1807
+ throw new UnsupportedFunctionalityError({
1016
1808
  functionality: "tool messages"
1017
1809
  });
1018
1810
  }
1019
1811
  default: {
1020
- const _exhaustiveCheck = role;
1021
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1812
+ break;
1022
1813
  }
1023
1814
  }
1024
1815
  }
@@ -1029,40 +1820,63 @@ ${assistantMessage}
1029
1820
  };
1030
1821
  }
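The prompt converter above still flattens everything to a plain-text completion prompt: system messages inside the prompt raise InvalidPromptError, while file parts, tool-call and tool-result parts, reasoning parts, and tool-role messages raise UnsupportedFunctionalityError; unrecognized parts now fall through to an empty string instead of throwing. A small sketch of that boundary; the message passed in is a hypothetical v2 prompt value, not taken from this hunk.

// Sketch: tool-role messages are still rejected by the completion path.
try {
  convertToOpenRouterCompletionPrompt({
    prompt: [{ role: "tool", content: [] }], // hypothetical prompt message
    inputFormat: "prompt",
  });
} catch (error) {
  // UnsupportedFunctionalityError({ functionality: "tool messages" }),
  // thrown by the `case "tool"` branch above.
}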
1031
1822
 
1032
- // src/map-openrouter-completion-logprobs.ts
1033
- function mapOpenRouterCompletionLogProbs(logprobs) {
1034
- return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => {
1035
- var _a, _b;
1036
- return {
1037
- token,
1038
- logprob: (_a = logprobs.token_logprobs[index]) != null ? _a : 0,
1039
- topLogprobs: logprobs.top_logprobs ? Object.entries((_b = logprobs.top_logprobs[index]) != null ? _b : {}).map(
1040
- ([token2, logprob]) => ({
1041
- token: token2,
1042
- logprob
1043
- })
1044
- ) : []
1045
- };
1046
- });
1047
- }
1823
+ // src/completion/schemas.ts
1824
+ import { z as z6 } from "zod/v4";
1825
+ var OpenRouterCompletionChunkSchema = z6.union([
1826
+ z6.object({
1827
+ id: z6.string().optional(),
1828
+ model: z6.string().optional(),
1829
+ choices: z6.array(
1830
+ z6.object({
1831
+ text: z6.string(),
1832
+ reasoning: z6.string().nullish().optional(),
1833
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1834
+ finish_reason: z6.string().nullish(),
1835
+ index: z6.number().nullish(),
1836
+ logprobs: z6.object({
1837
+ tokens: z6.array(z6.string()),
1838
+ token_logprobs: z6.array(z6.number()),
1839
+ top_logprobs: z6.array(z6.record(z6.string(), z6.number())).nullable()
1840
+ }).nullable().optional()
1841
+ })
1842
+ ),
1843
+ usage: z6.object({
1844
+ prompt_tokens: z6.number(),
1845
+ prompt_tokens_details: z6.object({
1846
+ cached_tokens: z6.number()
1847
+ }).nullish(),
1848
+ completion_tokens: z6.number(),
1849
+ completion_tokens_details: z6.object({
1850
+ reasoning_tokens: z6.number()
1851
+ }).nullish(),
1852
+ total_tokens: z6.number(),
1853
+ cost: z6.number().optional()
1854
+ }).nullish()
1855
+ }),
1856
+ OpenRouterErrorResponseSchema
1857
+ ]);
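The completion chunk schema above is now built with zod v4 (imported from zod/v4) and carries the same detailed usage block as the chat schema: cached prompt tokens, reasoning tokens, total tokens, and OpenRouter's cost. For orientation, a payload of roughly this shape satisfies the non-error branch of the union; the field names come from the schema above, while the values are invented for the example.

// Illustrative only; values are made up.
const exampleChunk = {
  id: "gen-123",
  model: "openai/gpt-4o-mini",
  choices: [{ text: "Hello", finish_reason: "stop", index: 0 }],
  usage: {
    prompt_tokens: 12,
    prompt_tokens_details: { cached_tokens: 4 },
    completion_tokens: 3,
    completion_tokens_details: { reasoning_tokens: 0 },
    total_tokens: 15,
    cost: 0.000021,
  },
};
// OpenRouterCompletionChunkSchema.parse(exampleChunk) accepts this shape;
// error payloads are matched by the OpenRouterErrorResponseSchema branch instead.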
1048
1858
 
1049
- // src/openrouter-completion-language-model.ts
1859
+ // src/completion/index.ts
1050
1860
  var OpenRouterCompletionLanguageModel = class {
1051
1861
  constructor(modelId, settings, config) {
1052
- this.specificationVersion = "v1";
1862
+ this.specificationVersion = "v2";
1863
+ this.provider = "openrouter";
1864
+ this.supportedUrls = {
1865
+ "image/*": [
1866
+ /^data:image\/[a-zA-Z]+;base64,/,
1867
+ /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i
1868
+ ],
1869
+ "text/*": [/^data:text\//, /^https?:\/\/.+$/],
1870
+ "application/*": [/^data:application\//, /^https?:\/\/.+$/]
1871
+ };
1053
1872
  this.defaultObjectGenerationMode = void 0;
1054
1873
  this.modelId = modelId;
1055
1874
  this.settings = settings;
1056
1875
  this.config = config;
1057
1876
  }
1058
- get provider() {
1059
- return this.config.provider;
1060
- }
1061
1877
  getArgs({
1062
- mode,
1063
- inputFormat,
1064
1878
  prompt,
1065
- maxTokens,
1879
+ maxOutputTokens,
1066
1880
  temperature,
1067
1881
  topP,
1068
1882
  frequencyPenalty,
@@ -1071,16 +1885,24 @@ var OpenRouterCompletionLanguageModel = class {
1071
1885
  responseFormat,
1072
1886
  topK,
1073
1887
  stopSequences,
1074
- providerMetadata
1888
+ tools,
1889
+ toolChoice
1075
1890
  }) {
1076
- var _a, _b;
1077
- const type = mode.type;
1078
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
1079
1891
  const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
1080
1892
  prompt,
1081
- inputFormat
1893
+ inputFormat: "prompt"
1082
1894
  });
1083
- const baseArgs = __spreadValues(__spreadValues(__spreadValues({
1895
+ if (tools == null ? void 0 : tools.length) {
1896
+ throw new UnsupportedFunctionalityError({
1897
+ functionality: "tools"
1898
+ });
1899
+ }
1900
+ if (toolChoice) {
1901
+ throw new UnsupportedFunctionalityError({
1902
+ functionality: "toolChoice"
1903
+ });
1904
+ }
1905
+ return __spreadValues(__spreadValues({
1084
1906
  // model id:
1085
1907
  model: this.modelId,
1086
1908
  models: this.settings.models,
@@ -1090,7 +1912,7 @@ var OpenRouterCompletionLanguageModel = class {
1090
1912
  suffix: this.settings.suffix,
1091
1913
  user: this.settings.user,
1092
1914
  // standardized settings:
1093
- max_tokens: maxTokens,
1915
+ max_tokens: maxOutputTokens,
1094
1916
  temperature,
1095
1917
  top_p: topP,
1096
1918
  frequency_penalty: frequencyPenalty,
@@ -1104,58 +1926,27 @@ var OpenRouterCompletionLanguageModel = class {
1104
1926
  // OpenRouter specific settings:
1105
1927
  include_reasoning: this.settings.includeReasoning,
1106
1928
  reasoning: this.settings.reasoning
1107
- }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
1108
- switch (type) {
1109
- case "regular": {
1110
- if ((_b = mode.tools) == null ? void 0 : _b.length) {
1111
- throw new UnsupportedFunctionalityError3({
1112
- functionality: "tools"
1113
- });
1114
- }
1115
- if (mode.toolChoice) {
1116
- throw new UnsupportedFunctionalityError3({
1117
- functionality: "toolChoice"
1118
- });
1119
- }
1120
- return baseArgs;
1121
- }
1122
- case "object-json": {
1123
- throw new UnsupportedFunctionalityError3({
1124
- functionality: "object-json mode"
1125
- });
1126
- }
1127
- case "object-tool": {
1128
- throw new UnsupportedFunctionalityError3({
1129
- functionality: "object-tool mode"
1130
- });
1131
- }
1132
- // Handle all non-text types with a single default case
1133
- default: {
1134
- const _exhaustiveCheck = type;
1135
- throw new UnsupportedFunctionalityError3({
1136
- functionality: `${_exhaustiveCheck} mode`
1137
- });
1138
- }
1139
- }
1929
+ }, this.config.extraBody), this.settings.extraBody);
1140
1930
  }
1141
1931
  async doGenerate(options) {
1142
- var _b, _c, _d, _e, _f;
1143
- const args = this.getArgs(options);
1144
- const { responseHeaders, value: response } = await postJsonToApi2({
1932
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
1933
+ const providerOptions = options.providerOptions || {};
1934
+ const openrouterOptions = providerOptions.openrouter || {};
1935
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1936
+ const { value: response, responseHeaders } = await postJsonToApi({
1145
1937
  url: this.config.url({
1146
1938
  path: "/completions",
1147
1939
  modelId: this.modelId
1148
1940
  }),
1149
- headers: combineHeaders2(this.config.headers(), options.headers),
1941
+ headers: combineHeaders(this.config.headers(), options.headers),
1150
1942
  body: args,
1151
1943
  failedResponseHandler: openrouterFailedResponseHandler,
1152
- successfulResponseHandler: createJsonResponseHandler2(
1944
+ successfulResponseHandler: createJsonResponseHandler(
1153
1945
  OpenRouterCompletionChunkSchema
1154
1946
  ),
1155
1947
  abortSignal: options.abortSignal,
1156
1948
  fetch: this.config.fetch
1157
1949
  });
1158
- const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
1159
1950
  if ("error" in response) {
1160
1951
  throw new Error(`${response.error.message}`);
1161
1952
  }
@@ -1164,54 +1955,62 @@ var OpenRouterCompletionLanguageModel = class {
1164
1955
  throw new Error("No choice in OpenRouter completion response");
1165
1956
  }
1166
1957
  return {
1167
- response: {
1168
- id: response.id,
1169
- modelId: response.model
1170
- },
1171
- text: (_b = choice.text) != null ? _b : "",
1172
- reasoning: choice.reasoning || void 0,
1958
+ content: [
1959
+ {
1960
+ type: "text",
1961
+ text: (_a15 = choice.text) != null ? _a15 : ""
1962
+ }
1963
+ ],
1964
+ finishReason: mapOpenRouterFinishReason(choice.finish_reason),
1173
1965
  usage: {
1174
- promptTokens: (_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens) != null ? _d : 0,
1175
- completionTokens: (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens) != null ? _f : 0
1966
+ inputTokens: (_c = (_b = response.usage) == null ? void 0 : _b.prompt_tokens) != null ? _c : 0,
1967
+ outputTokens: (_e = (_d = response.usage) == null ? void 0 : _d.completion_tokens) != null ? _e : 0,
1968
+ totalTokens: ((_g = (_f = response.usage) == null ? void 0 : _f.prompt_tokens) != null ? _g : 0) + ((_i = (_h = response.usage) == null ? void 0 : _h.completion_tokens) != null ? _i : 0),
1969
+ reasoningTokens: (_l = (_k = (_j = response.usage) == null ? void 0 : _j.completion_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : 0,
1970
+ cachedInputTokens: (_o = (_n = (_m = response.usage) == null ? void 0 : _m.prompt_tokens_details) == null ? void 0 : _n.cached_tokens) != null ? _o : 0
1176
1971
  },
1177
- finishReason: mapOpenRouterFinishReason(choice.finish_reason),
1178
- logprobs: mapOpenRouterCompletionLogProbs(choice.logprobs),
1179
- rawCall: { rawPrompt, rawSettings },
1180
- rawResponse: { headers: responseHeaders },
1181
- warnings: []
1972
+ warnings: [],
1973
+ response: {
1974
+ headers: responseHeaders
1975
+ }
1182
1976
  };
1183
1977
  }
1184
1978
  async doStream(options) {
1185
- const args = this.getArgs(options);
1186
- const { responseHeaders, value: response } = await postJsonToApi2({
1979
+ const providerOptions = options.providerOptions || {};
1980
+ const openrouterOptions = providerOptions.openrouter || {};
1981
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1982
+ const { value: response, responseHeaders } = await postJsonToApi({
1187
1983
  url: this.config.url({
1188
1984
  path: "/completions",
1189
1985
  modelId: this.modelId
1190
1986
  }),
1191
- headers: combineHeaders2(this.config.headers(), options.headers),
1192
- body: __spreadProps(__spreadValues({}, this.getArgs(options)), {
1987
+ headers: combineHeaders(this.config.headers(), options.headers),
1988
+ body: __spreadProps(__spreadValues({}, args), {
1193
1989
  stream: true,
1194
1990
  // only include stream_options when in strict compatibility mode:
1195
1991
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
1196
1992
  }),
1197
1993
  failedResponseHandler: openrouterFailedResponseHandler,
1198
- successfulResponseHandler: createEventSourceResponseHandler2(
1994
+ successfulResponseHandler: createEventSourceResponseHandler(
1199
1995
  OpenRouterCompletionChunkSchema
1200
1996
  ),
1201
1997
  abortSignal: options.abortSignal,
1202
1998
  fetch: this.config.fetch
1203
1999
  });
1204
- const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
1205
2000
  let finishReason = "other";
1206
- let usage = {
1207
- promptTokens: Number.NaN,
1208
- completionTokens: Number.NaN
2001
+ const usage = {
2002
+ inputTokens: Number.NaN,
2003
+ outputTokens: Number.NaN,
2004
+ totalTokens: Number.NaN,
2005
+ reasoningTokens: Number.NaN,
2006
+ cachedInputTokens: Number.NaN
1209
2007
  };
1210
- let logprobs;
2008
+ const openrouterUsage = {};
1211
2009
  return {
1212
2010
  stream: response.pipeThrough(
1213
2011
  new TransformStream({
1214
2012
  transform(chunk, controller) {
2013
+ var _a15, _b;
1215
2014
  if (!chunk.success) {
1216
2015
  finishReason = "error";
1217
2016
  controller.enqueue({ type: "error", error: chunk.error });
@@ -1224,10 +2023,27 @@ var OpenRouterCompletionLanguageModel = class {
1224
2023
  return;
1225
2024
  }
1226
2025
  if (value.usage != null) {
1227
- usage = {
1228
- promptTokens: value.usage.prompt_tokens,
1229
- completionTokens: value.usage.completion_tokens
1230
- };
2026
+ usage.inputTokens = value.usage.prompt_tokens;
2027
+ usage.outputTokens = value.usage.completion_tokens;
2028
+ usage.totalTokens = value.usage.prompt_tokens + value.usage.completion_tokens;
2029
+ openrouterUsage.promptTokens = value.usage.prompt_tokens;
2030
+ if (value.usage.prompt_tokens_details) {
2031
+ const cachedInputTokens = (_a15 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a15 : 0;
2032
+ usage.cachedInputTokens = cachedInputTokens;
2033
+ openrouterUsage.promptTokensDetails = {
2034
+ cachedTokens: cachedInputTokens
2035
+ };
2036
+ }
2037
+ openrouterUsage.completionTokens = value.usage.completion_tokens;
2038
+ if (value.usage.completion_tokens_details) {
2039
+ const reasoningTokens = (_b = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b : 0;
2040
+ usage.reasoningTokens = reasoningTokens;
2041
+ openrouterUsage.completionTokensDetails = {
2042
+ reasoningTokens
2043
+ };
2044
+ }
2045
+ openrouterUsage.cost = value.usage.cost;
2046
+ openrouterUsage.totalTokens = value.usage.total_tokens;
1231
2047
  }
1232
2048
  const choice = value.choices[0];
1233
2049
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -1236,69 +2052,40 @@ var OpenRouterCompletionLanguageModel = class {
1236
2052
  if ((choice == null ? void 0 : choice.text) != null) {
1237
2053
  controller.enqueue({
1238
2054
  type: "text-delta",
1239
- textDelta: choice.text
2055
+ delta: choice.text,
2056
+ id: generateId()
1240
2057
  });
1241
2058
  }
1242
- const mappedLogprobs = mapOpenRouterCompletionLogProbs(
1243
- choice == null ? void 0 : choice.logprobs
1244
- );
1245
- if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
1246
- if (logprobs === void 0) {
1247
- logprobs = [];
1248
- }
1249
- logprobs.push(...mappedLogprobs);
1250
- }
1251
2059
  },
1252
2060
  flush(controller) {
1253
2061
  controller.enqueue({
1254
2062
  type: "finish",
1255
2063
  finishReason,
1256
- logprobs,
1257
- usage
2064
+ usage,
2065
+ providerMetadata: {
2066
+ openrouter: {
2067
+ usage: openrouterUsage
2068
+ }
2069
+ }
1258
2070
  });
1259
2071
  }
1260
2072
  })
1261
2073
  ),
1262
- rawCall: { rawPrompt, rawSettings },
1263
- rawResponse: { headers: responseHeaders },
1264
- warnings: []
2074
+ response: {
2075
+ headers: responseHeaders
2076
+ }
1265
2077
  };
1266
2078
  }
1267
2079
  };
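Both doGenerate and doStream above now merge options.providerOptions.openrouter over the arguments built by getArgs, so extra OpenRouter request-body fields can be supplied per call instead of only through model settings. A hedged sketch; the transforms field is used here only as an example of an OpenRouter request option, and the call is abbreviated to the fields relevant to the passthrough.

// Sketch: per-call fields under providerOptions.openrouter are spread into
// the request body (see the __spreadValues calls in doGenerate/doStream above).
await model.doGenerate({
  prompt: [{ role: "user", content: [{ type: "text", text: "Summarize this." }] }], // assumed prompt shape
  providerOptions: {
    openrouter: { transforms: ["middle-out"] }, // example extra body field
  },
});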
1268
- var OpenRouterCompletionChunkSchema = z4.union([
1269
- z4.object({
1270
- id: z4.string().optional(),
1271
- model: z4.string().optional(),
1272
- choices: z4.array(
1273
- z4.object({
1274
- text: z4.string(),
1275
- reasoning: z4.string().nullish().optional(),
1276
- reasoning_details: ReasoningDetailArraySchema.nullish(),
1277
- finish_reason: z4.string().nullish(),
1278
- index: z4.number(),
1279
- logprobs: z4.object({
1280
- tokens: z4.array(z4.string()),
1281
- token_logprobs: z4.array(z4.number()),
1282
- top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullable()
1283
- }).nullable().optional()
1284
- })
1285
- ),
1286
- usage: z4.object({
1287
- prompt_tokens: z4.number(),
1288
- completion_tokens: z4.number()
1289
- }).optional().nullable()
1290
- }),
1291
- OpenRouterErrorResponseSchema
1292
- ]);
1293
2080
 
1294
- // src/openrouter-facade.ts
2081
+ // src/facade.ts
1295
2082
  var OpenRouter = class {
1296
2083
  /**
1297
2084
  * Creates a new OpenRouter provider instance.
1298
2085
  */
1299
2086
  constructor(options = {}) {
1300
- var _a, _b;
1301
- this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
2087
+ var _a15, _b;
2088
+ this.baseURL = (_b = withoutTrailingSlash((_a15 = options.baseURL) != null ? _a15 : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
1302
2089
  this.apiKey = options.apiKey;
1303
2090
  this.headers = options.headers;
1304
2091
  }
@@ -1332,14 +2119,13 @@ var OpenRouter = class {
1332
2119
  }
1333
2120
  };
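The facade keeps its 0.7.x options: baseURL (or the legacy baseUrl spelling) defaults to https://openrouter.ai/api/v1, and the API key comes from options.apiKey or, via loadApiKey, the OPENROUTER_API_KEY environment variable. createOpenRouter below behaves the same way and defaults compatibility to "compatible". A minimal usage sketch; the chat() helper on the returned provider is assumed from the package's public surface and is not shown in this hunk, and the model id is only an example.

import { createOpenRouter } from "@openrouter/ai-sdk-provider";

// apiKey falls back to OPENROUTER_API_KEY; baseURL defaults to
// https://openrouter.ai/api/v1; compatibility defaults to "compatible".
const openrouter = createOpenRouter();
const chatModel = openrouter.chat("openai/gpt-4o-mini"); // helper assumed, not shown in this hunk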
1334
2121
 
1335
- // src/openrouter-provider.ts
1336
- import { loadApiKey as loadApiKey2, withoutTrailingSlash as withoutTrailingSlash2 } from "@ai-sdk/provider-utils";
2122
+ // src/provider.ts
1337
2123
  function createOpenRouter(options = {}) {
1338
- var _a, _b, _c;
1339
- const baseURL = (_b = withoutTrailingSlash2((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
2124
+ var _a15, _b, _c;
2125
+ const baseURL = (_b = withoutTrailingSlash((_a15 = options.baseURL) != null ? _a15 : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
1340
2126
  const compatibility = (_c = options.compatibility) != null ? _c : "compatible";
1341
2127
  const getHeaders = () => __spreadValues({
1342
- Authorization: `Bearer ${loadApiKey2({
2128
+ Authorization: `Bearer ${loadApiKey({
1343
2129
  apiKey: options.apiKey,
1344
2130
  environmentVariableName: "OPENROUTER_API_KEY",
1345
2131
  description: "OpenRouter"