@openrouter/ai-sdk-provider 0.7.2 → 1.0.0-beta.1

This diff shows the publicly released contents of the two package versions as they appear in their respective public registries and is provided for informational purposes only.
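The diff below tracks the provider's migration from the AI SDK v1 language-model spec to the v2 spec bundled from @ai-sdk/provider@2.0.0-beta.1: per-call options move from providerMetadata to providerOptions, zod is now imported from zod/v4, and stream parts switch to the new text-delta / reasoning-delta / tool-input-* shapes. For orientation, a consumer-level sketch of the new version follows; it is illustrative only and assumes the package's createOpenRouter factory and the AI SDK's generateText helper, neither of which appears in this diff.

    // Illustrative sketch, not part of the diff. Assumes createOpenRouter from
    // this package and generateText from the "ai" package.
    import { createOpenRouter } from "@openrouter/ai-sdk-provider";
    import { generateText } from "ai";

    const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

    const { text, usage } = await generateText({
      model: openrouter("openai/gpt-4o-mini"), // any OpenRouter model id
      prompt: "Say hello.",
    });
    console.log(text, usage);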
@@ -17,21 +17,830 @@ var __spreadValues = (a, b) => {
17
17
  return a;
18
18
  };
19
19
  var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
20
- var __objRest = (source, exclude) => {
21
- var target = {};
22
- for (var prop in source)
23
- if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
24
- target[prop] = source[prop];
25
- if (source != null && __getOwnPropSymbols)
26
- for (var prop of __getOwnPropSymbols(source)) {
27
- if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
28
- target[prop] = source[prop];
20
+
21
+ // node_modules/.pnpm/@ai-sdk+provider@2.0.0-beta.1/node_modules/@ai-sdk/provider/dist/index.mjs
22
+ var marker = "vercel.ai.error";
23
+ var symbol = Symbol.for(marker);
24
+ var _a;
25
+ var _AISDKError = class _AISDKError2 extends Error {
26
+ /**
27
+ * Creates an AI SDK Error.
28
+ *
29
+ * @param {Object} params - The parameters for creating the error.
30
+ * @param {string} params.name - The name of the error.
31
+ * @param {string} params.message - The error message.
32
+ * @param {unknown} [params.cause] - The underlying cause of the error.
33
+ */
34
+ constructor({
35
+ name: name14,
36
+ message,
37
+ cause
38
+ }) {
39
+ super(message);
40
+ this[_a] = true;
41
+ this.name = name14;
42
+ this.cause = cause;
43
+ }
44
+ /**
45
+ * Checks if the given error is an AI SDK Error.
46
+ * @param {unknown} error - The error to check.
47
+ * @returns {boolean} True if the error is an AI SDK Error, false otherwise.
48
+ */
49
+ static isInstance(error) {
50
+ return _AISDKError2.hasMarker(error, marker);
51
+ }
52
+ static hasMarker(error, marker15) {
53
+ const markerSymbol = Symbol.for(marker15);
54
+ return error != null && typeof error === "object" && markerSymbol in error && typeof error[markerSymbol] === "boolean" && error[markerSymbol] === true;
55
+ }
56
+ };
57
+ _a = symbol;
58
+ var AISDKError = _AISDKError;
59
+ var name = "AI_APICallError";
60
+ var marker2 = `vercel.ai.error.${name}`;
61
+ var symbol2 = Symbol.for(marker2);
62
+ var _a2;
63
+ var APICallError = class extends AISDKError {
64
+ constructor({
65
+ message,
66
+ url,
67
+ requestBodyValues,
68
+ statusCode,
69
+ responseHeaders,
70
+ responseBody,
71
+ cause,
72
+ isRetryable = statusCode != null && (statusCode === 408 || // request timeout
73
+ statusCode === 409 || // conflict
74
+ statusCode === 429 || // too many requests
75
+ statusCode >= 500),
76
+ // server error
77
+ data
78
+ }) {
79
+ super({ name, message, cause });
80
+ this[_a2] = true;
81
+ this.url = url;
82
+ this.requestBodyValues = requestBodyValues;
83
+ this.statusCode = statusCode;
84
+ this.responseHeaders = responseHeaders;
85
+ this.responseBody = responseBody;
86
+ this.isRetryable = isRetryable;
87
+ this.data = data;
88
+ }
89
+ static isInstance(error) {
90
+ return AISDKError.hasMarker(error, marker2);
91
+ }
92
+ };
93
+ _a2 = symbol2;
94
+ var name2 = "AI_EmptyResponseBodyError";
95
+ var marker3 = `vercel.ai.error.${name2}`;
96
+ var symbol3 = Symbol.for(marker3);
97
+ var _a3;
98
+ var EmptyResponseBodyError = class extends AISDKError {
99
+ // used in isInstance
100
+ constructor({ message = "Empty response body" } = {}) {
101
+ super({ name: name2, message });
102
+ this[_a3] = true;
103
+ }
104
+ static isInstance(error) {
105
+ return AISDKError.hasMarker(error, marker3);
106
+ }
107
+ };
108
+ _a3 = symbol3;
109
+ function getErrorMessage(error) {
110
+ if (error == null) {
111
+ return "unknown error";
112
+ }
113
+ if (typeof error === "string") {
114
+ return error;
115
+ }
116
+ if (error instanceof Error) {
117
+ return error.message;
118
+ }
119
+ return JSON.stringify(error);
120
+ }
121
+ var name3 = "AI_InvalidArgumentError";
122
+ var marker4 = `vercel.ai.error.${name3}`;
123
+ var symbol4 = Symbol.for(marker4);
124
+ var _a4;
125
+ var InvalidArgumentError = class extends AISDKError {
126
+ constructor({
127
+ message,
128
+ cause,
129
+ argument
130
+ }) {
131
+ super({ name: name3, message, cause });
132
+ this[_a4] = true;
133
+ this.argument = argument;
134
+ }
135
+ static isInstance(error) {
136
+ return AISDKError.hasMarker(error, marker4);
137
+ }
138
+ };
139
+ _a4 = symbol4;
140
+ var name4 = "AI_InvalidPromptError";
141
+ var marker5 = `vercel.ai.error.${name4}`;
142
+ var symbol5 = Symbol.for(marker5);
143
+ var _a5;
144
+ var InvalidPromptError = class extends AISDKError {
145
+ constructor({
146
+ prompt,
147
+ message,
148
+ cause
149
+ }) {
150
+ super({ name: name4, message: `Invalid prompt: ${message}`, cause });
151
+ this[_a5] = true;
152
+ this.prompt = prompt;
153
+ }
154
+ static isInstance(error) {
155
+ return AISDKError.hasMarker(error, marker5);
156
+ }
157
+ };
158
+ _a5 = symbol5;
159
+ var name5 = "AI_InvalidResponseDataError";
160
+ var marker6 = `vercel.ai.error.${name5}`;
161
+ var symbol6 = Symbol.for(marker6);
162
+ var _a6;
163
+ var InvalidResponseDataError = class extends AISDKError {
164
+ constructor({
165
+ data,
166
+ message = `Invalid response data: ${JSON.stringify(data)}.`
167
+ }) {
168
+ super({ name: name5, message });
169
+ this[_a6] = true;
170
+ this.data = data;
171
+ }
172
+ static isInstance(error) {
173
+ return AISDKError.hasMarker(error, marker6);
174
+ }
175
+ };
176
+ _a6 = symbol6;
177
+ var name6 = "AI_JSONParseError";
178
+ var marker7 = `vercel.ai.error.${name6}`;
179
+ var symbol7 = Symbol.for(marker7);
180
+ var _a7;
181
+ var JSONParseError = class extends AISDKError {
182
+ constructor({ text, cause }) {
183
+ super({
184
+ name: name6,
185
+ message: `JSON parsing failed: Text: ${text}.
186
+ Error message: ${getErrorMessage(cause)}`,
187
+ cause
188
+ });
189
+ this[_a7] = true;
190
+ this.text = text;
191
+ }
192
+ static isInstance(error) {
193
+ return AISDKError.hasMarker(error, marker7);
194
+ }
195
+ };
196
+ _a7 = symbol7;
197
+ var name7 = "AI_LoadAPIKeyError";
198
+ var marker8 = `vercel.ai.error.${name7}`;
199
+ var symbol8 = Symbol.for(marker8);
200
+ var _a8;
201
+ _a8 = symbol8;
202
+ var name8 = "AI_LoadSettingError";
203
+ var marker9 = `vercel.ai.error.${name8}`;
204
+ var symbol9 = Symbol.for(marker9);
205
+ var _a9;
206
+ _a9 = symbol9;
207
+ var name9 = "AI_NoContentGeneratedError";
208
+ var marker10 = `vercel.ai.error.${name9}`;
209
+ var symbol10 = Symbol.for(marker10);
210
+ var _a10;
211
+ _a10 = symbol10;
212
+ var name10 = "AI_NoSuchModelError";
213
+ var marker11 = `vercel.ai.error.${name10}`;
214
+ var symbol11 = Symbol.for(marker11);
215
+ var _a11;
216
+ _a11 = symbol11;
217
+ var name11 = "AI_TooManyEmbeddingValuesForCallError";
218
+ var marker12 = `vercel.ai.error.${name11}`;
219
+ var symbol12 = Symbol.for(marker12);
220
+ var _a12;
221
+ _a12 = symbol12;
222
+ var name12 = "AI_TypeValidationError";
223
+ var marker13 = `vercel.ai.error.${name12}`;
224
+ var symbol13 = Symbol.for(marker13);
225
+ var _a13;
226
+ var _TypeValidationError = class _TypeValidationError2 extends AISDKError {
227
+ constructor({ value, cause }) {
228
+ super({
229
+ name: name12,
230
+ message: `Type validation failed: Value: ${JSON.stringify(value)}.
231
+ Error message: ${getErrorMessage(cause)}`,
232
+ cause
233
+ });
234
+ this[_a13] = true;
235
+ this.value = value;
236
+ }
237
+ static isInstance(error) {
238
+ return AISDKError.hasMarker(error, marker13);
239
+ }
240
+ /**
241
+ * Wraps an error into a TypeValidationError.
242
+ * If the cause is already a TypeValidationError with the same value, it returns the cause.
243
+ * Otherwise, it creates a new TypeValidationError.
244
+ *
245
+ * @param {Object} params - The parameters for wrapping the error.
246
+ * @param {unknown} params.value - The value that failed validation.
247
+ * @param {unknown} params.cause - The original error or cause of the validation failure.
248
+ * @returns {TypeValidationError} A TypeValidationError instance.
249
+ */
250
+ static wrap({
251
+ value,
252
+ cause
253
+ }) {
254
+ return _TypeValidationError2.isInstance(cause) && cause.value === value ? cause : new _TypeValidationError2({ value, cause });
255
+ }
256
+ };
257
+ _a13 = symbol13;
258
+ var TypeValidationError = _TypeValidationError;
259
+ var name13 = "AI_UnsupportedFunctionalityError";
260
+ var marker14 = `vercel.ai.error.${name13}`;
261
+ var symbol14 = Symbol.for(marker14);
262
+ var _a14;
263
+ var UnsupportedFunctionalityError = class extends AISDKError {
264
+ constructor({
265
+ functionality,
266
+ message = `'${functionality}' functionality not supported.`
267
+ }) {
268
+ super({ name: name13, message });
269
+ this[_a14] = true;
270
+ this.functionality = functionality;
271
+ }
272
+ static isInstance(error) {
273
+ return AISDKError.hasMarker(error, marker14);
274
+ }
275
+ };
276
+ _a14 = symbol14;
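
The error classes above identify themselves through Symbol.for markers instead of instanceof, so checks keep working even when several copies of @ai-sdk/provider end up in one bundle. A minimal consumer-side sketch (illustrative; doRequest is a placeholder for any call that may throw):

    // Illustrative sketch; APICallError is the export defined above.
    import { APICallError } from "@ai-sdk/provider";

    async function doRequest() {
      // placeholder for a provider call that may throw APICallError
    }

    try {
      await doRequest();
    } catch (error) {
      // isInstance() checks the Symbol.for("vercel.ai.error.AI_APICallError")
      // marker, so it also matches errors created by another bundled copy.
      if (APICallError.isInstance(error) && error.isRetryable) {
        // safe to retry (408/409/429/5xx per the constructor above)
      } else {
        throw error;
      }
    }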
277
+
278
+ // node_modules/.pnpm/eventsource-parser@3.0.3/node_modules/eventsource-parser/dist/index.js
279
+ var ParseError = class extends Error {
280
+ constructor(message, options) {
281
+ super(message), this.name = "ParseError", this.type = options.type, this.field = options.field, this.value = options.value, this.line = options.line;
282
+ }
283
+ };
284
+ function noop(_arg) {
285
+ }
286
+ function createParser(callbacks) {
287
+ if (typeof callbacks == "function")
288
+ throw new TypeError(
289
+ "`callbacks` must be an object, got a function instead. Did you mean `{onEvent: fn}`?"
290
+ );
291
+ const { onEvent = noop, onError = noop, onRetry = noop, onComment } = callbacks;
292
+ let incompleteLine = "", isFirstChunk = true, id, data = "", eventType = "";
293
+ function feed(newChunk) {
294
+ const chunk = isFirstChunk ? newChunk.replace(/^\xEF\xBB\xBF/, "") : newChunk, [complete, incomplete] = splitLines(`${incompleteLine}${chunk}`);
295
+ for (const line of complete)
296
+ parseLine(line);
297
+ incompleteLine = incomplete, isFirstChunk = false;
298
+ }
299
+ function parseLine(line) {
300
+ if (line === "") {
301
+ dispatchEvent();
302
+ return;
303
+ }
304
+ if (line.startsWith(":")) {
305
+ onComment && onComment(line.slice(line.startsWith(": ") ? 2 : 1));
306
+ return;
307
+ }
308
+ const fieldSeparatorIndex = line.indexOf(":");
309
+ if (fieldSeparatorIndex !== -1) {
310
+ const field = line.slice(0, fieldSeparatorIndex), offset = line[fieldSeparatorIndex + 1] === " " ? 2 : 1, value = line.slice(fieldSeparatorIndex + offset);
311
+ processField(field, value, line);
312
+ return;
313
+ }
314
+ processField(line, "", line);
315
+ }
316
+ function processField(field, value, line) {
317
+ switch (field) {
318
+ case "event":
319
+ eventType = value;
320
+ break;
321
+ case "data":
322
+ data = `${data}${value}
323
+ `;
324
+ break;
325
+ case "id":
326
+ id = value.includes("\0") ? void 0 : value;
327
+ break;
328
+ case "retry":
329
+ /^\d+$/.test(value) ? onRetry(parseInt(value, 10)) : onError(
330
+ new ParseError(`Invalid \`retry\` value: "${value}"`, {
331
+ type: "invalid-retry",
332
+ value,
333
+ line
334
+ })
335
+ );
336
+ break;
337
+ default:
338
+ onError(
339
+ new ParseError(
340
+ `Unknown field "${field.length > 20 ? `${field.slice(0, 20)}\u2026` : field}"`,
341
+ { type: "unknown-field", field, value, line }
342
+ )
343
+ );
344
+ break;
345
+ }
346
+ }
347
+ function dispatchEvent() {
348
+ data.length > 0 && onEvent({
349
+ id,
350
+ event: eventType || void 0,
351
+ // If the data buffer's last character is a U+000A LINE FEED (LF) character,
352
+ // then remove the last character from the data buffer.
353
+ data: data.endsWith(`
354
+ `) ? data.slice(0, -1) : data
355
+ }), id = void 0, data = "", eventType = "";
356
+ }
357
+ function reset(options = {}) {
358
+ incompleteLine && options.consume && parseLine(incompleteLine), isFirstChunk = true, id = void 0, data = "", eventType = "", incompleteLine = "";
359
+ }
360
+ return { feed, reset };
361
+ }
362
+ function splitLines(chunk) {
363
+ const lines = [];
364
+ let incompleteLine = "", searchIndex = 0;
365
+ for (; searchIndex < chunk.length; ) {
366
+ const crIndex = chunk.indexOf("\r", searchIndex), lfIndex = chunk.indexOf(`
367
+ `, searchIndex);
368
+ let lineEnd = -1;
369
+ if (crIndex !== -1 && lfIndex !== -1 ? lineEnd = Math.min(crIndex, lfIndex) : crIndex !== -1 ? lineEnd = crIndex : lfIndex !== -1 && (lineEnd = lfIndex), lineEnd === -1) {
370
+ incompleteLine = chunk.slice(searchIndex);
371
+ break;
372
+ } else {
373
+ const line = chunk.slice(searchIndex, lineEnd);
374
+ lines.push(line), searchIndex = lineEnd + 1, chunk[searchIndex - 1] === "\r" && chunk[searchIndex] === `
375
+ ` && searchIndex++;
376
+ }
377
+ }
378
+ return [lines, incompleteLine];
379
+ }
380
+
381
+ // node_modules/.pnpm/eventsource-parser@3.0.3/node_modules/eventsource-parser/dist/stream.js
382
+ var EventSourceParserStream = class extends TransformStream {
383
+ constructor({ onError, onRetry, onComment } = {}) {
384
+ let parser;
385
+ super({
386
+ start(controller) {
387
+ parser = createParser({
388
+ onEvent: (event) => {
389
+ controller.enqueue(event);
390
+ },
391
+ onError(error) {
392
+ onError === "terminate" ? controller.error(error) : typeof onError == "function" && onError(error);
393
+ },
394
+ onRetry,
395
+ onComment
396
+ });
397
+ },
398
+ transform(chunk) {
399
+ parser.feed(chunk);
400
+ }
401
+ });
402
+ }
403
+ };
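
The bundled eventsource-parser v3 shown above takes a callbacks object and dispatches an event once a blank line terminates its data lines. A small illustrative sketch of that API (not part of the diff):

    // Illustrative sketch of the createParser() callback API bundled above.
    import { createParser } from "eventsource-parser";

    const parser = createParser({
      onEvent(event) {
        // event.data is the concatenation of one event's data: lines
        console.log(event.event, event.data);
      },
      onError(error) {
        console.error("SSE parse error:", error);
      },
    });

    parser.feed('data: {"hello":"world"}\n\n'); // the blank line dispatches the event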
404
+
405
+ // node_modules/.pnpm/@ai-sdk+provider-utils@3.0.0-beta.2_zod@3.25.74/node_modules/@ai-sdk/provider-utils/dist/index.mjs
406
+ import * as z4 from "zod/v4";
407
+
408
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/Options.js
409
+ var ignoreOverride = Symbol("Let zodToJsonSchema decide on which parser to use");
410
+
411
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/selectParser.js
412
+ import { ZodFirstPartyTypeKind as ZodFirstPartyTypeKind3 } from "zod";
413
+
414
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/array.js
415
+ import { ZodFirstPartyTypeKind } from "zod";
416
+
417
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/record.js
418
+ import { ZodFirstPartyTypeKind as ZodFirstPartyTypeKind2 } from "zod";
419
+
420
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/string.js
421
+ var ALPHA_NUMERIC = new Set("ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789");
422
+
423
+ // node_modules/.pnpm/zod-to-json-schema@3.24.5_zod@3.25.74/node_modules/zod-to-json-schema/dist/esm/parsers/object.js
424
+ import { ZodOptional } from "zod";
425
+
426
+ // node_modules/.pnpm/@ai-sdk+provider-utils@3.0.0-beta.2_zod@3.25.74/node_modules/@ai-sdk/provider-utils/dist/index.mjs
427
+ function combineHeaders(...headers) {
428
+ return headers.reduce(
429
+ (combinedHeaders, currentHeaders) => __spreadValues(__spreadValues({}, combinedHeaders), currentHeaders != null ? currentHeaders : {}),
430
+ {}
431
+ );
432
+ }
433
+ function extractResponseHeaders(response) {
434
+ return Object.fromEntries([...response.headers]);
435
+ }
436
+ var createIdGenerator = ({
437
+ prefix,
438
+ size = 16,
439
+ alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
440
+ separator = "-"
441
+ } = {}) => {
442
+ const generator = () => {
443
+ const alphabetLength = alphabet.length;
444
+ const chars = new Array(size);
445
+ for (let i = 0; i < size; i++) {
446
+ chars[i] = alphabet[Math.random() * alphabetLength | 0];
447
+ }
448
+ return chars.join("");
449
+ };
450
+ if (prefix == null) {
451
+ return generator;
452
+ }
453
+ if (alphabet.includes(separator)) {
454
+ throw new InvalidArgumentError({
455
+ argument: "separator",
456
+ message: `The separator "${separator}" must not be part of the alphabet "${alphabet}".`
457
+ });
458
+ }
459
+ return () => `${prefix}${separator}${generator()}`;
460
+ };
461
+ var generateId = createIdGenerator();
462
+ function isAbortError(error) {
463
+ return error instanceof Error && (error.name === "AbortError" || error.name === "TimeoutError");
464
+ }
465
+ var FETCH_FAILED_ERROR_MESSAGES = ["fetch failed", "failed to fetch"];
466
+ function handleFetchError({
467
+ error,
468
+ url,
469
+ requestBodyValues
470
+ }) {
471
+ if (isAbortError(error)) {
472
+ return error;
473
+ }
474
+ if (error instanceof TypeError && FETCH_FAILED_ERROR_MESSAGES.includes(error.message.toLowerCase())) {
475
+ const cause = error.cause;
476
+ if (cause != null) {
477
+ return new APICallError({
478
+ message: `Cannot connect to API: ${cause.message}`,
479
+ cause,
480
+ url,
481
+ requestBodyValues,
482
+ isRetryable: true
483
+ // retry when network error
484
+ });
485
+ }
486
+ }
487
+ return error;
488
+ }
489
+ function removeUndefinedEntries(record) {
490
+ return Object.fromEntries(
491
+ Object.entries(record).filter(([_key, value]) => value != null)
492
+ );
493
+ }
494
+ var suspectProtoRx = /"__proto__"\s*:/;
495
+ var suspectConstructorRx = /"constructor"\s*:/;
496
+ function _parse(text) {
497
+ const obj = JSON.parse(text);
498
+ if (obj === null || typeof obj !== "object") {
499
+ return obj;
500
+ }
501
+ if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {
502
+ return obj;
503
+ }
504
+ return filter(obj);
505
+ }
506
+ function filter(obj) {
507
+ let next = [obj];
508
+ while (next.length) {
509
+ const nodes = next;
510
+ next = [];
511
+ for (const node of nodes) {
512
+ if (Object.prototype.hasOwnProperty.call(node, "__proto__")) {
513
+ throw new SyntaxError("Object contains forbidden prototype property");
514
+ }
515
+ if (Object.prototype.hasOwnProperty.call(node, "constructor") && Object.prototype.hasOwnProperty.call(node.constructor, "prototype")) {
516
+ throw new SyntaxError("Object contains forbidden prototype property");
517
+ }
518
+ for (const key in node) {
519
+ const value = node[key];
520
+ if (value && typeof value === "object") {
521
+ next.push(value);
522
+ }
523
+ }
524
+ }
525
+ }
526
+ return obj;
527
+ }
528
+ function secureJsonParse(text) {
529
+ const { stackTraceLimit } = Error;
530
+ Error.stackTraceLimit = 0;
531
+ try {
532
+ return _parse(text);
533
+ } finally {
534
+ Error.stackTraceLimit = stackTraceLimit;
535
+ }
536
+ }
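
secureJsonParse above only walks the parsed value when the raw text matches the "__proto__" / "constructor" regexes, and then rejects payloads that could seed prototype pollution. An illustrative comparison (secureJsonParse itself is internal to this bundle):

    // Illustrative: what the filter() walk above rejects.
    const payload = '{"__proto__": {"polluted": true}}';
    const parsed = JSON.parse(payload); // plain JSON.parse accepts it
    console.log(Object.hasOwn(parsed, "__proto__")); // true - risky in naive object merges
    // secureJsonParse(payload) would instead throw:
    // SyntaxError: Object contains forbidden prototype property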
537
+ var validatorSymbol = Symbol.for("vercel.ai.validator");
538
+ function validator(validate) {
539
+ return { [validatorSymbol]: true, validate };
540
+ }
541
+ function isValidator(value) {
542
+ return typeof value === "object" && value !== null && validatorSymbol in value && value[validatorSymbol] === true && "validate" in value;
543
+ }
544
+ function asValidator(value) {
545
+ return isValidator(value) ? value : standardSchemaValidator(value);
546
+ }
547
+ function standardSchemaValidator(standardSchema) {
548
+ return validator(async (value) => {
549
+ const result = await standardSchema["~standard"].validate(value);
550
+ return result.issues == null ? { success: true, value: result.value } : {
551
+ success: false,
552
+ error: new TypeValidationError({
553
+ value,
554
+ cause: result.issues
555
+ })
556
+ };
557
+ });
558
+ }
559
+ async function validateTypes({
560
+ value,
561
+ schema
562
+ }) {
563
+ const result = await safeValidateTypes({ value, schema });
564
+ if (!result.success) {
565
+ throw TypeValidationError.wrap({ value, cause: result.error });
566
+ }
567
+ return result.value;
568
+ }
569
+ async function safeValidateTypes({
570
+ value,
571
+ schema
572
+ }) {
573
+ const validator2 = asValidator(schema);
574
+ try {
575
+ if (validator2.validate == null) {
576
+ return { success: true, value, rawValue: value };
577
+ }
578
+ const result = await validator2.validate(value);
579
+ if (result.success) {
580
+ return { success: true, value: result.value, rawValue: value };
581
+ }
582
+ return {
583
+ success: false,
584
+ error: TypeValidationError.wrap({ value, cause: result.error }),
585
+ rawValue: value
586
+ };
587
+ } catch (error) {
588
+ return {
589
+ success: false,
590
+ error: TypeValidationError.wrap({ value, cause: error }),
591
+ rawValue: value
592
+ };
593
+ }
594
+ }
595
+ async function parseJSON({
596
+ text,
597
+ schema
598
+ }) {
599
+ try {
600
+ const value = secureJsonParse(text);
601
+ if (schema == null) {
602
+ return value;
603
+ }
604
+ return validateTypes({ value, schema });
605
+ } catch (error) {
606
+ if (JSONParseError.isInstance(error) || TypeValidationError.isInstance(error)) {
607
+ throw error;
29
608
  }
30
- return target;
609
+ throw new JSONParseError({ text, cause: error });
610
+ }
611
+ }
612
+ async function safeParseJSON({
613
+ text,
614
+ schema
615
+ }) {
616
+ try {
617
+ const value = secureJsonParse(text);
618
+ if (schema == null) {
619
+ return { success: true, value, rawValue: value };
620
+ }
621
+ return await safeValidateTypes({ value, schema });
622
+ } catch (error) {
623
+ return {
624
+ success: false,
625
+ error: JSONParseError.isInstance(error) ? error : new JSONParseError({ text, cause: error }),
626
+ rawValue: void 0
627
+ };
628
+ }
629
+ }
630
+ function isParsableJson(input) {
631
+ try {
632
+ secureJsonParse(input);
633
+ return true;
634
+ } catch (e) {
635
+ return false;
636
+ }
637
+ }
638
+ function parseJsonEventStream({
639
+ stream,
640
+ schema
641
+ }) {
642
+ return stream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(
643
+ new TransformStream({
644
+ async transform({ data }, controller) {
645
+ if (data === "[DONE]") {
646
+ return;
647
+ }
648
+ controller.enqueue(await safeParseJSON({ text: data, schema }));
649
+ }
650
+ })
651
+ );
652
+ }
653
+ var getOriginalFetch2 = () => globalThis.fetch;
654
+ var postJsonToApi = async ({
655
+ url,
656
+ headers,
657
+ body,
658
+ failedResponseHandler,
659
+ successfulResponseHandler,
660
+ abortSignal,
661
+ fetch
662
+ }) => postToApi({
663
+ url,
664
+ headers: __spreadValues({
665
+ "Content-Type": "application/json"
666
+ }, headers),
667
+ body: {
668
+ content: JSON.stringify(body),
669
+ values: body
670
+ },
671
+ failedResponseHandler,
672
+ successfulResponseHandler,
673
+ abortSignal,
674
+ fetch
675
+ });
676
+ var postToApi = async ({
677
+ url,
678
+ headers = {},
679
+ body,
680
+ successfulResponseHandler,
681
+ failedResponseHandler,
682
+ abortSignal,
683
+ fetch = getOriginalFetch2()
684
+ }) => {
685
+ try {
686
+ const response = await fetch(url, {
687
+ method: "POST",
688
+ headers: removeUndefinedEntries(headers),
689
+ body: body.content,
690
+ signal: abortSignal
691
+ });
692
+ const responseHeaders = extractResponseHeaders(response);
693
+ if (!response.ok) {
694
+ let errorInformation;
695
+ try {
696
+ errorInformation = await failedResponseHandler({
697
+ response,
698
+ url,
699
+ requestBodyValues: body.values
700
+ });
701
+ } catch (error) {
702
+ if (isAbortError(error) || APICallError.isInstance(error)) {
703
+ throw error;
704
+ }
705
+ throw new APICallError({
706
+ message: "Failed to process error response",
707
+ cause: error,
708
+ statusCode: response.status,
709
+ url,
710
+ responseHeaders,
711
+ requestBodyValues: body.values
712
+ });
713
+ }
714
+ throw errorInformation.value;
715
+ }
716
+ try {
717
+ return await successfulResponseHandler({
718
+ response,
719
+ url,
720
+ requestBodyValues: body.values
721
+ });
722
+ } catch (error) {
723
+ if (error instanceof Error) {
724
+ if (isAbortError(error) || APICallError.isInstance(error)) {
725
+ throw error;
726
+ }
727
+ }
728
+ throw new APICallError({
729
+ message: "Failed to process successful response",
730
+ cause: error,
731
+ statusCode: response.status,
732
+ url,
733
+ responseHeaders,
734
+ requestBodyValues: body.values
735
+ });
736
+ }
737
+ } catch (error) {
738
+ throw handleFetchError({ error, url, requestBodyValues: body.values });
739
+ }
740
+ };
741
+ var createJsonErrorResponseHandler = ({
742
+ errorSchema,
743
+ errorToMessage,
744
+ isRetryable
745
+ }) => async ({ response, url, requestBodyValues }) => {
746
+ const responseBody = await response.text();
747
+ const responseHeaders = extractResponseHeaders(response);
748
+ if (responseBody.trim() === "") {
749
+ return {
750
+ responseHeaders,
751
+ value: new APICallError({
752
+ message: response.statusText,
753
+ url,
754
+ requestBodyValues,
755
+ statusCode: response.status,
756
+ responseHeaders,
757
+ responseBody,
758
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
759
+ })
760
+ };
761
+ }
762
+ try {
763
+ const parsedError = await parseJSON({
764
+ text: responseBody,
765
+ schema: errorSchema
766
+ });
767
+ return {
768
+ responseHeaders,
769
+ value: new APICallError({
770
+ message: errorToMessage(parsedError),
771
+ url,
772
+ requestBodyValues,
773
+ statusCode: response.status,
774
+ responseHeaders,
775
+ responseBody,
776
+ data: parsedError,
777
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
778
+ })
779
+ };
780
+ } catch (parseError) {
781
+ return {
782
+ responseHeaders,
783
+ value: new APICallError({
784
+ message: response.statusText,
785
+ url,
786
+ requestBodyValues,
787
+ statusCode: response.status,
788
+ responseHeaders,
789
+ responseBody,
790
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
791
+ })
792
+ };
793
+ }
31
794
  };
795
+ var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
796
+ const responseHeaders = extractResponseHeaders(response);
797
+ if (response.body == null) {
798
+ throw new EmptyResponseBodyError({});
799
+ }
800
+ return {
801
+ responseHeaders,
802
+ value: parseJsonEventStream({
803
+ stream: response.body,
804
+ schema: chunkSchema
805
+ })
806
+ };
807
+ };
808
+ var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
809
+ const responseBody = await response.text();
810
+ const parsedResult = await safeParseJSON({
811
+ text: responseBody,
812
+ schema: responseSchema
813
+ });
814
+ const responseHeaders = extractResponseHeaders(response);
815
+ if (!parsedResult.success) {
816
+ throw new APICallError({
817
+ message: "Invalid JSON response",
818
+ cause: parsedResult.error,
819
+ statusCode: response.status,
820
+ responseHeaders,
821
+ responseBody,
822
+ url,
823
+ requestBodyValues
824
+ });
825
+ }
826
+ return {
827
+ responseHeaders,
828
+ value: parsedResult.value,
829
+ rawValue: parsedResult.rawValue
830
+ };
831
+ };
832
+ var schemaSymbol = Symbol.for("vercel.ai.schema");
833
+ var { btoa, atob } = globalThis;
834
+ function convertUint8ArrayToBase64(array) {
835
+ let latin1string = "";
836
+ for (let i = 0; i < array.length; i++) {
837
+ latin1string += String.fromCodePoint(array[i]);
838
+ }
839
+ return btoa(latin1string);
840
+ }
32
841
 
33
842
  // src/schemas/reasoning-details.ts
34
- import { z } from "zod";
843
+ import { z } from "zod/v4";
35
844
  var ReasoningDetailSummarySchema = z.object({
36
845
  type: z.literal("reasoning.summary" /* Summary */),
37
846
  summary: z.string()
@@ -56,56 +865,79 @@ var ReasoningDetailsWithUnknownSchema = z.union([
56
865
  ]);
57
866
  var ReasoningDetailArraySchema = z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));
58
867
 
59
- // src/openrouter-chat-language-model.ts
60
- import {
61
- InvalidResponseDataError,
62
- UnsupportedFunctionalityError
63
- } from "@ai-sdk/provider";
64
- import {
65
- combineHeaders,
66
- createEventSourceResponseHandler,
67
- createJsonResponseHandler,
68
- generateId,
69
- isParsableJson,
70
- postJsonToApi
71
- } from "@ai-sdk/provider-utils";
72
- import { z as z3 } from "zod";
868
+ // src/schemas/error-response.ts
869
+ import { z as z2 } from "zod/v4";
870
+ var OpenRouterErrorResponseSchema = z2.object({
871
+ error: z2.object({
872
+ code: z2.union([z2.string(), z2.number()]).nullable(),
873
+ message: z2.string(),
874
+ type: z2.string().nullable(),
875
+ param: z2.any().nullable()
876
+ })
877
+ });
878
+ var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
879
+ errorSchema: OpenRouterErrorResponseSchema,
880
+ errorToMessage: (data) => data.error.message
881
+ });
73
882
 
74
- // src/convert-to-openrouter-chat-messages.ts
75
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
883
+ // src/utils/map-finish-reason.ts
884
+ function mapOpenRouterFinishReason(finishReason) {
885
+ switch (finishReason) {
886
+ case "stop":
887
+ return "stop";
888
+ case "length":
889
+ return "length";
890
+ case "content_filter":
891
+ return "content-filter";
892
+ case "function_call":
893
+ case "tool_calls":
894
+ return "tool-calls";
895
+ default:
896
+ return "unknown";
897
+ }
898
+ }
899
+
900
+ // src/chat/convert-to-openrouter-chat-messages.ts
76
901
  function getCacheControl(providerMetadata) {
77
- var _a, _b, _c;
902
+ var _a15, _b, _c;
78
903
  const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
79
904
  const openrouter = providerMetadata == null ? void 0 : providerMetadata.openrouter;
80
- return (_c = (_b = (_a = openrouter == null ? void 0 : openrouter.cacheControl) != null ? _a : openrouter == null ? void 0 : openrouter.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
905
+ return (_c = (_b = (_a15 = openrouter == null ? void 0 : openrouter.cacheControl) != null ? _a15 : openrouter == null ? void 0 : openrouter.cache_control) != null ? _b : anthropic == null ? void 0 : anthropic.cacheControl) != null ? _c : anthropic == null ? void 0 : anthropic.cache_control;
81
906
  }
82
907
  function convertToOpenRouterChatMessages(prompt) {
83
- var _a, _b, _c;
908
+ var _a15, _b, _c;
84
909
  const messages = [];
85
- for (const { role, content, providerMetadata } of prompt) {
910
+ for (const { role, content, providerOptions } of prompt) {
86
911
  switch (role) {
87
912
  case "system": {
88
913
  messages.push({
89
914
  role: "system",
90
915
  content,
91
- cache_control: getCacheControl(providerMetadata)
916
+ cache_control: getCacheControl(providerOptions)
92
917
  });
93
918
  break;
94
919
  }
95
920
  case "user": {
96
- if (content.length === 1 && ((_a = content[0]) == null ? void 0 : _a.type) === "text") {
921
+ if (content.length === 1 && ((_a15 = content[0]) == null ? void 0 : _a15.type) === "text") {
922
+ const cacheControl = (_b = getCacheControl(providerOptions)) != null ? _b : getCacheControl(content[0].providerOptions);
923
+ const contentWithCacheControl = cacheControl ? [
924
+ {
925
+ type: "text",
926
+ text: content[0].text,
927
+ cache_control: cacheControl
928
+ }
929
+ ] : content[0].text;
97
930
  messages.push({
98
931
  role: "user",
99
- content: content[0].text,
100
- cache_control: (_b = getCacheControl(providerMetadata)) != null ? _b : getCacheControl(content[0].providerMetadata)
932
+ content: contentWithCacheControl
101
933
  });
102
934
  break;
103
935
  }
104
- const messageCacheControl = getCacheControl(providerMetadata);
936
+ const messageCacheControl = getCacheControl(providerOptions);
105
937
  const contentParts = content.map(
106
938
  (part) => {
107
- var _a2, _b2, _c2, _d;
108
- const cacheControl = (_a2 = getCacheControl(part.providerMetadata)) != null ? _a2 : messageCacheControl;
939
+ var _a16, _b2, _c2, _d, _e, _f, _g;
940
+ const cacheControl = (_a16 = getCacheControl(part.providerOptions)) != null ? _a16 : messageCacheControl;
109
941
  switch (part.type) {
110
942
  case "text":
111
943
  return {
@@ -114,33 +946,35 @@ function convertToOpenRouterChatMessages(prompt) {
114
946
  // For text parts, only use part-specific cache control
115
947
  cache_control: cacheControl
116
948
  };
117
- case "image":
118
- return {
119
- type: "image_url",
120
- image_url: {
121
- url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
122
- part.image
123
- )}`
124
- },
125
- // For image parts, use part-specific or message-level cache control
126
- cache_control: cacheControl
127
- };
128
949
  case "file":
950
+ if ((_b2 = part.mediaType) == null ? void 0 : _b2.startsWith("image/")) {
951
+ return {
952
+ type: "image_url",
953
+ image_url: {
954
+ url: part.data instanceof URL ? part.data.toString() : `data:${(_c2 = part.mediaType) != null ? _c2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
955
+ part.data instanceof Uint8Array ? part.data : new Uint8Array()
956
+ )}`
957
+ },
958
+ // For image parts, use part-specific or message-level cache control
959
+ cache_control: cacheControl
960
+ };
961
+ }
129
962
  return {
130
963
  type: "file",
131
964
  file: {
132
965
  filename: String(
133
- (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.openrouter) == null ? void 0 : _d.filename
966
+ (_g = (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openrouter) == null ? void 0 : _e.filename) != null ? _f : part.filename) != null ? _g : ""
134
967
  ),
135
- file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
968
+ file_data: part.data instanceof Uint8Array ? `data:${part.mediaType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mediaType};base64,${part.data}`
136
969
  },
137
970
  cache_control: cacheControl
138
971
  };
139
972
  default: {
140
- const _exhaustiveCheck = part;
141
- throw new Error(
142
- `Unsupported content part type: ${_exhaustiveCheck}`
143
- );
973
+ return {
974
+ type: "text",
975
+ text: "",
976
+ cache_control: cacheControl
977
+ };
144
978
  }
145
979
  }
146
980
  }
@@ -168,7 +1002,7 @@ function convertToOpenRouterChatMessages(prompt) {
168
1002
  type: "function",
169
1003
  function: {
170
1004
  name: part.toolName,
171
- arguments: JSON.stringify(part.args)
1005
+ arguments: JSON.stringify(part.input)
172
1006
  }
173
1007
  });
174
1008
  break;
@@ -177,23 +1011,14 @@ function convertToOpenRouterChatMessages(prompt) {
177
1011
  reasoning += part.text;
178
1012
  reasoningDetails.push({
179
1013
  type: "reasoning.text" /* Text */,
180
- text: part.text,
181
- signature: part.signature
182
- });
183
- break;
184
- }
185
- case "redacted-reasoning": {
186
- reasoningDetails.push({
187
- type: "reasoning.encrypted" /* Encrypted */,
188
- data: part.data
1014
+ text: part.text
189
1015
  });
190
1016
  break;
191
1017
  }
192
1018
  case "file":
193
1019
  break;
194
1020
  default: {
195
- const _exhaustiveCheck = part;
196
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
1021
+ break;
197
1022
  }
198
1023
  }
199
1024
  }
@@ -203,95 +1028,185 @@ function convertToOpenRouterChatMessages(prompt) {
203
1028
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
204
1029
  reasoning: reasoning || void 0,
205
1030
  reasoning_details: reasoningDetails.length > 0 ? reasoningDetails : void 0,
206
- cache_control: getCacheControl(providerMetadata)
1031
+ cache_control: getCacheControl(providerOptions)
207
1032
  });
208
1033
  break;
209
1034
  }
210
1035
  case "tool": {
211
1036
  for (const toolResponse of content) {
1037
+ const content2 = getToolResultContent(toolResponse);
212
1038
  messages.push({
213
1039
  role: "tool",
214
1040
  tool_call_id: toolResponse.toolCallId,
215
- content: JSON.stringify(toolResponse.result),
216
- cache_control: (_c = getCacheControl(providerMetadata)) != null ? _c : getCacheControl(toolResponse.providerMetadata)
1041
+ content: content2,
1042
+ cache_control: (_c = getCacheControl(providerOptions)) != null ? _c : getCacheControl(toolResponse.providerOptions)
217
1043
  });
218
1044
  }
219
1045
  break;
220
1046
  }
221
1047
  default: {
222
- const _exhaustiveCheck = role;
223
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1048
+ break;
224
1049
  }
225
1050
  }
226
1051
  }
227
1052
  return messages;
228
1053
  }
229
-
230
- // src/map-openrouter-chat-logprobs.ts
231
- function mapOpenRouterChatLogProbsOutput(logprobs) {
232
- var _a, _b;
233
- return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
234
- token,
235
- logprob,
236
- topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
237
- token: token2,
238
- logprob: logprob2
239
- })) : []
240
- }))) != null ? _b : void 0;
1054
+ function getToolResultContent(input) {
1055
+ return input.output.type === "text" ? input.output.value : JSON.stringify(input.output.value);
241
1056
  }
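
getCacheControl above resolves cache_control from providerOptions.openrouter or providerOptions.anthropic, accepting both the cacheControl and cache_control spellings, at the message or content-part level. An illustrative prompt fragment in the AI SDK v2 shape (field names follow that lookup; the surrounding model call is omitted):

    // Illustrative: a system message carrying OpenRouter cache-control metadata.
    const prompt = [
      {
        role: "system",
        content: "You are a helpful assistant.",
        providerOptions: {
          openrouter: { cacheControl: { type: "ephemeral" } },
        },
      },
    ];
    // convertToOpenRouterChatMessages(prompt) would emit:
    // { role: "system", content: "...", cache_control: { type: "ephemeral" } }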
242
1057
 
243
- // src/map-openrouter-finish-reason.ts
244
- function mapOpenRouterFinishReason(finishReason) {
245
- switch (finishReason) {
246
- case "stop":
247
- return "stop";
248
- case "length":
249
- return "length";
250
- case "content_filter":
251
- return "content-filter";
252
- case "function_call":
253
- case "tool_calls":
254
- return "tool-calls";
255
- default:
256
- return "unknown";
1058
+ // src/chat/get-tool-choice.ts
1059
+ import { z as z3 } from "zod/v4";
1060
+ var ChatCompletionToolChoiceSchema = z3.union([
1061
+ z3.literal("auto"),
1062
+ z3.literal("none"),
1063
+ z3.literal("required"),
1064
+ z3.object({
1065
+ type: z3.literal("function"),
1066
+ function: z3.object({
1067
+ name: z3.string()
1068
+ })
1069
+ })
1070
+ ]);
1071
+ function getChatCompletionToolChoice(toolChoice) {
1072
+ switch (toolChoice.type) {
1073
+ case "auto":
1074
+ case "none":
1075
+ case "required":
1076
+ return toolChoice.type;
1077
+ case "tool": {
1078
+ return {
1079
+ type: "function",
1080
+ function: { name: toolChoice.toolName }
1081
+ };
1082
+ }
1083
+ default: {
1084
+ toolChoice;
1085
+ throw new Error(`Invalid tool choice type: ${toolChoice}`);
1086
+ }
257
1087
  }
258
1088
  }
259
1089
 
260
- // src/openrouter-error.ts
261
- import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
262
- import { z as z2 } from "zod";
263
- var OpenRouterErrorResponseSchema = z2.object({
264
- error: z2.object({
265
- code: z2.union([z2.string(), z2.number()]).nullable(),
266
- message: z2.string(),
267
- type: z2.string().nullable(),
268
- param: z2.any().nullable()
269
- })
1090
+ // src/chat/schemas.ts
1091
+ import { z as z5 } from "zod/v4";
1092
+ var OpenRouterChatCompletionBaseResponseSchema = z5.object({
1093
+ id: z5.string().optional(),
1094
+ model: z5.string().optional(),
1095
+ usage: z5.object({
1096
+ prompt_tokens: z5.number(),
1097
+ prompt_tokens_details: z5.object({
1098
+ cached_tokens: z5.number()
1099
+ }).nullish(),
1100
+ completion_tokens: z5.number(),
1101
+ completion_tokens_details: z5.object({
1102
+ reasoning_tokens: z5.number()
1103
+ }).nullish(),
1104
+ total_tokens: z5.number(),
1105
+ cost: z5.number().optional()
1106
+ }).nullish()
270
1107
  });
271
- var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
272
- errorSchema: OpenRouterErrorResponseSchema,
273
- errorToMessage: (data) => data.error.message
1108
+ var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
1109
+ choices: z5.array(
1110
+ z5.object({
1111
+ message: z5.object({
1112
+ role: z5.literal("assistant"),
1113
+ content: z5.string().nullable().optional(),
1114
+ reasoning: z5.string().nullable().optional(),
1115
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1116
+ tool_calls: z5.array(
1117
+ z5.object({
1118
+ id: z5.string().optional().nullable(),
1119
+ type: z5.literal("function"),
1120
+ function: z5.object({
1121
+ name: z5.string(),
1122
+ arguments: z5.string()
1123
+ })
1124
+ })
1125
+ ).optional()
1126
+ }),
1127
+ index: z5.number().nullish(),
1128
+ logprobs: z5.object({
1129
+ content: z5.array(
1130
+ z5.object({
1131
+ token: z5.string(),
1132
+ logprob: z5.number(),
1133
+ top_logprobs: z5.array(
1134
+ z5.object({
1135
+ token: z5.string(),
1136
+ logprob: z5.number()
1137
+ })
1138
+ )
1139
+ })
1140
+ ).nullable()
1141
+ }).nullable().optional(),
1142
+ finish_reason: z5.string().optional().nullable()
1143
+ })
1144
+ )
274
1145
  });
1146
+ var OpenRouterStreamChatCompletionChunkSchema = z5.union([
1147
+ OpenRouterChatCompletionBaseResponseSchema.extend({
1148
+ choices: z5.array(
1149
+ z5.object({
1150
+ delta: z5.object({
1151
+ role: z5.enum(["assistant"]).optional(),
1152
+ content: z5.string().nullish(),
1153
+ reasoning: z5.string().nullish().optional(),
1154
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1155
+ tool_calls: z5.array(
1156
+ z5.object({
1157
+ index: z5.number().nullish(),
1158
+ id: z5.string().nullish(),
1159
+ type: z5.literal("function").optional(),
1160
+ function: z5.object({
1161
+ name: z5.string().nullish(),
1162
+ arguments: z5.string().nullish()
1163
+ })
1164
+ })
1165
+ ).nullish()
1166
+ }).nullish(),
1167
+ logprobs: z5.object({
1168
+ content: z5.array(
1169
+ z5.object({
1170
+ token: z5.string(),
1171
+ logprob: z5.number(),
1172
+ top_logprobs: z5.array(
1173
+ z5.object({
1174
+ token: z5.string(),
1175
+ logprob: z5.number()
1176
+ })
1177
+ )
1178
+ })
1179
+ ).nullable()
1180
+ }).nullish(),
1181
+ finish_reason: z5.string().nullable().optional(),
1182
+ index: z5.number().nullish()
1183
+ })
1184
+ )
1185
+ }),
1186
+ OpenRouterErrorResponseSchema
1187
+ ]);
275
1188
 
276
- // src/openrouter-chat-language-model.ts
277
- function isFunctionTool(tool) {
278
- return "parameters" in tool;
279
- }
1189
+ // src/chat/index.ts
280
1190
  var OpenRouterChatLanguageModel = class {
281
1191
  constructor(modelId, settings, config) {
282
- this.specificationVersion = "v1";
1192
+ this.specificationVersion = "v2";
1193
+ this.provider = "openrouter";
283
1194
  this.defaultObjectGenerationMode = "tool";
1195
+ this.supportedUrls = {
1196
+ "image/*": [
1197
+ /^data:image\/[a-zA-Z]+;base64,/,
1198
+ /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i
1199
+ ],
1200
+ // 'text/*': [/^data:text\//, /^https?:\/\/.+$/],
1201
+ "application/*": [/^data:application\//, /^https?:\/\/.+$/]
1202
+ };
284
1203
  this.modelId = modelId;
285
1204
  this.settings = settings;
286
1205
  this.config = config;
287
1206
  }
288
- get provider() {
289
- return this.config.provider;
290
- }
291
1207
  getArgs({
292
- mode,
293
1208
  prompt,
294
- maxTokens,
1209
+ maxOutputTokens,
295
1210
  temperature,
296
1211
  topP,
297
1212
  frequencyPenalty,
@@ -300,12 +1215,10 @@ var OpenRouterChatLanguageModel = class {
300
1215
  stopSequences,
301
1216
  responseFormat,
302
1217
  topK,
303
- providerMetadata
1218
+ tools,
1219
+ toolChoice
304
1220
  }) {
305
- var _a;
306
- const type = mode.type;
307
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
308
- const baseArgs = __spreadValues(__spreadValues(__spreadValues({
1221
+ const baseArgs = __spreadValues(__spreadValues({
309
1222
  // model id:
310
1223
  model: this.modelId,
311
1224
  models: this.settings.models,
@@ -316,7 +1229,7 @@ var OpenRouterChatLanguageModel = class {
316
1229
  user: this.settings.user,
317
1230
  parallel_tool_calls: this.settings.parallelToolCalls,
318
1231
  // standardized settings:
319
- max_tokens: maxTokens,
1232
+ max_tokens: maxOutputTokens,
320
1233
  temperature,
321
1234
  top_p: topP,
322
1235
  frequency_penalty: frequencyPenalty,
@@ -331,44 +1244,34 @@ var OpenRouterChatLanguageModel = class {
331
1244
  include_reasoning: this.settings.includeReasoning,
332
1245
  reasoning: this.settings.reasoning,
333
1246
  usage: this.settings.usage
334
- }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
335
- switch (type) {
336
- case "regular": {
337
- return __spreadValues(__spreadValues({}, baseArgs), prepareToolsAndToolChoice(mode));
338
- }
339
- case "object-json": {
340
- return __spreadProps(__spreadValues({}, baseArgs), {
341
- response_format: { type: "json_object" }
342
- });
343
- }
344
- case "object-tool": {
345
- return __spreadProps(__spreadValues({}, baseArgs), {
346
- tool_choice: { type: "function", function: { name: mode.tool.name } },
347
- tools: [
348
- {
349
- type: "function",
350
- function: {
351
- name: mode.tool.name,
352
- description: mode.tool.description,
353
- parameters: mode.tool.parameters
354
- }
355
- }
356
- ]
357
- });
358
- }
359
- // Handle all non-text types with a single default case
360
- default: {
361
- const _exhaustiveCheck = type;
362
- throw new UnsupportedFunctionalityError({
363
- functionality: `${_exhaustiveCheck} mode`
364
- });
365
- }
1247
+ }, this.config.extraBody), this.settings.extraBody);
1248
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
1249
+ return __spreadProps(__spreadValues({}, baseArgs), {
1250
+ response_format: { type: "json_object" }
1251
+ });
366
1252
  }
1253
+ if (tools && tools.length > 0) {
1254
+ const mappedTools = tools.filter((tool) => tool.type === "function").map((tool) => ({
1255
+ type: "function",
1256
+ function: {
1257
+ name: tool.name,
1258
+ description: tool.type,
1259
+ parameters: tool.inputSchema
1260
+ }
1261
+ }));
1262
+ return __spreadProps(__spreadValues({}, baseArgs), {
1263
+ tools: mappedTools,
1264
+ tool_choice: toolChoice ? getChatCompletionToolChoice(toolChoice) : void 0
1265
+ });
1266
+ }
1267
+ return baseArgs;
367
1268
  }
368
1269
  async doGenerate(options) {
369
- var _b, _c, _d, _e, _f, _g, _h, _i, _j;
370
- const args = this.getArgs(options);
371
- const { responseHeaders, value: response } = await postJsonToApi({
1270
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t;
1271
+ const providerOptions = options.providerOptions || {};
1272
+ const openrouterOptions = providerOptions.openrouter || {};
1273
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1274
+ const { value: response, responseHeaders } = await postJsonToApi({
372
1275
  url: this.config.url({
373
1276
  path: "/chat/completions",
374
1277
  modelId: this.modelId
@@ -382,46 +1285,33 @@ var OpenRouterChatLanguageModel = class {
382
1285
  abortSignal: options.abortSignal,
383
1286
  fetch: this.config.fetch
384
1287
  });
385
- const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
386
1288
  const choice = response.choices[0];
387
1289
  if (!choice) {
388
1290
  throw new Error("No choice in response");
389
1291
  }
390
1292
  const usageInfo = response.usage ? {
391
- promptTokens: (_b = response.usage.prompt_tokens) != null ? _b : 0,
392
- completionTokens: (_c = response.usage.completion_tokens) != null ? _c : 0
1293
+ inputTokens: (_a15 = response.usage.prompt_tokens) != null ? _a15 : 0,
1294
+ outputTokens: (_b = response.usage.completion_tokens) != null ? _b : 0,
1295
+ totalTokens: ((_c = response.usage.prompt_tokens) != null ? _c : 0) + ((_d = response.usage.completion_tokens) != null ? _d : 0),
1296
+ reasoningTokens: (_f = (_e = response.usage.completion_tokens_details) == null ? void 0 : _e.reasoning_tokens) != null ? _f : 0,
1297
+ cachedInputTokens: (_h = (_g = response.usage.prompt_tokens_details) == null ? void 0 : _g.cached_tokens) != null ? _h : 0
393
1298
  } : {
394
- promptTokens: 0,
395
- completionTokens: 0
1299
+ inputTokens: 0,
1300
+ outputTokens: 0,
1301
+ totalTokens: 0,
1302
+ reasoningTokens: 0,
1303
+ cachedInputTokens: 0
396
1304
  };
397
- const providerMetadata = {};
398
- if (response.usage && ((_d = this.settings.usage) == null ? void 0 : _d.include)) {
399
- providerMetadata.openrouter = {
400
- usage: {
401
- promptTokens: response.usage.prompt_tokens,
402
- promptTokensDetails: response.usage.prompt_tokens_details ? {
403
- cachedTokens: (_e = response.usage.prompt_tokens_details.cached_tokens) != null ? _e : 0
404
- } : void 0,
405
- completionTokens: response.usage.completion_tokens,
406
- completionTokensDetails: response.usage.completion_tokens_details ? {
407
- reasoningTokens: (_f = response.usage.completion_tokens_details.reasoning_tokens) != null ? _f : 0
408
- } : void 0,
409
- cost: response.usage.cost,
410
- totalTokens: (_g = response.usage.total_tokens) != null ? _g : 0
411
- }
412
- };
413
- }
414
- const hasProviderMetadata = Object.keys(providerMetadata).length > 0;
415
- const reasoningDetails = (_h = choice.message.reasoning_details) != null ? _h : [];
416
- const reasoning = reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
417
- var _a2;
1305
+ const reasoningDetails = (_i = choice.message.reasoning_details) != null ? _i : [];
1306
+ reasoningDetails.length > 0 ? reasoningDetails.map((detail) => {
1307
+ var _a16;
418
1308
  switch (detail.type) {
419
1309
  case "reasoning.text" /* Text */: {
420
1310
  if (detail.text) {
421
1311
  return {
422
1312
  type: "text",
423
1313
  text: detail.text,
424
- signature: (_a2 = detail.signature) != null ? _a2 : void 0
1314
+ signature: (_a16 = detail.signature) != null ? _a16 : void 0
425
1315
  };
426
1316
  }
427
1317
  break;
@@ -455,34 +1345,58 @@ var OpenRouterChatLanguageModel = class {
455
1345
  text: choice.message.reasoning
456
1346
  }
457
1347
  ] : [];
458
- return __spreadValues({
459
- response: {
460
- id: response.id,
461
- modelId: response.model
462
- },
463
- text: (_i = choice.message.content) != null ? _i : void 0,
464
- reasoning,
465
- toolCalls: (_j = choice.message.tool_calls) == null ? void 0 : _j.map((toolCall) => {
466
- var _a2;
467
- return {
468
- toolCallType: "function",
469
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
1348
+ const content = [];
1349
+ if (choice.message.content) {
1350
+ content.push({
1351
+ type: "text",
1352
+ text: choice.message.content
1353
+ });
1354
+ }
1355
+ if (choice.message.tool_calls) {
1356
+ for (const toolCall of choice.message.tool_calls) {
1357
+ content.push({
1358
+ type: "tool-call",
1359
+ toolCallId: (_j = toolCall.id) != null ? _j : generateId(),
470
1360
  toolName: toolCall.function.name,
471
- args: toolCall.function.arguments
472
- };
473
- }),
1361
+ input: toolCall.function.arguments
1362
+ });
1363
+ }
1364
+ }
1365
+ return {
1366
+ content,
474
1367
  finishReason: mapOpenRouterFinishReason(choice.finish_reason),
475
1368
  usage: usageInfo,
476
- rawCall: { rawPrompt, rawSettings },
477
- rawResponse: { headers: responseHeaders },
478
1369
  warnings: [],
479
- logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
480
- }, hasProviderMetadata ? { providerMetadata } : {});
1370
+ providerMetadata: {
1371
+ openrouter: {
1372
+ usage: {
1373
+ promptTokens: (_k = usageInfo.inputTokens) != null ? _k : 0,
1374
+ completionTokens: (_l = usageInfo.outputTokens) != null ? _l : 0,
1375
+ totalTokens: (_m = usageInfo.totalTokens) != null ? _m : 0,
1376
+ cost: (_n = response.usage) == null ? void 0 : _n.cost,
1377
+ promptTokensDetails: {
1378
+ cachedTokens: (_q = (_p = (_o = response.usage) == null ? void 0 : _o.prompt_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : 0
1379
+ },
1380
+ completionTokensDetails: {
1381
+ reasoningTokens: (_t = (_s = (_r = response.usage) == null ? void 0 : _r.completion_tokens_details) == null ? void 0 : _s.reasoning_tokens) != null ? _t : 0
1382
+ }
1383
+ }
1384
+ }
1385
+ },
1386
+ request: { body: args },
1387
+ response: {
1388
+ id: response.id,
1389
+ modelId: response.model,
1390
+ headers: responseHeaders
1391
+ }
1392
+ };
481
1393
  }
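
doGenerate above now always reports OpenRouter usage accounting under providerMetadata.openrouter.usage, where 0.7.2 only did so when settings.usage.include was set. An illustrative consumer-side read (same assumptions as the sketch near the top of this diff):

    // Illustrative: reading the usage accounting attached above.
    import { createOpenRouter } from "@openrouter/ai-sdk-provider";
    import { generateText } from "ai";

    const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });
    const result = await generateText({
      model: openrouter("openai/gpt-4o-mini"),
      prompt: "ping",
    });

    const orUsage = result.providerMetadata?.openrouter?.usage;
    console.log(orUsage?.cost, orUsage?.promptTokensDetails?.cachedTokens);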
482
1394
  async doStream(options) {
483
- var _a, _c;
484
- const args = this.getArgs(options);
485
- const { responseHeaders, value: response } = await postJsonToApi({
1395
+ var _a15;
1396
+ const providerOptions = options.providerOptions || {};
1397
+ const openrouterOptions = providerOptions.openrouter || {};
1398
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1399
+ const { value: response, responseHeaders } = await postJsonToApi({
486
1400
  url: this.config.url({
487
1401
  path: "/chat/completions",
488
1402
  modelId: this.modelId
@@ -493,7 +1407,7 @@ var OpenRouterChatLanguageModel = class {
493
1407
  // only include stream_options when in strict compatibility mode:
494
1408
  stream_options: this.config.compatibility === "strict" ? __spreadValues({
495
1409
  include_usage: true
496
- }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
1410
+ }, ((_a15 = this.settings.usage) == null ? void 0 : _a15.include) ? { include_usage: true } : {}) : void 0
497
1411
  }),
498
1412
  failedResponseHandler: openrouterFailedResponseHandler,
499
1413
  successfulResponseHandler: createEventSourceResponseHandler(
@@ -502,21 +1416,21 @@ var OpenRouterChatLanguageModel = class {
502
1416
  abortSignal: options.abortSignal,
503
1417
  fetch: this.config.fetch
504
1418
  });
505
- const _b = args, { messages: rawPrompt } = _b, rawSettings = __objRest(_b, ["messages"]);
506
1419
  const toolCalls = [];
507
1420
  let finishReason = "other";
508
- let usage = {
509
- promptTokens: Number.NaN,
510
- completionTokens: Number.NaN
1421
+ const usage = {
1422
+ inputTokens: Number.NaN,
1423
+ outputTokens: Number.NaN,
1424
+ totalTokens: Number.NaN,
1425
+ reasoningTokens: Number.NaN,
1426
+ cachedInputTokens: Number.NaN
511
1427
  };
512
- let logprobs;
513
1428
  const openrouterUsage = {};
514
- const shouldIncludeUsageAccounting = !!((_c = this.settings.usage) == null ? void 0 : _c.include);
515
1429
  return {
516
1430
  stream: response.pipeThrough(
517
1431
  new TransformStream({
518
1432
  transform(chunk, controller) {
519
- var _a2, _b2, _c2, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
1433
+ var _a16, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
520
1434
  if (!chunk.success) {
521
1435
  finishReason = "error";
522
1436
  controller.enqueue({ type: "error", error: chunk.error });
@@ -541,20 +1455,23 @@ var OpenRouterChatLanguageModel = class {
541
1455
  });
542
1456
  }
543
1457
  if (value.usage != null) {
544
- usage = {
545
- promptTokens: value.usage.prompt_tokens,
546
- completionTokens: value.usage.completion_tokens
547
- };
1458
+ usage.inputTokens = value.usage.prompt_tokens;
1459
+ usage.outputTokens = value.usage.completion_tokens;
1460
+ usage.totalTokens = value.usage.prompt_tokens + value.usage.completion_tokens;
548
1461
  openrouterUsage.promptTokens = value.usage.prompt_tokens;
549
1462
  if (value.usage.prompt_tokens_details) {
1463
+ const cachedInputTokens = (_a16 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a16 : 0;
1464
+ usage.cachedInputTokens = cachedInputTokens;
550
1465
  openrouterUsage.promptTokensDetails = {
551
- cachedTokens: (_a2 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a2 : 0
1466
+ cachedTokens: cachedInputTokens
552
1467
  };
553
1468
  }
554
1469
  openrouterUsage.completionTokens = value.usage.completion_tokens;
555
1470
  if (value.usage.completion_tokens_details) {
1471
+ const reasoningTokens = (_b = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b : 0;
1472
+ usage.reasoningTokens = reasoningTokens;
556
1473
  openrouterUsage.completionTokensDetails = {
557
- reasoningTokens: (_b2 = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b2 : 0
1474
+ reasoningTokens
558
1475
  };
559
1476
  }
560
1477
  openrouterUsage.cost = value.usage.cost;
@@ -571,13 +1488,15 @@ var OpenRouterChatLanguageModel = class {
571
1488
  if (delta.content != null) {
572
1489
  controller.enqueue({
573
1490
  type: "text-delta",
574
- textDelta: delta.content
1491
+ delta: delta.content,
1492
+ id: generateId()
575
1493
  });
576
1494
  }
577
1495
  if (delta.reasoning != null) {
578
1496
  controller.enqueue({
579
- type: "reasoning",
580
- textDelta: delta.reasoning
1497
+ type: "reasoning-delta",
1498
+ delta: delta.reasoning,
1499
+ id: generateId()
581
1500
  });
582
1501
  }
583
1502
  if (delta.reasoning_details && delta.reasoning_details.length > 0) {
@@ -586,14 +1505,15 @@ var OpenRouterChatLanguageModel = class {
586
1505
  case "reasoning.text" /* Text */: {
587
1506
  if (detail.text) {
588
1507
  controller.enqueue({
589
- type: "reasoning",
590
- textDelta: detail.text
1508
+ type: "reasoning-delta",
1509
+ delta: detail.text,
1510
+ id: generateId()
591
1511
  });
592
1512
  }
593
1513
  if (detail.signature) {
594
1514
  controller.enqueue({
595
- type: "reasoning-signature",
596
- signature: detail.signature
1515
+ type: "reasoning-end",
1516
+ id: generateId()
597
1517
  });
598
1518
  }
599
1519
  break;
@@ -601,8 +1521,9 @@ var OpenRouterChatLanguageModel = class {
601
1521
  case "reasoning.encrypted" /* Encrypted */: {
602
1522
  if (detail.data) {
603
1523
  controller.enqueue({
604
- type: "redacted-reasoning",
605
- data: detail.data
1524
+ type: "reasoning-delta",
1525
+ delta: "[REDACTED]",
1526
+ id: generateId()
606
1527
  });
607
1528
  }
608
1529
  break;
@@ -610,8 +1531,9 @@ var OpenRouterChatLanguageModel = class {
610
1531
  case "reasoning.summary" /* Summary */: {
611
1532
  if (detail.summary) {
612
1533
  controller.enqueue({
613
- type: "reasoning",
614
- textDelta: detail.summary
1534
+ type: "reasoning-delta",
1535
+ delta: detail.summary,
1536
+ id: generateId()
615
1537
  });
616
1538
  }
617
1539
  break;
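
Across the three reasoning hunks above, the v1 parts `reasoning`, `reasoning-signature`, and `redacted-reasoning` collapse into v2 `reasoning-delta` parts (with `reasoning-end` emitted where a signature used to be, and encrypted data surfaced as a literal "[REDACTED]" delta), each carrying a freshly generated `id`. A minimal consumer-side sketch under those assumptions:

    type ReasoningPart =
      | { type: "reasoning-delta"; delta: string; id: string }
      | { type: "reasoning-end"; id: string };

    // Illustrative: concatenate streamed reasoning text. Note that the code
    // above generates a new id per delta, so grouping by id is not useful here.
    function collectReasoningText(parts: ReasoningPart[]): string {
      let text = "";
      for (const part of parts) {
        if (part.type === "reasoning-delta") {
          text += part.delta;
        }
      }
      return text;
    }
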
@@ -623,18 +1545,9 @@ var OpenRouterChatLanguageModel = class {
623
1545
  }
624
1546
  }
625
1547
  }
626
- const mappedLogprobs = mapOpenRouterChatLogProbsOutput(
627
- choice == null ? void 0 : choice.logprobs
628
- );
629
- if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
630
- if (logprobs === void 0) {
631
- logprobs = [];
632
- }
633
- logprobs.push(...mappedLogprobs);
634
- }
635
1548
  if (delta.tool_calls != null) {
636
1549
  for (const toolCallDelta of delta.tool_calls) {
637
- const index = toolCallDelta.index;
1550
+ const index = (_c = toolCallDelta.index) != null ? _c : toolCalls.length - 1;
638
1551
  if (toolCalls[index] == null) {
639
1552
  if (toolCallDelta.type !== "function") {
640
1553
  throw new InvalidResponseDataError({
@@ -648,7 +1561,7 @@ var OpenRouterChatLanguageModel = class {
648
1561
  message: `Expected 'id' to be a string.`
649
1562
  });
650
1563
  }
651
- if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
1564
+ if (((_d = toolCallDelta.function) == null ? void 0 : _d.name) == null) {
652
1565
  throw new InvalidResponseDataError({
653
1566
  data: toolCallDelta,
654
1567
  message: `Expected 'function.name' to be a string.`
@@ -659,7 +1572,7 @@ var OpenRouterChatLanguageModel = class {
659
1572
  type: "function",
660
1573
  function: {
661
1574
  name: toolCallDelta.function.name,
662
- arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
1575
+ arguments: (_e = toolCallDelta.function.arguments) != null ? _e : ""
663
1576
  },
664
1577
  sent: false
665
1578
  };
@@ -667,20 +1580,26 @@ var OpenRouterChatLanguageModel = class {
667
1580
  if (toolCall2 == null) {
668
1581
  throw new Error("Tool call is missing");
669
1582
  }
670
- if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
1583
+ if (((_f = toolCall2.function) == null ? void 0 : _f.name) != null && ((_g = toolCall2.function) == null ? void 0 : _g.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
671
1584
  controller.enqueue({
672
- type: "tool-call-delta",
673
- toolCallType: "function",
674
- toolCallId: toolCall2.id,
675
- toolName: toolCall2.function.name,
676
- argsTextDelta: toolCall2.function.arguments
1585
+ type: "tool-input-start",
1586
+ id: toolCall2.id,
1587
+ toolName: toolCall2.function.name
1588
+ });
1589
+ controller.enqueue({
1590
+ type: "tool-input-delta",
1591
+ id: toolCall2.id,
1592
+ delta: toolCall2.function.arguments
1593
+ });
1594
+ controller.enqueue({
1595
+ type: "tool-input-end",
1596
+ id: toolCall2.id
677
1597
  });
678
1598
  controller.enqueue({
679
1599
  type: "tool-call",
680
- toolCallType: "function",
681
- toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
1600
+ toolCallId: toolCall2.id,
682
1601
  toolName: toolCall2.function.name,
683
- args: toolCall2.function.arguments
1602
+ input: toolCall2.function.arguments
684
1603
  });
685
1604
  toolCall2.sent = true;
686
1605
  }
@@ -690,23 +1609,27 @@ var OpenRouterChatLanguageModel = class {
690
1609
  if (toolCall == null) {
691
1610
  throw new Error("Tool call is missing");
692
1611
  }
693
- if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
694
- toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
1612
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.name) != null) {
1613
+ controller.enqueue({
1614
+ type: "tool-input-start",
1615
+ id: toolCall.id,
1616
+ toolName: toolCall.function.name
1617
+ });
1618
+ }
1619
+ if (((_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null) {
1620
+ toolCall.function.arguments += (_k = (_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null ? _k : "";
695
1621
  }
696
1622
  controller.enqueue({
697
- type: "tool-call-delta",
698
- toolCallType: "function",
699
- toolCallId: toolCall.id,
700
- toolName: toolCall.function.name,
701
- argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
1623
+ type: "tool-input-delta",
1624
+ id: toolCall.id,
1625
+ delta: (_l = toolCallDelta.function.arguments) != null ? _l : ""
702
1626
  });
703
- if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
1627
+ if (((_m = toolCall.function) == null ? void 0 : _m.name) != null && ((_n = toolCall.function) == null ? void 0 : _n.arguments) != null && isParsableJson(toolCall.function.arguments)) {
704
1628
  controller.enqueue({
705
1629
  type: "tool-call",
706
- toolCallType: "function",
707
- toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
1630
+ toolCallId: (_o = toolCall.id) != null ? _o : generateId(),
708
1631
  toolName: toolCall.function.name,
709
- args: toolCall.function.arguments
1632
+ input: toolCall.function.arguments
710
1633
  });
711
1634
  toolCall.sent = true;
712
1635
  }
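
The two tool-call hunks above replace the v1 `tool-call-delta` part (toolCallType / toolCallId / argsTextDelta) with the v2 `tool-input-start` / `tool-input-delta` / `tool-input-end` sequence, fall back to the last known index when `toolCallDelta.index` is missing, and rename `args` to `input` on the final `tool-call` part. A sketch of rebuilding tool inputs from that part sequence (part shapes follow the objects enqueued above; the helper name is illustrative):

    type ToolPart =
      | { type: "tool-input-start"; id: string; toolName: string }
      | { type: "tool-input-delta"; id: string; delta: string }
      | { type: "tool-input-end"; id: string }
      | { type: "tool-call"; toolCallId: string; toolName: string; input: string };

    // Illustrative: accumulate each tool call's JSON input text by id.
    function collectToolInputs(parts: ToolPart[]): Record<string, string> {
      const inputs: Record<string, string> = {};
      for (const part of parts) {
        switch (part.type) {
          case "tool-input-start":
            inputs[part.id] = "";
            break;
          case "tool-input-delta":
            inputs[part.id] = (inputs[part.id] ?? "") + part.delta;
            break;
          case "tool-call":
            // The final part carries the complete input (named `args` in v1).
            inputs[part.toolCallId] = part.input;
            break;
          // "tool-input-end" only closes the input stream for that id.
        }
      }
      return inputs;
    }
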
@@ -714,206 +1637,42 @@ var OpenRouterChatLanguageModel = class {
714
1637
  }
715
1638
  },
716
1639
  flush(controller) {
717
- var _a2;
1640
+ var _a16;
718
1641
  if (finishReason === "tool-calls") {
719
1642
  for (const toolCall of toolCalls) {
720
1643
  if (!toolCall.sent) {
721
1644
  controller.enqueue({
722
1645
  type: "tool-call",
723
- toolCallType: "function",
724
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
1646
+ toolCallId: (_a16 = toolCall.id) != null ? _a16 : generateId(),
725
1647
  toolName: toolCall.function.name,
726
1648
  // Coerce invalid arguments to an empty JSON object
727
- args: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
1649
+ input: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
728
1650
  });
729
1651
  toolCall.sent = true;
730
1652
  }
731
1653
  }
732
1654
  }
733
- const providerMetadata = {};
734
- if (shouldIncludeUsageAccounting && (openrouterUsage.totalTokens !== void 0 || openrouterUsage.cost !== void 0 || openrouterUsage.promptTokensDetails !== void 0 || openrouterUsage.completionTokensDetails !== void 0)) {
735
- providerMetadata.openrouter = {
736
- usage: openrouterUsage
737
- };
738
- }
739
- const hasProviderMetadata = Object.keys(providerMetadata).length > 0 && shouldIncludeUsageAccounting;
740
- controller.enqueue(__spreadValues({
1655
+ controller.enqueue({
741
1656
  type: "finish",
742
1657
  finishReason,
743
- logprobs,
744
- usage
745
- }, hasProviderMetadata ? { providerMetadata } : {}));
1658
+ usage,
1659
+ providerMetadata: {
1660
+ openrouter: {
1661
+ usage: openrouterUsage
1662
+ }
1663
+ }
1664
+ });
746
1665
  }
747
1666
  })
748
1667
  ),
749
- rawCall: { rawPrompt, rawSettings },
750
- rawResponse: { headers: responseHeaders },
751
- warnings: []
1668
+ warnings: [],
1669
+ request: { body: args },
1670
+ response: { headers: responseHeaders }
752
1671
  };
753
1672
  }
754
1673
  };
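
With that, the chat model's doStream result loses the v1 rawCall / rawResponse fields in favor of request / response, and the finish part now always carries the OpenRouter usage block under providerMetadata. A rough sketch of the shapes returned above (simplified types; the names follow the hunk, not an upstream declaration):

    // Simplified sketch of the v2 doStream result and its finish part.
    interface V2StreamResult {
      stream: ReadableStream<unknown>; // emits text-delta, reasoning-delta, tool-*, finish, ...
      warnings: unknown[];
      request: { body: unknown };                                 // replaces rawCall.{rawPrompt, rawSettings}
      response: { headers: Record<string, string> | undefined };  // replaces rawResponse
    }

    interface FinishPart {
      type: "finish";
      finishReason: string;
      usage: {
        inputTokens: number;
        outputTokens: number;
        totalTokens: number;
        reasoningTokens: number;
        cachedInputTokens: number;
      };
      providerMetadata: { openrouter: { usage: Record<string, unknown> } };
    }
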
755
- var OpenRouterChatCompletionBaseResponseSchema = z3.object({
756
- id: z3.string().optional(),
757
- model: z3.string().optional(),
758
- usage: z3.object({
759
- prompt_tokens: z3.number(),
760
- prompt_tokens_details: z3.object({
761
- cached_tokens: z3.number()
762
- }).nullish(),
763
- completion_tokens: z3.number(),
764
- completion_tokens_details: z3.object({
765
- reasoning_tokens: z3.number()
766
- }).nullish(),
767
- total_tokens: z3.number(),
768
- cost: z3.number().optional()
769
- }).nullish()
770
- });
771
- var OpenRouterNonStreamChatCompletionResponseSchema = OpenRouterChatCompletionBaseResponseSchema.extend({
772
- choices: z3.array(
773
- z3.object({
774
- message: z3.object({
775
- role: z3.literal("assistant"),
776
- content: z3.string().nullable().optional(),
777
- reasoning: z3.string().nullable().optional(),
778
- reasoning_details: ReasoningDetailArraySchema.nullish(),
779
- tool_calls: z3.array(
780
- z3.object({
781
- id: z3.string().optional().nullable(),
782
- type: z3.literal("function"),
783
- function: z3.object({
784
- name: z3.string(),
785
- arguments: z3.string()
786
- })
787
- })
788
- ).optional()
789
- }),
790
- index: z3.number(),
791
- logprobs: z3.object({
792
- content: z3.array(
793
- z3.object({
794
- token: z3.string(),
795
- logprob: z3.number(),
796
- top_logprobs: z3.array(
797
- z3.object({
798
- token: z3.string(),
799
- logprob: z3.number()
800
- })
801
- )
802
- })
803
- ).nullable()
804
- }).nullable().optional(),
805
- finish_reason: z3.string().optional().nullable()
806
- })
807
- )
808
- });
809
- var OpenRouterStreamChatCompletionChunkSchema = z3.union([
810
- OpenRouterChatCompletionBaseResponseSchema.extend({
811
- choices: z3.array(
812
- z3.object({
813
- delta: z3.object({
814
- role: z3.enum(["assistant"]).optional(),
815
- content: z3.string().nullish(),
816
- reasoning: z3.string().nullish().optional(),
817
- reasoning_details: ReasoningDetailArraySchema.nullish(),
818
- tool_calls: z3.array(
819
- z3.object({
820
- index: z3.number(),
821
- id: z3.string().nullish(),
822
- type: z3.literal("function").optional(),
823
- function: z3.object({
824
- name: z3.string().nullish(),
825
- arguments: z3.string().nullish()
826
- })
827
- })
828
- ).nullish()
829
- }).nullish(),
830
- logprobs: z3.object({
831
- content: z3.array(
832
- z3.object({
833
- token: z3.string(),
834
- logprob: z3.number(),
835
- top_logprobs: z3.array(
836
- z3.object({
837
- token: z3.string(),
838
- logprob: z3.number()
839
- })
840
- )
841
- })
842
- ).nullable()
843
- }).nullish(),
844
- finish_reason: z3.string().nullable().optional(),
845
- index: z3.number()
846
- })
847
- )
848
- }),
849
- OpenRouterErrorResponseSchema
850
- ]);
851
- function prepareToolsAndToolChoice(mode) {
852
- var _a;
853
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
854
- if (tools == null) {
855
- return { tools: void 0, tool_choice: void 0 };
856
- }
857
- const mappedTools = tools.map((tool) => {
858
- if (isFunctionTool(tool)) {
859
- return {
860
- type: "function",
861
- function: {
862
- name: tool.name,
863
- description: tool.description,
864
- parameters: tool.parameters
865
- }
866
- };
867
- }
868
- return {
869
- type: "function",
870
- function: {
871
- name: tool.name
872
- }
873
- };
874
- });
875
- const toolChoice = mode.toolChoice;
876
- if (toolChoice == null) {
877
- return { tools: mappedTools, tool_choice: void 0 };
878
- }
879
- const type = toolChoice.type;
880
- switch (type) {
881
- case "auto":
882
- case "none":
883
- case "required":
884
- return { tools: mappedTools, tool_choice: type };
885
- case "tool":
886
- return {
887
- tools: mappedTools,
888
- tool_choice: {
889
- type: "function",
890
- function: {
891
- name: toolChoice.toolName
892
- }
893
- }
894
- };
895
- default: {
896
- const _exhaustiveCheck = type;
897
- throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
898
- }
899
- }
900
- }
901
-
902
- // src/openrouter-completion-language-model.ts
903
- import { UnsupportedFunctionalityError as UnsupportedFunctionalityError3 } from "@ai-sdk/provider";
904
- import {
905
- combineHeaders as combineHeaders2,
906
- createEventSourceResponseHandler as createEventSourceResponseHandler2,
907
- createJsonResponseHandler as createJsonResponseHandler2,
908
- postJsonToApi as postJsonToApi2
909
- } from "@ai-sdk/provider-utils";
910
- import { z as z4 } from "zod";
911
1674
 
912
- // src/convert-to-openrouter-completion-prompt.ts
913
- import {
914
- InvalidPromptError,
915
- UnsupportedFunctionalityError as UnsupportedFunctionalityError2
916
- } from "@ai-sdk/provider";
1675
+ // src/completion/convert-to-openrouter-completion-prompt.ts
917
1676
  function convertToOpenRouterCompletionPrompt({
918
1677
  prompt,
919
1678
  inputFormat,
@@ -934,7 +1693,7 @@ function convertToOpenRouterCompletionPrompt({
934
1693
  switch (role) {
935
1694
  case "system": {
936
1695
  throw new InvalidPromptError({
937
- message: "Unexpected system message in prompt: ${content}",
1696
+ message: `Unexpected system message in prompt: ${content}`,
938
1697
  prompt
939
1698
  });
940
1699
  }
@@ -944,21 +1703,13 @@ function convertToOpenRouterCompletionPrompt({
944
1703
  case "text": {
945
1704
  return part.text;
946
1705
  }
947
- case "image": {
948
- throw new UnsupportedFunctionalityError2({
949
- functionality: "images"
950
- });
951
- }
952
1706
  case "file": {
953
- throw new UnsupportedFunctionalityError2({
1707
+ throw new UnsupportedFunctionalityError({
954
1708
  functionality: "file attachments"
955
1709
  });
956
1710
  }
957
1711
  default: {
958
- const _exhaustiveCheck = part;
959
- throw new Error(
960
- `Unsupported content type: ${_exhaustiveCheck}`
961
- );
1712
+ return "";
962
1713
  }
963
1714
  }
964
1715
  }).join("");
@@ -969,39 +1720,38 @@ ${userMessage}
969
1720
  break;
970
1721
  }
971
1722
  case "assistant": {
972
- const assistantMessage = content.map((part) => {
973
- switch (part.type) {
974
- case "text": {
975
- return part.text;
976
- }
977
- case "tool-call": {
978
- throw new UnsupportedFunctionalityError2({
979
- functionality: "tool-call messages"
980
- });
981
- }
982
- case "reasoning": {
983
- throw new UnsupportedFunctionalityError2({
984
- functionality: "reasoning messages"
985
- });
986
- }
987
- case "redacted-reasoning": {
988
- throw new UnsupportedFunctionalityError2({
989
- functionality: "redacted reasoning messages"
990
- });
991
- }
992
- case "file": {
993
- throw new UnsupportedFunctionalityError2({
994
- functionality: "file attachments"
995
- });
996
- }
997
- default: {
998
- const _exhaustiveCheck = part;
999
- throw new Error(
1000
- `Unsupported content type: ${_exhaustiveCheck}`
1001
- );
1723
+ const assistantMessage = content.map(
1724
+ (part) => {
1725
+ switch (part.type) {
1726
+ case "text": {
1727
+ return part.text;
1728
+ }
1729
+ case "tool-call": {
1730
+ throw new UnsupportedFunctionalityError({
1731
+ functionality: "tool-call messages"
1732
+ });
1733
+ }
1734
+ case "tool-result": {
1735
+ throw new UnsupportedFunctionalityError({
1736
+ functionality: "tool-result messages"
1737
+ });
1738
+ }
1739
+ case "reasoning": {
1740
+ throw new UnsupportedFunctionalityError({
1741
+ functionality: "reasoning messages"
1742
+ });
1743
+ }
1744
+ case "file": {
1745
+ throw new UnsupportedFunctionalityError({
1746
+ functionality: "file attachments"
1747
+ });
1748
+ }
1749
+ default: {
1750
+ return "";
1751
+ }
1002
1752
  }
1003
1753
  }
1004
- }).join("");
1754
+ ).join("");
1005
1755
  text += `${assistant}:
1006
1756
  ${assistantMessage}
1007
1757
 
@@ -1009,13 +1759,12 @@ ${assistantMessage}
1009
1759
  break;
1010
1760
  }
1011
1761
  case "tool": {
1012
- throw new UnsupportedFunctionalityError2({
1762
+ throw new UnsupportedFunctionalityError({
1013
1763
  functionality: "tool messages"
1014
1764
  });
1015
1765
  }
1016
1766
  default: {
1017
- const _exhaustiveCheck = role;
1018
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1767
+ break;
1019
1768
  }
1020
1769
  }
1021
1770
  }
@@ -1026,40 +1775,63 @@ ${assistantMessage}
1026
1775
  };
1027
1776
  }
1028
1777
 
1029
- // src/map-openrouter-completion-logprobs.ts
1030
- function mapOpenRouterCompletionLogProbs(logprobs) {
1031
- return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => {
1032
- var _a, _b;
1033
- return {
1034
- token,
1035
- logprob: (_a = logprobs.token_logprobs[index]) != null ? _a : 0,
1036
- topLogprobs: logprobs.top_logprobs ? Object.entries((_b = logprobs.top_logprobs[index]) != null ? _b : {}).map(
1037
- ([token2, logprob]) => ({
1038
- token: token2,
1039
- logprob
1040
- })
1041
- ) : []
1042
- };
1043
- });
1044
- }
1778
+ // src/completion/schemas.ts
1779
+ import { z as z6 } from "zod/v4";
1780
+ var OpenRouterCompletionChunkSchema = z6.union([
1781
+ z6.object({
1782
+ id: z6.string().optional(),
1783
+ model: z6.string().optional(),
1784
+ choices: z6.array(
1785
+ z6.object({
1786
+ text: z6.string(),
1787
+ reasoning: z6.string().nullish().optional(),
1788
+ reasoning_details: ReasoningDetailArraySchema.nullish(),
1789
+ finish_reason: z6.string().nullish(),
1790
+ index: z6.number().nullish(),
1791
+ logprobs: z6.object({
1792
+ tokens: z6.array(z6.string()),
1793
+ token_logprobs: z6.array(z6.number()),
1794
+ top_logprobs: z6.array(z6.record(z6.string(), z6.number())).nullable()
1795
+ }).nullable().optional()
1796
+ })
1797
+ ),
1798
+ usage: z6.object({
1799
+ prompt_tokens: z6.number(),
1800
+ prompt_tokens_details: z6.object({
1801
+ cached_tokens: z6.number()
1802
+ }).nullish(),
1803
+ completion_tokens: z6.number(),
1804
+ completion_tokens_details: z6.object({
1805
+ reasoning_tokens: z6.number()
1806
+ }).nullish(),
1807
+ total_tokens: z6.number(),
1808
+ cost: z6.number().optional()
1809
+ }).nullish()
1810
+ }),
1811
+ OpenRouterErrorResponseSchema
1812
+ ]);
1045
1813
 
1046
- // src/openrouter-completion-language-model.ts
1814
+ // src/completion/index.ts
1047
1815
  var OpenRouterCompletionLanguageModel = class {
1048
1816
  constructor(modelId, settings, config) {
1049
- this.specificationVersion = "v1";
1817
+ this.specificationVersion = "v2";
1818
+ this.provider = "openrouter";
1819
+ this.supportedUrls = {
1820
+ "image/*": [
1821
+ /^data:image\/[a-zA-Z]+;base64,/,
1822
+ /^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i
1823
+ ],
1824
+ "text/*": [/^data:text\//, /^https?:\/\/.+$/],
1825
+ "application/*": [/^data:application\//, /^https?:\/\/.+$/]
1826
+ };
1050
1827
  this.defaultObjectGenerationMode = void 0;
1051
1828
  this.modelId = modelId;
1052
1829
  this.settings = settings;
1053
1830
  this.config = config;
1054
1831
  }
1055
- get provider() {
1056
- return this.config.provider;
1057
- }
1058
1832
  getArgs({
1059
- mode,
1060
- inputFormat,
1061
1833
  prompt,
1062
- maxTokens,
1834
+ maxOutputTokens,
1063
1835
  temperature,
1064
1836
  topP,
1065
1837
  frequencyPenalty,
@@ -1068,16 +1840,24 @@ var OpenRouterCompletionLanguageModel = class {
1068
1840
  responseFormat,
1069
1841
  topK,
1070
1842
  stopSequences,
1071
- providerMetadata
1843
+ tools,
1844
+ toolChoice
1072
1845
  }) {
1073
- var _a, _b;
1074
- const type = mode.type;
1075
- const extraCallingBody = (_a = providerMetadata == null ? void 0 : providerMetadata.openrouter) != null ? _a : {};
1076
1846
  const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
1077
1847
  prompt,
1078
- inputFormat
1848
+ inputFormat: "prompt"
1079
1849
  });
1080
- const baseArgs = __spreadValues(__spreadValues(__spreadValues({
1850
+ if (tools == null ? void 0 : tools.length) {
1851
+ throw new UnsupportedFunctionalityError({
1852
+ functionality: "tools"
1853
+ });
1854
+ }
1855
+ if (toolChoice) {
1856
+ throw new UnsupportedFunctionalityError({
1857
+ functionality: "toolChoice"
1858
+ });
1859
+ }
1860
+ return __spreadValues(__spreadValues({
1081
1861
  // model id:
1082
1862
  model: this.modelId,
1083
1863
  models: this.settings.models,
@@ -1087,7 +1867,7 @@ var OpenRouterCompletionLanguageModel = class {
1087
1867
  suffix: this.settings.suffix,
1088
1868
  user: this.settings.user,
1089
1869
  // standardized settings:
1090
- max_tokens: maxTokens,
1870
+ max_tokens: maxOutputTokens,
1091
1871
  temperature,
1092
1872
  top_p: topP,
1093
1873
  frequency_penalty: frequencyPenalty,
@@ -1101,58 +1881,27 @@ var OpenRouterCompletionLanguageModel = class {
1101
1881
  // OpenRouter specific settings:
1102
1882
  include_reasoning: this.settings.includeReasoning,
1103
1883
  reasoning: this.settings.reasoning
1104
- }, this.config.extraBody), this.settings.extraBody), extraCallingBody);
1105
- switch (type) {
1106
- case "regular": {
1107
- if ((_b = mode.tools) == null ? void 0 : _b.length) {
1108
- throw new UnsupportedFunctionalityError3({
1109
- functionality: "tools"
1110
- });
1111
- }
1112
- if (mode.toolChoice) {
1113
- throw new UnsupportedFunctionalityError3({
1114
- functionality: "toolChoice"
1115
- });
1116
- }
1117
- return baseArgs;
1118
- }
1119
- case "object-json": {
1120
- throw new UnsupportedFunctionalityError3({
1121
- functionality: "object-json mode"
1122
- });
1123
- }
1124
- case "object-tool": {
1125
- throw new UnsupportedFunctionalityError3({
1126
- functionality: "object-tool mode"
1127
- });
1128
- }
1129
- // Handle all non-text types with a single default case
1130
- default: {
1131
- const _exhaustiveCheck = type;
1132
- throw new UnsupportedFunctionalityError3({
1133
- functionality: `${_exhaustiveCheck} mode`
1134
- });
1135
- }
1136
- }
1884
+ }, this.config.extraBody), this.settings.extraBody);
1137
1885
  }
1138
1886
  async doGenerate(options) {
1139
- var _b, _c, _d, _e, _f;
1140
- const args = this.getArgs(options);
1141
- const { responseHeaders, value: response } = await postJsonToApi2({
1887
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
1888
+ const providerOptions = options.providerOptions || {};
1889
+ const openrouterOptions = providerOptions.openrouter || {};
1890
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1891
+ const { value: response, responseHeaders } = await postJsonToApi({
1142
1892
  url: this.config.url({
1143
1893
  path: "/completions",
1144
1894
  modelId: this.modelId
1145
1895
  }),
1146
- headers: combineHeaders2(this.config.headers(), options.headers),
1896
+ headers: combineHeaders(this.config.headers(), options.headers),
1147
1897
  body: args,
1148
1898
  failedResponseHandler: openrouterFailedResponseHandler,
1149
- successfulResponseHandler: createJsonResponseHandler2(
1899
+ successfulResponseHandler: createJsonResponseHandler(
1150
1900
  OpenRouterCompletionChunkSchema
1151
1901
  ),
1152
1902
  abortSignal: options.abortSignal,
1153
1903
  fetch: this.config.fetch
1154
1904
  });
1155
- const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
1156
1905
  if ("error" in response) {
1157
1906
  throw new Error(`${response.error.message}`);
1158
1907
  }
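
doGenerate (and doStream below) now reads providerOptions.openrouter from the call options and spreads it over the body built by getArgs, replacing the v1 providerMetadata.openrouter passthrough. A hedged sketch of that merge; the `transforms` field and the model id are only examples of OpenRouter body options, not guarantees made by this diff:

    // Illustrative call options; the shape mirrors what doGenerate reads above.
    const callOptions = {
      providerOptions: {
        openrouter: {
          transforms: ["middle-out"], // example OpenRouter body field
        },
      },
    };

    const openrouterOptions = callOptions.providerOptions?.openrouter ?? {};
    // Extra provider options are spread over the standard args, as in the hunk above.
    const body = {
      model: "openai/gpt-4o", // illustrative model id
      prompt: "Hello",
      ...openrouterOptions,
    };
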
@@ -1161,54 +1910,62 @@ var OpenRouterCompletionLanguageModel = class {
1161
1910
  throw new Error("No choice in OpenRouter completion response");
1162
1911
  }
1163
1912
  return {
1164
- response: {
1165
- id: response.id,
1166
- modelId: response.model
1167
- },
1168
- text: (_b = choice.text) != null ? _b : "",
1169
- reasoning: choice.reasoning || void 0,
1913
+ content: [
1914
+ {
1915
+ type: "text",
1916
+ text: (_a15 = choice.text) != null ? _a15 : ""
1917
+ }
1918
+ ],
1919
+ finishReason: mapOpenRouterFinishReason(choice.finish_reason),
1170
1920
  usage: {
1171
- promptTokens: (_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens) != null ? _d : 0,
1172
- completionTokens: (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens) != null ? _f : 0
1921
+ inputTokens: (_c = (_b = response.usage) == null ? void 0 : _b.prompt_tokens) != null ? _c : 0,
1922
+ outputTokens: (_e = (_d = response.usage) == null ? void 0 : _d.completion_tokens) != null ? _e : 0,
1923
+ totalTokens: ((_g = (_f = response.usage) == null ? void 0 : _f.prompt_tokens) != null ? _g : 0) + ((_i = (_h = response.usage) == null ? void 0 : _h.completion_tokens) != null ? _i : 0),
1924
+ reasoningTokens: (_l = (_k = (_j = response.usage) == null ? void 0 : _j.completion_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : 0,
1925
+ cachedInputTokens: (_o = (_n = (_m = response.usage) == null ? void 0 : _m.prompt_tokens_details) == null ? void 0 : _n.cached_tokens) != null ? _o : 0
1173
1926
  },
1174
- finishReason: mapOpenRouterFinishReason(choice.finish_reason),
1175
- logprobs: mapOpenRouterCompletionLogProbs(choice.logprobs),
1176
- rawCall: { rawPrompt, rawSettings },
1177
- rawResponse: { headers: responseHeaders },
1178
- warnings: []
1927
+ warnings: [],
1928
+ response: {
1929
+ headers: responseHeaders
1930
+ }
1179
1931
  };
1180
1932
  }
1181
1933
  async doStream(options) {
1182
- const args = this.getArgs(options);
1183
- const { responseHeaders, value: response } = await postJsonToApi2({
1934
+ const providerOptions = options.providerOptions || {};
1935
+ const openrouterOptions = providerOptions.openrouter || {};
1936
+ const args = __spreadValues(__spreadValues({}, this.getArgs(options)), openrouterOptions);
1937
+ const { value: response, responseHeaders } = await postJsonToApi({
1184
1938
  url: this.config.url({
1185
1939
  path: "/completions",
1186
1940
  modelId: this.modelId
1187
1941
  }),
1188
- headers: combineHeaders2(this.config.headers(), options.headers),
1189
- body: __spreadProps(__spreadValues({}, this.getArgs(options)), {
1942
+ headers: combineHeaders(this.config.headers(), options.headers),
1943
+ body: __spreadProps(__spreadValues({}, args), {
1190
1944
  stream: true,
1191
1945
  // only include stream_options when in strict compatibility mode:
1192
1946
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
1193
1947
  }),
1194
1948
  failedResponseHandler: openrouterFailedResponseHandler,
1195
- successfulResponseHandler: createEventSourceResponseHandler2(
1949
+ successfulResponseHandler: createEventSourceResponseHandler(
1196
1950
  OpenRouterCompletionChunkSchema
1197
1951
  ),
1198
1952
  abortSignal: options.abortSignal,
1199
1953
  fetch: this.config.fetch
1200
1954
  });
1201
- const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
1202
1955
  let finishReason = "other";
1203
- let usage = {
1204
- promptTokens: Number.NaN,
1205
- completionTokens: Number.NaN
1956
+ const usage = {
1957
+ inputTokens: Number.NaN,
1958
+ outputTokens: Number.NaN,
1959
+ totalTokens: Number.NaN,
1960
+ reasoningTokens: Number.NaN,
1961
+ cachedInputTokens: Number.NaN
1206
1962
  };
1207
- let logprobs;
1963
+ const openrouterUsage = {};
1208
1964
  return {
1209
1965
  stream: response.pipeThrough(
1210
1966
  new TransformStream({
1211
1967
  transform(chunk, controller) {
1968
+ var _a15, _b;
1212
1969
  if (!chunk.success) {
1213
1970
  finishReason = "error";
1214
1971
  controller.enqueue({ type: "error", error: chunk.error });
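
For the non-streaming path, the hunk above moves the completion model's doGenerate from the v1 fields (text, reasoning, logprobs, rawCall, rawResponse) to the v2 shape: a content array of typed parts, the five-field usage object, and a response object that only carries headers. A simplified sketch of reading the generated text from that result (assumed result typing, not an upstream declaration):

    // Simplified sketch of the v2 doGenerate result consumed by callers.
    interface V2GenerateResult {
      content: Array<{ type: "text"; text: string }>;
      finishReason: string;
      usage: {
        inputTokens: number;
        outputTokens: number;
        totalTokens: number;
        reasoningTokens: number;
        cachedInputTokens: number;
      };
      warnings: unknown[];
      response: { headers: Record<string, string> | undefined };
    }

    // Illustrative: the v1 `text` field is now the joined text parts.
    function textOf(result: V2GenerateResult): string {
      return result.content
        .filter((part) => part.type === "text")
        .map((part) => part.text)
        .join("");
    }
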
@@ -1221,10 +1978,27 @@ var OpenRouterCompletionLanguageModel = class {
1221
1978
  return;
1222
1979
  }
1223
1980
  if (value.usage != null) {
1224
- usage = {
1225
- promptTokens: value.usage.prompt_tokens,
1226
- completionTokens: value.usage.completion_tokens
1227
- };
1981
+ usage.inputTokens = value.usage.prompt_tokens;
1982
+ usage.outputTokens = value.usage.completion_tokens;
1983
+ usage.totalTokens = value.usage.prompt_tokens + value.usage.completion_tokens;
1984
+ openrouterUsage.promptTokens = value.usage.prompt_tokens;
1985
+ if (value.usage.prompt_tokens_details) {
1986
+ const cachedInputTokens = (_a15 = value.usage.prompt_tokens_details.cached_tokens) != null ? _a15 : 0;
1987
+ usage.cachedInputTokens = cachedInputTokens;
1988
+ openrouterUsage.promptTokensDetails = {
1989
+ cachedTokens: cachedInputTokens
1990
+ };
1991
+ }
1992
+ openrouterUsage.completionTokens = value.usage.completion_tokens;
1993
+ if (value.usage.completion_tokens_details) {
1994
+ const reasoningTokens = (_b = value.usage.completion_tokens_details.reasoning_tokens) != null ? _b : 0;
1995
+ usage.reasoningTokens = reasoningTokens;
1996
+ openrouterUsage.completionTokensDetails = {
1997
+ reasoningTokens
1998
+ };
1999
+ }
2000
+ openrouterUsage.cost = value.usage.cost;
2001
+ openrouterUsage.totalTokens = value.usage.total_tokens;
1228
2002
  }
1229
2003
  const choice = value.choices[0];
1230
2004
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -1233,60 +2007,31 @@ var OpenRouterCompletionLanguageModel = class {
1233
2007
  if ((choice == null ? void 0 : choice.text) != null) {
1234
2008
  controller.enqueue({
1235
2009
  type: "text-delta",
1236
- textDelta: choice.text
2010
+ delta: choice.text,
2011
+ id: generateId()
1237
2012
  });
1238
2013
  }
1239
- const mappedLogprobs = mapOpenRouterCompletionLogProbs(
1240
- choice == null ? void 0 : choice.logprobs
1241
- );
1242
- if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
1243
- if (logprobs === void 0) {
1244
- logprobs = [];
1245
- }
1246
- logprobs.push(...mappedLogprobs);
1247
- }
1248
2014
  },
1249
2015
  flush(controller) {
1250
2016
  controller.enqueue({
1251
2017
  type: "finish",
1252
2018
  finishReason,
1253
- logprobs,
1254
- usage
2019
+ usage,
2020
+ providerMetadata: {
2021
+ openrouter: {
2022
+ usage: openrouterUsage
2023
+ }
2024
+ }
1255
2025
  });
1256
2026
  }
1257
2027
  })
1258
2028
  ),
1259
- rawCall: { rawPrompt, rawSettings },
1260
- rawResponse: { headers: responseHeaders },
1261
- warnings: []
2029
+ response: {
2030
+ headers: responseHeaders
2031
+ }
1262
2032
  };
1263
2033
  }
1264
2034
  };
1265
- var OpenRouterCompletionChunkSchema = z4.union([
1266
- z4.object({
1267
- id: z4.string().optional(),
1268
- model: z4.string().optional(),
1269
- choices: z4.array(
1270
- z4.object({
1271
- text: z4.string(),
1272
- reasoning: z4.string().nullish().optional(),
1273
- reasoning_details: ReasoningDetailArraySchema.nullish(),
1274
- finish_reason: z4.string().nullish(),
1275
- index: z4.number(),
1276
- logprobs: z4.object({
1277
- tokens: z4.array(z4.string()),
1278
- token_logprobs: z4.array(z4.number()),
1279
- top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullable()
1280
- }).nullable().optional()
1281
- })
1282
- ),
1283
- usage: z4.object({
1284
- prompt_tokens: z4.number(),
1285
- completion_tokens: z4.number()
1286
- }).optional().nullable()
1287
- }),
1288
- OpenRouterErrorResponseSchema
1289
- ]);
1290
2035
  export {
1291
2036
  OpenRouterChatLanguageModel,
1292
2037
  OpenRouterCompletionLanguageModel