@openrouter/ai-sdk-provider 0.0.3

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1062 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __defProps = Object.defineProperties;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
6
+ var __getOwnPropNames = Object.getOwnPropertyNames;
7
+ var __getOwnPropSymbols = Object.getOwnPropertySymbols;
8
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
9
+ var __propIsEnum = Object.prototype.propertyIsEnumerable;
10
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
11
+ var __spreadValues = (a, b) => {
12
+ for (var prop in b || (b = {}))
13
+ if (__hasOwnProp.call(b, prop))
14
+ __defNormalProp(a, prop, b[prop]);
15
+ if (__getOwnPropSymbols)
16
+ for (var prop of __getOwnPropSymbols(b)) {
17
+ if (__propIsEnum.call(b, prop))
18
+ __defNormalProp(a, prop, b[prop]);
19
+ }
20
+ return a;
21
+ };
22
+ var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
23
+ var __objRest = (source, exclude) => {
24
+ var target = {};
25
+ for (var prop in source)
26
+ if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
27
+ target[prop] = source[prop];
28
+ if (source != null && __getOwnPropSymbols)
29
+ for (var prop of __getOwnPropSymbols(source)) {
30
+ if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
31
+ target[prop] = source[prop];
32
+ }
33
+ return target;
34
+ };
35
+ var __export = (target, all) => {
36
+ for (var name in all)
37
+ __defProp(target, name, { get: all[name], enumerable: true });
38
+ };
39
+ var __copyProps = (to, from, except, desc) => {
40
+ if (from && typeof from === "object" || typeof from === "function") {
41
+ for (let key of __getOwnPropNames(from))
42
+ if (!__hasOwnProp.call(to, key) && key !== except)
43
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
44
+ }
45
+ return to;
46
+ };
47
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
48
+
49
+ // src/index.ts
50
+ var src_exports = {};
51
+ __export(src_exports, {
52
+ OpenRouter: () => OpenRouter,
53
+ createOpenRouter: () => createOpenRouter,
54
+ openrouter: () => openrouter
55
+ });
56
+ module.exports = __toCommonJS(src_exports);
57
+
58
+ // src/openrouter-facade.ts
59
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
60
+
61
+ // src/openrouter-chat-language-model.ts
62
+ var import_provider = require("@ai-sdk/provider");
63
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
64
+ var import_zod2 = require("zod");
65
+
66
+ // src/convert-to-openrouter-chat-messages.ts
67
+ var import_provider_utils = require("@ai-sdk/provider-utils");
68
+ function convertToOpenRouterChatMessages(prompt) {
69
+ var _a;
70
+ const messages = [];
71
+ for (const { role, content } of prompt) {
72
+ switch (role) {
73
+ case "system": {
74
+ messages.push({ role: "system", content });
75
+ break;
76
+ }
77
+ case "user": {
78
+ if (content.length === 1 && ((_a = content[0]) == null ? void 0 : _a.type) === "text") {
79
+ messages.push({ role: "user", content: content[0].text });
80
+ break;
81
+ }
82
+ messages.push({
83
+ role: "user",
84
+ content: content.map((part) => {
85
+ var _a2;
86
+ switch (part.type) {
87
+ case "text": {
88
+ return { type: "text", text: part.text };
89
+ }
90
+ case "image": {
91
+ return {
92
+ type: "image_url",
93
+ image_url: {
94
+ url: part.image instanceof URL ? part.image.toString() : `data:${(_a2 = part.mimeType) != null ? _a2 : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`
95
+ }
96
+ };
97
+ }
98
+ }
99
+ })
100
+ });
101
+ break;
102
+ }
103
+ case "assistant": {
104
+ let text = "";
105
+ const toolCalls = [];
106
+ for (const part of content) {
107
+ switch (part.type) {
108
+ case "text": {
109
+ text += part.text;
110
+ break;
111
+ }
112
+ case "tool-call": {
113
+ toolCalls.push({
114
+ id: part.toolCallId,
115
+ type: "function",
116
+ function: {
117
+ name: part.toolName,
118
+ arguments: JSON.stringify(part.args)
119
+ }
120
+ });
121
+ break;
122
+ }
123
+ default: {
124
+ const _exhaustiveCheck = part;
125
+ throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
126
+ }
127
+ }
128
+ }
129
+ messages.push({
130
+ role: "assistant",
131
+ content: text,
132
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
133
+ });
134
+ break;
135
+ }
136
+ case "tool": {
137
+ for (const toolResponse of content) {
138
+ messages.push({
139
+ role: "tool",
140
+ tool_call_id: toolResponse.toolCallId,
141
+ content: JSON.stringify(toolResponse.result)
142
+ });
143
+ }
144
+ break;
145
+ }
146
+ default: {
147
+ const _exhaustiveCheck = role;
148
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
149
+ }
150
+ }
151
+ }
152
+ return messages;
153
+ }
154
+
155
+ // src/map-openrouter-chat-logprobs.ts
156
+ function mapOpenRouterChatLogProbsOutput(logprobs) {
157
+ var _a, _b;
158
+ return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
159
+ token,
160
+ logprob,
161
+ topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
162
+ token: token2,
163
+ logprob: logprob2
164
+ })) : []
165
+ }))) != null ? _b : void 0;
166
+ }
167
+
168
+ // src/map-openrouter-finish-reason.ts
169
+ function mapOpenRouterFinishReason(finishReason) {
170
+ switch (finishReason) {
171
+ case "stop":
172
+ return "stop";
173
+ case "length":
174
+ return "length";
175
+ case "content_filter":
176
+ return "content-filter";
177
+ case "function_call":
178
+ case "tool_calls":
179
+ return "tool-calls";
180
+ default:
181
+ return "unknown";
182
+ }
183
+ }
184
+
185
+ // src/openrouter-error.ts
186
+ var import_zod = require("zod");
187
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
188
+ var openAIErrorDataSchema = import_zod.z.object({
189
+ error: import_zod.z.object({
190
+ message: import_zod.z.string(),
191
+ type: import_zod.z.string(),
192
+ param: import_zod.z.any().nullable(),
193
+ code: import_zod.z.string().nullable()
194
+ })
195
+ });
196
+ var openrouterFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
197
+ errorSchema: openAIErrorDataSchema,
198
+ errorToMessage: (data) => data.error.message
199
+ });
200
+
201
+ // src/openrouter-chat-language-model.ts
202
+ var OpenRouterChatLanguageModel = class {
203
+ constructor(modelId, settings, config) {
204
+ this.specificationVersion = "v1";
205
+ this.defaultObjectGenerationMode = "tool";
206
+ this.modelId = modelId;
207
+ this.settings = settings;
208
+ this.config = config;
209
+ }
210
+ get provider() {
211
+ return this.config.provider;
212
+ }
213
+ getArgs({
214
+ mode,
215
+ prompt,
216
+ maxTokens,
217
+ temperature,
218
+ topP,
219
+ frequencyPenalty,
220
+ presencePenalty,
221
+ seed
222
+ }) {
223
+ const type = mode.type;
224
+ const baseArgs = {
225
+ // model id:
226
+ model: this.modelId,
227
+ // model specific settings:
228
+ logit_bias: this.settings.logitBias,
229
+ logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number" ? true : void 0,
230
+ top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
231
+ user: this.settings.user,
232
+ parallel_tool_calls: this.settings.parallelToolCalls,
233
+ // standardized settings:
234
+ max_tokens: maxTokens,
235
+ temperature,
236
+ top_p: topP,
237
+ frequency_penalty: frequencyPenalty,
238
+ presence_penalty: presencePenalty,
239
+ seed,
240
+ // messages:
241
+ messages: convertToOpenRouterChatMessages(prompt)
242
+ };
243
+ switch (type) {
244
+ case "regular": {
245
+ return __spreadValues(__spreadValues({}, baseArgs), prepareToolsAndToolChoice(mode));
246
+ }
247
+ case "object-json": {
248
+ return __spreadProps(__spreadValues({}, baseArgs), {
249
+ response_format: { type: "json_object" }
250
+ });
251
+ }
252
+ case "object-tool": {
253
+ return __spreadProps(__spreadValues({}, baseArgs), {
254
+ tool_choice: { type: "function", function: { name: mode.tool.name } },
255
+ tools: [
256
+ {
257
+ type: "function",
258
+ function: {
259
+ name: mode.tool.name,
260
+ description: mode.tool.description,
261
+ parameters: mode.tool.parameters
262
+ }
263
+ }
264
+ ]
265
+ });
266
+ }
267
+ case "object-grammar": {
268
+ throw new import_provider.UnsupportedFunctionalityError({
269
+ functionality: "object-grammar mode"
270
+ });
271
+ }
272
+ default: {
273
+ const _exhaustiveCheck = type;
274
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
275
+ }
276
+ }
277
+ }
278
+ async doGenerate(options) {
279
+ var _b, _c;
280
+ const args = this.getArgs(options);
281
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
282
+ url: this.config.url({
283
+ path: "/chat/completions",
284
+ modelId: this.modelId
285
+ }),
286
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
287
+ body: args,
288
+ failedResponseHandler: openrouterFailedResponseHandler,
289
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
290
+ openAIChatResponseSchema
291
+ ),
292
+ abortSignal: options.abortSignal,
293
+ fetch: this.config.fetch
294
+ });
295
+ const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
296
+ const choice = response.choices[0];
297
+ if (choice == null) {
298
+ throw new Error("No choice in response");
299
+ }
300
+ return {
301
+ text: (_b = choice.message.content) != null ? _b : void 0,
302
+ toolCalls: (_c = choice.message.tool_calls) == null ? void 0 : _c.map((toolCall) => {
303
+ var _a2;
304
+ return {
305
+ toolCallType: "function",
306
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
307
+ toolName: toolCall.function.name,
308
+ args: toolCall.function.arguments
309
+ };
310
+ }),
311
+ finishReason: mapOpenRouterFinishReason(choice.finish_reason),
312
+ usage: {
313
+ promptTokens: response.usage.prompt_tokens,
314
+ completionTokens: response.usage.completion_tokens
315
+ },
316
+ rawCall: { rawPrompt, rawSettings },
317
+ rawResponse: { headers: responseHeaders },
318
+ warnings: [],
319
+ logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
320
+ };
321
+ }
322
+ async doStream(options) {
323
+ const args = this.getArgs(options);
324
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
325
+ url: this.config.url({
326
+ path: "/chat/completions",
327
+ modelId: this.modelId
328
+ }),
329
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
330
+ body: __spreadProps(__spreadValues({}, args), {
331
+ stream: true,
332
+ // only include stream_options when in strict compatibility mode:
333
+ stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
334
+ }),
335
+ failedResponseHandler: openrouterFailedResponseHandler,
336
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
337
+ openrouterChatChunkSchema
338
+ ),
339
+ abortSignal: options.abortSignal,
340
+ fetch: this.config.fetch
341
+ });
342
+ const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
343
+ const toolCalls = [];
344
+ let finishReason = "other";
345
+ let usage = {
346
+ promptTokens: Number.NaN,
347
+ completionTokens: Number.NaN
348
+ };
349
+ let logprobs;
350
+ return {
351
+ stream: response.pipeThrough(
352
+ new TransformStream({
353
+ transform(chunk, controller) {
354
+ var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
355
+ if (!chunk.success) {
356
+ finishReason = "error";
357
+ controller.enqueue({ type: "error", error: chunk.error });
358
+ return;
359
+ }
360
+ const value = chunk.value;
361
+ if ("error" in value) {
362
+ finishReason = "error";
363
+ controller.enqueue({ type: "error", error: value.error });
364
+ return;
365
+ }
366
+ if (value.usage != null) {
367
+ usage = {
368
+ promptTokens: value.usage.prompt_tokens,
369
+ completionTokens: value.usage.completion_tokens
370
+ };
371
+ }
372
+ const choice = value.choices[0];
373
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
374
+ finishReason = mapOpenRouterFinishReason(choice.finish_reason);
375
+ }
376
+ if ((choice == null ? void 0 : choice.delta) == null) {
377
+ return;
378
+ }
379
+ const delta = choice.delta;
380
+ if (delta.content != null) {
381
+ controller.enqueue({
382
+ type: "text-delta",
383
+ textDelta: delta.content
384
+ });
385
+ }
386
+ const mappedLogprobs = mapOpenRouterChatLogProbsOutput(
387
+ choice == null ? void 0 : choice.logprobs
388
+ );
389
+ if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
390
+ if (logprobs === void 0) logprobs = [];
391
+ logprobs.push(...mappedLogprobs);
392
+ }
393
+ if (delta.tool_calls != null) {
394
+ for (const toolCallDelta of delta.tool_calls) {
395
+ const index = toolCallDelta.index;
396
+ if (toolCalls[index] == null) {
397
+ if (toolCallDelta.type !== "function") {
398
+ throw new import_provider.InvalidResponseDataError({
399
+ data: toolCallDelta,
400
+ message: `Expected 'function' type.`
401
+ });
402
+ }
403
+ if (toolCallDelta.id == null) {
404
+ throw new import_provider.InvalidResponseDataError({
405
+ data: toolCallDelta,
406
+ message: `Expected 'id' to be a string.`
407
+ });
408
+ }
409
+ if (((_a2 = toolCallDelta.function) == null ? void 0 : _a2.name) == null) {
410
+ throw new import_provider.InvalidResponseDataError({
411
+ data: toolCallDelta,
412
+ message: `Expected 'function.name' to be a string.`
413
+ });
414
+ }
415
+ toolCalls[index] = {
416
+ id: toolCallDelta.id,
417
+ type: "function",
418
+ function: {
419
+ name: toolCallDelta.function.name,
420
+ arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
421
+ }
422
+ };
423
+ const toolCall2 = toolCalls[index];
424
+ if (toolCall2 == null) {
425
+ throw new Error("Tool call is missing");
426
+ }
427
+ if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
428
+ controller.enqueue({
429
+ type: "tool-call-delta",
430
+ toolCallType: "function",
431
+ toolCallId: toolCall2.id,
432
+ toolName: toolCall2.function.name,
433
+ argsTextDelta: toolCall2.function.arguments
434
+ });
435
+ controller.enqueue({
436
+ type: "tool-call",
437
+ toolCallType: "function",
438
+ toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils3.generateId)(),
439
+ toolName: toolCall2.function.name,
440
+ args: toolCall2.function.arguments
441
+ });
442
+ }
443
+ continue;
444
+ }
445
+ const toolCall = toolCalls[index];
446
+ if (toolCall == null) {
447
+ throw new Error("Tool call is missing");
448
+ }
449
+ if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
450
+ toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
451
+ }
452
+ controller.enqueue({
453
+ type: "tool-call-delta",
454
+ toolCallType: "function",
455
+ toolCallId: toolCall.id,
456
+ toolName: toolCall.function.name,
457
+ argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
458
+ });
459
+ if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
460
+ controller.enqueue({
461
+ type: "tool-call",
462
+ toolCallType: "function",
463
+ toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils3.generateId)(),
464
+ toolName: toolCall.function.name,
465
+ args: toolCall.function.arguments
466
+ });
467
+ }
468
+ }
469
+ }
470
+ },
471
+ flush(controller) {
472
+ controller.enqueue({
473
+ type: "finish",
474
+ finishReason,
475
+ logprobs,
476
+ usage
477
+ });
478
+ }
479
+ })
480
+ ),
481
+ rawCall: { rawPrompt, rawSettings },
482
+ rawResponse: { headers: responseHeaders },
483
+ warnings: []
484
+ };
485
+ }
486
+ };
487
+ var openAIChatResponseSchema = import_zod2.z.object({
488
+ choices: import_zod2.z.array(
489
+ import_zod2.z.object({
490
+ message: import_zod2.z.object({
491
+ role: import_zod2.z.literal("assistant"),
492
+ content: import_zod2.z.string().nullable().optional(),
493
+ tool_calls: import_zod2.z.array(
494
+ import_zod2.z.object({
495
+ id: import_zod2.z.string().optional().nullable(),
496
+ type: import_zod2.z.literal("function"),
497
+ function: import_zod2.z.object({
498
+ name: import_zod2.z.string(),
499
+ arguments: import_zod2.z.string()
500
+ })
501
+ })
502
+ ).optional()
503
+ }),
504
+ index: import_zod2.z.number(),
505
+ logprobs: import_zod2.z.object({
506
+ content: import_zod2.z.array(
507
+ import_zod2.z.object({
508
+ token: import_zod2.z.string(),
509
+ logprob: import_zod2.z.number(),
510
+ top_logprobs: import_zod2.z.array(
511
+ import_zod2.z.object({
512
+ token: import_zod2.z.string(),
513
+ logprob: import_zod2.z.number()
514
+ })
515
+ )
516
+ })
517
+ ).nullable()
518
+ }).nullable().optional(),
519
+ finish_reason: import_zod2.z.string().optional().nullable()
520
+ })
521
+ ),
522
+ usage: import_zod2.z.object({
523
+ prompt_tokens: import_zod2.z.number(),
524
+ completion_tokens: import_zod2.z.number()
525
+ })
526
+ });
527
+ var openrouterChatChunkSchema = import_zod2.z.union([
528
+ import_zod2.z.object({
529
+ choices: import_zod2.z.array(
530
+ import_zod2.z.object({
531
+ delta: import_zod2.z.object({
532
+ role: import_zod2.z.enum(["assistant"]).optional(),
533
+ content: import_zod2.z.string().nullish(),
534
+ tool_calls: import_zod2.z.array(
535
+ import_zod2.z.object({
536
+ index: import_zod2.z.number(),
537
+ id: import_zod2.z.string().nullish(),
538
+ type: import_zod2.z.literal("function").optional(),
539
+ function: import_zod2.z.object({
540
+ name: import_zod2.z.string().nullish(),
541
+ arguments: import_zod2.z.string().nullish()
542
+ })
543
+ })
544
+ ).nullish()
545
+ }).nullish(),
546
+ logprobs: import_zod2.z.object({
547
+ content: import_zod2.z.array(
548
+ import_zod2.z.object({
549
+ token: import_zod2.z.string(),
550
+ logprob: import_zod2.z.number(),
551
+ top_logprobs: import_zod2.z.array(
552
+ import_zod2.z.object({
553
+ token: import_zod2.z.string(),
554
+ logprob: import_zod2.z.number()
555
+ })
556
+ )
557
+ })
558
+ ).nullable()
559
+ }).nullish(),
560
+ finish_reason: import_zod2.z.string().nullable().optional(),
561
+ index: import_zod2.z.number()
562
+ })
563
+ ),
564
+ usage: import_zod2.z.object({
565
+ prompt_tokens: import_zod2.z.number(),
566
+ completion_tokens: import_zod2.z.number()
567
+ }).nullish()
568
+ }),
569
+ openAIErrorDataSchema
570
+ ]);
571
+ function prepareToolsAndToolChoice(mode) {
572
+ var _a;
573
+ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
574
+ if (tools == null) {
575
+ return { tools: void 0, tool_choice: void 0 };
576
+ }
577
+ const mappedTools = tools.map((tool) => ({
578
+ type: "function",
579
+ function: {
580
+ name: tool.name,
581
+ description: tool.description,
582
+ parameters: tool.parameters
583
+ }
584
+ }));
585
+ const toolChoice = mode.toolChoice;
586
+ if (toolChoice == null) {
587
+ return { tools: mappedTools, tool_choice: void 0 };
588
+ }
589
+ const type = toolChoice.type;
590
+ switch (type) {
591
+ case "auto":
592
+ case "none":
593
+ case "required":
594
+ return { tools: mappedTools, tool_choice: type };
595
+ case "tool":
596
+ return {
597
+ tools: mappedTools,
598
+ tool_choice: {
599
+ type: "function",
600
+ function: {
601
+ name: toolChoice.toolName
602
+ }
603
+ }
604
+ };
605
+ default: {
606
+ const _exhaustiveCheck = type;
607
+ throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
608
+ }
609
+ }
610
+ }
611
+
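
For orientation, here is a rough sketch of what prepareToolsAndToolChoice yields for a single AI SDK tool definition in "regular" mode; the tool name, description, and schema are made up for illustration.

const mode = {
  type: "regular",
  tools: [
    {
      name: "getWeather", // hypothetical tool
      description: "Look up the current weather for a city",
      parameters: { type: "object", properties: { city: { type: "string" } } },
    },
  ],
  toolChoice: { type: "tool", toolName: "getWeather" },
};

// prepareToolsAndToolChoice(mode) is expected to return roughly:
// {
//   tools: [{ type: "function", function: { name: "getWeather", description: "Look up the current weather for a city", parameters: { ... } } }],
//   tool_choice: { type: "function", function: { name: "getWeather" } }
// }
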
612
+ // src/openrouter-completion-language-model.ts
613
+ var import_provider3 = require("@ai-sdk/provider");
614
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
615
+ var import_zod3 = require("zod");
616
+
617
+ // src/convert-to-openrouter-completion-prompt.ts
618
+ var import_provider2 = require("@ai-sdk/provider");
619
+ function convertToOpenRouterCompletionPrompt({
620
+ prompt,
621
+ inputFormat,
622
+ user = "user",
623
+ assistant = "assistant"
624
+ }) {
625
+ if (inputFormat === "prompt" && prompt.length === 1 && prompt[0] && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0] && prompt[0].content[0].type === "text") {
626
+ return { prompt: prompt[0].content[0].text };
627
+ }
628
+ let text = "";
629
+ if (prompt[0] && prompt[0].role === "system") {
630
+ text += `${prompt[0].content}
631
+
632
+ `;
633
+ prompt = prompt.slice(1);
634
+ }
635
+ for (const { role, content } of prompt) {
636
+ switch (role) {
637
+ case "system": {
638
+ throw new import_provider2.InvalidPromptError({
639
+ message: "Unexpected system message in prompt: ${content}",
640
+ prompt
641
+ });
642
+ }
643
+ case "user": {
644
+ const userMessage = content.map((part) => {
645
+ switch (part.type) {
646
+ case "text": {
647
+ return part.text;
648
+ }
649
+ case "image": {
650
+ throw new import_provider2.UnsupportedFunctionalityError({
651
+ functionality: "images"
652
+ });
653
+ }
654
+ }
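
As a quick illustration of the conversion above (prompt values are made up): a user message with a single text part collapses to a plain string, while mixed text and image parts become an array of typed parts, with URL images passed through and binary images inlined as data URLs.

const prompt = [
  { role: "system", content: "Answer briefly." },
  {
    role: "user",
    content: [
      { type: "text", text: "What is in this image?" },
      { type: "image", image: new URL("https://example.com/cat.png") },
    ],
  },
];

// convertToOpenRouterChatMessages(prompt) is expected to produce roughly:
// [
//   { role: "system", content: "Answer briefly." },
//   { role: "user", content: [
//       { type: "text", text: "What is in this image?" },
//       { type: "image_url", image_url: { url: "https://example.com/cat.png" } },
//   ] },
// ]
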
655
+ }).join("");
656
+ text += `${user}:
657
+ ${userMessage}
658
+
659
+ `;
660
+ break;
661
+ }
662
+ case "assistant": {
663
+ const assistantMessage = content.map((part) => {
664
+ switch (part.type) {
665
+ case "text": {
666
+ return part.text;
667
+ }
668
+ case "tool-call": {
669
+ throw new import_provider2.UnsupportedFunctionalityError({
670
+ functionality: "tool-call messages"
671
+ });
672
+ }
673
+ }
674
+ }).join("");
675
+ text += `${assistant}:
676
+ ${assistantMessage}
677
+
678
+ `;
679
+ break;
680
+ }
681
+ case "tool": {
682
+ throw new import_provider2.UnsupportedFunctionalityError({
683
+ functionality: "tool messages"
684
+ });
685
+ }
686
+ default: {
687
+ const _exhaustiveCheck = role;
688
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
689
+ }
690
+ }
691
+ }
692
+ text += `${assistant}:
693
+ `;
694
+ return {
695
+ prompt: text,
696
+ stopSequences: [`
697
+ ${user}:`]
698
+ };
699
+ }
700
+
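
A sketch of the text layout this produces for a simple system + user prompt (values are illustrative; whitespace is shown as escape sequences):

const completionPrompt = [
  { role: "system", content: "Be brief." },
  { role: "user", content: [{ type: "text", text: "Name a color." }] },
];

// convertToOpenRouterCompletionPrompt({ inputFormat: "messages", prompt: completionPrompt })
// is expected to return roughly:
// {
//   prompt: "Be brief.\n\nuser:\nName a color.\n\nassistant:\n",
//   stopSequences: ["\nuser:"],
// }
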
701
+ // src/map-openrouter-completion-logprobs.ts
702
+ function mapOpenRouterCompletionLogProbs(logprobs) {
703
+ return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => {
704
+ var _a, _b;
705
+ return {
706
+ token,
707
+ logprob: (_a = logprobs.token_logprobs[index]) != null ? _a : 0,
708
+ topLogprobs: logprobs.top_logprobs ? Object.entries((_b = logprobs.top_logprobs[index]) != null ? _b : {}).map(
709
+ ([token2, logprob]) => ({
710
+ token: token2,
711
+ logprob
712
+ })
713
+ ) : []
714
+ };
715
+ });
716
+ }
717
+
718
+ // src/openrouter-completion-language-model.ts
719
+ var OpenRouterCompletionLanguageModel = class {
720
+ constructor(modelId, settings, config) {
721
+ this.specificationVersion = "v1";
722
+ this.defaultObjectGenerationMode = void 0;
723
+ this.modelId = modelId;
724
+ this.settings = settings;
725
+ this.config = config;
726
+ }
727
+ get provider() {
728
+ return this.config.provider;
729
+ }
730
+ getArgs({
731
+ mode,
732
+ inputFormat,
733
+ prompt,
734
+ maxTokens,
735
+ temperature,
736
+ topP,
737
+ frequencyPenalty,
738
+ presencePenalty,
739
+ seed
740
+ }) {
741
+ var _a;
742
+ const type = mode.type;
743
+ const { prompt: completionPrompt, stopSequences } = convertToOpenRouterCompletionPrompt({ prompt, inputFormat });
744
+ const baseArgs = {
745
+ // model id:
746
+ model: this.modelId,
747
+ // model specific settings:
748
+ echo: this.settings.echo,
749
+ logit_bias: this.settings.logitBias,
750
+ logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
751
+ suffix: this.settings.suffix,
752
+ user: this.settings.user,
753
+ // standardized settings:
754
+ max_tokens: maxTokens,
755
+ temperature,
756
+ top_p: topP,
757
+ frequency_penalty: frequencyPenalty,
758
+ presence_penalty: presencePenalty,
759
+ seed,
760
+ // prompt:
761
+ prompt: completionPrompt,
762
+ // stop sequences:
763
+ stop: stopSequences
764
+ };
765
+ switch (type) {
766
+ case "regular": {
767
+ if ((_a = mode.tools) == null ? void 0 : _a.length) {
768
+ throw new import_provider3.UnsupportedFunctionalityError({
769
+ functionality: "tools"
770
+ });
771
+ }
772
+ if (mode.toolChoice) {
773
+ throw new import_provider3.UnsupportedFunctionalityError({
774
+ functionality: "toolChoice"
775
+ });
776
+ }
777
+ return baseArgs;
778
+ }
779
+ case "object-json": {
780
+ throw new import_provider3.UnsupportedFunctionalityError({
781
+ functionality: "object-json mode"
782
+ });
783
+ }
784
+ case "object-tool": {
785
+ throw new import_provider3.UnsupportedFunctionalityError({
786
+ functionality: "object-tool mode"
787
+ });
788
+ }
789
+ case "object-grammar": {
790
+ throw new import_provider3.UnsupportedFunctionalityError({
791
+ functionality: "object-grammar mode"
792
+ });
793
+ }
794
+ default: {
795
+ const _exhaustiveCheck = type;
796
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
797
+ }
798
+ }
799
+ }
800
+ async doGenerate(options) {
801
+ const args = this.getArgs(options);
802
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
803
+ url: this.config.url({
804
+ path: "/completions",
805
+ modelId: this.modelId
806
+ }),
807
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
808
+ body: args,
809
+ failedResponseHandler: openrouterFailedResponseHandler,
810
+ successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
811
+ openAICompletionResponseSchema
812
+ ),
813
+ abortSignal: options.abortSignal,
814
+ fetch: this.config.fetch
815
+ });
816
+ const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
817
+ const choice = response.choices[0];
818
+ if (!choice) {
819
+ throw new Error("No choice in OpenRouter completion response");
820
+ }
821
+ return {
822
+ text: choice.text,
823
+ usage: {
824
+ promptTokens: response.usage.prompt_tokens,
825
+ completionTokens: response.usage.completion_tokens
826
+ },
827
+ finishReason: mapOpenRouterFinishReason(choice.finish_reason),
828
+ logprobs: mapOpenRouterCompletionLogProbs(choice.logprobs),
829
+ rawCall: { rawPrompt, rawSettings },
830
+ rawResponse: { headers: responseHeaders },
831
+ warnings: []
832
+ };
833
+ }
834
+ async doStream(options) {
835
+ const args = this.getArgs(options);
836
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
837
+ url: this.config.url({
838
+ path: "/completions",
839
+ modelId: this.modelId
840
+ }),
841
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
842
+ body: __spreadProps(__spreadValues({}, args), {
843
+ stream: true,
844
+ // only include stream_options when in strict compatibility mode:
845
+ stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
846
+ }),
847
+ failedResponseHandler: openrouterFailedResponseHandler,
848
+ successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
849
+ openrouterCompletionChunkSchema
850
+ ),
851
+ abortSignal: options.abortSignal,
852
+ fetch: this.config.fetch
853
+ });
854
+ const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
855
+ let finishReason = "other";
856
+ let usage = {
857
+ promptTokens: Number.NaN,
858
+ completionTokens: Number.NaN
859
+ };
860
+ let logprobs;
861
+ return {
862
+ stream: response.pipeThrough(
863
+ new TransformStream({
864
+ transform(chunk, controller) {
865
+ if (!chunk.success) {
866
+ finishReason = "error";
867
+ controller.enqueue({ type: "error", error: chunk.error });
868
+ return;
869
+ }
870
+ const value = chunk.value;
871
+ if ("error" in value) {
872
+ finishReason = "error";
873
+ controller.enqueue({ type: "error", error: value.error });
874
+ return;
875
+ }
876
+ if (value.usage != null) {
877
+ usage = {
878
+ promptTokens: value.usage.prompt_tokens,
879
+ completionTokens: value.usage.completion_tokens
880
+ };
881
+ }
882
+ const choice = value.choices[0];
883
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
884
+ finishReason = mapOpenRouterFinishReason(choice.finish_reason);
885
+ }
886
+ if ((choice == null ? void 0 : choice.text) != null) {
887
+ controller.enqueue({
888
+ type: "text-delta",
889
+ textDelta: choice.text
890
+ });
891
+ }
892
+ const mappedLogprobs = mapOpenRouterCompletionLogProbs(
893
+ choice == null ? void 0 : choice.logprobs
894
+ );
895
+ if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
896
+ if (logprobs === void 0) logprobs = [];
897
+ logprobs.push(...mappedLogprobs);
898
+ }
899
+ },
900
+ flush(controller) {
901
+ controller.enqueue({
902
+ type: "finish",
903
+ finishReason,
904
+ logprobs,
905
+ usage
906
+ });
907
+ }
908
+ })
909
+ ),
910
+ rawCall: { rawPrompt, rawSettings },
911
+ rawResponse: { headers: responseHeaders },
912
+ warnings: []
913
+ };
914
+ }
915
+ };
916
+ var openAICompletionResponseSchema = import_zod3.z.object({
917
+ choices: import_zod3.z.array(
918
+ import_zod3.z.object({
919
+ text: import_zod3.z.string(),
920
+ finish_reason: import_zod3.z.string(),
921
+ logprobs: import_zod3.z.object({
922
+ tokens: import_zod3.z.array(import_zod3.z.string()),
923
+ token_logprobs: import_zod3.z.array(import_zod3.z.number()),
924
+ top_logprobs: import_zod3.z.array(import_zod3.z.record(import_zod3.z.string(), import_zod3.z.number())).nullable()
925
+ }).nullable().optional()
926
+ })
927
+ ),
928
+ usage: import_zod3.z.object({
929
+ prompt_tokens: import_zod3.z.number(),
930
+ completion_tokens: import_zod3.z.number()
931
+ })
932
+ });
933
+ var openrouterCompletionChunkSchema = import_zod3.z.union([
934
+ import_zod3.z.object({
935
+ choices: import_zod3.z.array(
936
+ import_zod3.z.object({
937
+ text: import_zod3.z.string(),
938
+ finish_reason: import_zod3.z.string().nullish(),
939
+ index: import_zod3.z.number(),
940
+ logprobs: import_zod3.z.object({
941
+ tokens: import_zod3.z.array(import_zod3.z.string()),
942
+ token_logprobs: import_zod3.z.array(import_zod3.z.number()),
943
+ top_logprobs: import_zod3.z.array(import_zod3.z.record(import_zod3.z.string(), import_zod3.z.number())).nullable()
944
+ }).nullable().optional()
945
+ })
946
+ ),
947
+ usage: import_zod3.z.object({
948
+ prompt_tokens: import_zod3.z.number(),
949
+ completion_tokens: import_zod3.z.number()
950
+ }).optional().nullable()
951
+ }),
952
+ openAIErrorDataSchema
953
+ ]);
954
+
955
+ // src/openrouter-facade.ts
956
+ var OpenRouter = class {
957
+ /**
958
+ * Creates a new OpenRouter provider instance.
959
+ */
960
+ constructor(options = {}) {
961
+ var _a, _b;
962
+ this.baseURL = (_b = (0, import_provider_utils5.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
963
+ this.apiKey = options.apiKey;
964
+ this.organization = options.organization;
965
+ this.project = options.project;
966
+ this.headers = options.headers;
967
+ }
968
+ get baseConfig() {
969
+ return {
970
+ organization: this.organization,
971
+ baseURL: this.baseURL,
972
+ headers: () => __spreadValues({
973
+ Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
974
+ apiKey: this.apiKey,
975
+ environmentVariableName: "OPENAI_API_KEY",
976
+ description: "OpenRouter"
977
+ })}`,
978
+ "OpenRouter-Organization": this.organization,
979
+ "OpenRouter-Project": this.project
980
+ }, this.headers)
981
+ };
982
+ }
983
+ chat(modelId, settings = {}) {
984
+ return new OpenRouterChatLanguageModel(modelId, settings, __spreadProps(__spreadValues({
985
+ provider: "openrouter.chat"
986
+ }, this.baseConfig), {
987
+ compatibility: "strict",
988
+ url: ({ path }) => `${this.baseURL}${path}`
989
+ }));
990
+ }
991
+ completion(modelId, settings = {}) {
992
+ return new OpenRouterCompletionLanguageModel(modelId, settings, __spreadProps(__spreadValues({
993
+ provider: "openrouter.completion"
994
+ }, this.baseConfig), {
995
+ compatibility: "strict",
996
+ url: ({ path }) => `${this.baseURL}${path}`
997
+ }));
998
+ }
999
+ };
1000
+
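
A minimal usage sketch for the facade class above; the model slugs are illustrative, and note that in this version loadApiKey falls back to the OPENAI_API_KEY environment variable when no apiKey is passed.

import { OpenRouter } from "@openrouter/ai-sdk-provider";

const provider = new OpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });

// Chat model, posted to /chat/completions with strict compatibility:
const chatModel = provider.chat("openai/gpt-3.5-turbo");

// Completion model, posted to /completions:
const completionModel = provider.completion("openai/gpt-3.5-turbo-instruct");
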
1001
+ // src/openrouter-provider.ts
1002
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
1003
+ function createOpenRouter(options = {}) {
1004
+ var _a, _b, _c;
1005
+ const baseURL = (_b = (0, import_provider_utils6.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://openrouter.ai/api/v1";
1006
+ const compatibility = (_c = options.compatibility) != null ? _c : "compatible";
1007
+ const getHeaders = () => __spreadValues({
1008
+ Authorization: `Bearer ${(0, import_provider_utils6.loadApiKey)({
1009
+ apiKey: options.apiKey,
1010
+ environmentVariableName: "OPENAI_API_KEY",
1011
+ description: "OpenRouter"
1012
+ })}`,
1013
+ "OpenRouter-Organization": options.organization,
1014
+ "OpenRouter-Project": options.project
1015
+ }, options.headers);
1016
+ const createChatModel = (modelId, settings = {}) => new OpenRouterChatLanguageModel(modelId, settings, {
1017
+ provider: "openrouter.chat",
1018
+ url: ({ path }) => `${baseURL}${path}`,
1019
+ headers: getHeaders,
1020
+ compatibility,
1021
+ fetch: options.fetch
1022
+ });
1023
+ const createCompletionModel = (modelId, settings = {}) => new OpenRouterCompletionLanguageModel(modelId, settings, {
1024
+ provider: "openrouter.completion",
1025
+ url: ({ path }) => `${baseURL}${path}`,
1026
+ headers: getHeaders,
1027
+ compatibility,
1028
+ fetch: options.fetch
1029
+ });
1030
+ const createLanguageModel = (modelId, settings) => {
1031
+ if (new.target) {
1032
+ throw new Error(
1033
+ "The OpenRouter model function cannot be called with the new keyword."
1034
+ );
1035
+ }
1036
+ if (modelId === "openai/gpt-3.5-turbo-instruct") {
1037
+ return createCompletionModel(
1038
+ modelId,
1039
+ settings
1040
+ );
1041
+ }
1042
+ return createChatModel(modelId, settings);
1043
+ };
1044
+ const provider = function(modelId, settings) {
1045
+ return createLanguageModel(modelId, settings);
1046
+ };
1047
+ provider.languageModel = createLanguageModel;
1048
+ provider.chat = createChatModel;
1049
+ provider.completion = createCompletionModel;
1050
+ return provider;
1051
+ }
1052
+ var openrouter = createOpenRouter({
1053
+ compatibility: "strict"
1054
+ // strict for OpenRouter API
1055
+ });
1056
+ // Annotate the CommonJS export names for ESM import in node:
1057
+ 0 && (module.exports = {
1058
+ OpenRouter,
1059
+ createOpenRouter,
1060
+ openrouter
1061
+ });
1062
+ //# sourceMappingURL=index.js.map
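
Finally, a minimal sketch of how the createOpenRouter export is typically wired into the Vercel AI SDK; this assumes the ai package's generateText helper, and the model slug and prompt are illustrative.

import { generateText } from "ai";
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({
  // If omitted, this version reads the OPENAI_API_KEY environment variable (see loadApiKey above).
  apiKey: process.env.OPENROUTER_API_KEY,
});

const { text } = await generateText({
  // Any OpenRouter model slug works; "openai/gpt-3.5-turbo-instruct" is routed to the completion model.
  model: openrouter("anthropic/claude-3-haiku"),
  prompt: "Say hello in one short sentence.",
});

console.log(text);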