@openrouter/ai-sdk-provider 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,950 @@
1
// esbuild-generated runtime helpers that lower object spread ({...a}) and
// object rest ({a, ...rest}) syntax for the compiled code below.
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Copy one value onto `obj`: Object.defineProperty when the key already exists,
// plain assignment otherwise.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Lowering of `{...b}`: copy b's own enumerable string keys, then (when the
// engine supports symbols) its own enumerable symbol keys, onto `a`.
var __spreadValues = (a, b) => {
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
// Lowering of `{...a, k: v}` tails: apply b's property descriptors to `a`.
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
// Lowering of object rest (`const {x, ...rest} = source`): copy every own
// enumerable property of `source` whose key is not listed in `exclude`.
var __objRest = (source, exclude) => {
  var target = {};
  for (var prop in source)
    if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
      target[prop] = source[prop];
  if (source != null && __getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(source)) {
      if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
        target[prop] = source[prop];
    }
  return target;
};
32
+
33
+ // src/openrouter-chat-language-model.ts
34
+ import {
35
+ InvalidResponseDataError,
36
+ UnsupportedFunctionalityError
37
+ } from "@ai-sdk/provider";
38
+ import {
39
+ combineHeaders,
40
+ createEventSourceResponseHandler,
41
+ createJsonResponseHandler,
42
+ generateId,
43
+ isParsableJson,
44
+ postJsonToApi
45
+ } from "@ai-sdk/provider-utils";
46
+ import { z as z2 } from "zod";
47
+
48
+ // src/convert-to-openrouter-chat-messages.ts
49
+ import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
50
// Translate an AI SDK prompt (array of role/content messages) into the
// OpenRouter (OpenAI-compatible) chat message format.
function convertToOpenRouterChatMessages(prompt) {
  const result = [];
  for (const { role, content } of prompt) {
    if (role === "system") {
      // System content is already a plain string.
      result.push({ role: "system", content });
    } else if (role === "user") {
      const firstPart = content[0];
      if (content.length === 1 && firstPart?.type === "text") {
        // Collapse a single text part into a plain-string message.
        result.push({ role: "user", content: firstPart.text });
      } else {
        // Multi-part message: map each part to the OpenAI content-part shape.
        const mappedParts = content.map((part) => {
          if (part.type === "text") {
            return { type: "text", text: part.text };
          }
          if (part.type === "image") {
            // URLs pass through; binary images become data URIs
            // (mime type defaults to image/jpeg when unspecified).
            const url = part.image instanceof URL
              ? part.image.toString()
              : `data:${part.mimeType ?? "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`;
            return { type: "image_url", image_url: { url } };
          }
        });
        result.push({ role: "user", content: mappedParts });
      }
    } else if (role === "assistant") {
      // Concatenate text parts; collect tool calls separately.
      let text = "";
      const toolCalls = [];
      for (const part of content) {
        if (part.type === "text") {
          text += part.text;
        } else if (part.type === "tool-call") {
          toolCalls.push({
            id: part.toolCallId,
            type: "function",
            function: {
              name: part.toolName,
              arguments: JSON.stringify(part.args)
            }
          });
        } else {
          throw new Error(`Unsupported part: ${part}`);
        }
      }
      result.push({
        role: "assistant",
        content: text,
        // Omit the field entirely when there are no tool calls.
        tool_calls: toolCalls.length > 0 ? toolCalls : void 0
      });
    } else if (role === "tool") {
      // One tool message per tool result, keyed by the originating call id.
      for (const toolResponse of content) {
        result.push({
          role: "tool",
          tool_call_id: toolResponse.toolCallId,
          content: JSON.stringify(toolResponse.result)
        });
      }
    } else {
      throw new Error(`Unsupported role: ${role}`);
    }
  }
  return result;
}
136
+
137
+ // src/map-openrouter-chat-logprobs.ts
138
// Convert OpenRouter/OpenAI chat logprobs into the AI SDK shape
// ({ token, logprob, topLogprobs }); returns undefined when absent.
function mapOpenRouterChatLogProbsOutput(logprobs) {
  const content = logprobs?.content;
  if (content == null) {
    return void 0;
  }
  return content.map((entry) => {
    const tops = entry.top_logprobs
      ? entry.top_logprobs.map((top) => ({ token: top.token, logprob: top.logprob }))
      : [];
    return { token: entry.token, logprob: entry.logprob, topLogprobs: tops };
  });
}
149
+
150
+ // src/map-openrouter-finish-reason.ts
151
// Map an OpenRouter finish_reason string onto the AI SDK finish-reason
// vocabulary; anything unrecognized (including null/undefined) is "unknown".
function mapOpenRouterFinishReason(finishReason) {
  if (finishReason === "stop") {
    return "stop";
  }
  if (finishReason === "length") {
    return "length";
  }
  if (finishReason === "content_filter") {
    return "content-filter";
  }
  // Both legacy function calls and tool calls map to the same reason.
  if (finishReason === "function_call" || finishReason === "tool_calls") {
    return "tool-calls";
  }
  return "unknown";
}
166
+
167
+ // src/openrouter-error.ts
168
+ import { z } from "zod";
169
+ import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
170
// Shape of the error payload returned by OpenRouter's OpenAI-compatible API:
// a top-level `error` object with message/type/param/code.
var openAIErrorDataSchema = z.object({
  error: z.object({
    message: z.string(),
    type: z.string(),
    param: z.any().nullable(),
    code: z.string().nullable()
  })
});
// Shared failed-response handler for all OpenRouter requests: parses the
// error body against the schema above and surfaces `error.message`.
var openrouterFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: openAIErrorDataSchema,
  errorToMessage: (data) => data.error.message
});
182
+
183
+ // src/openrouter-chat-language-model.ts
184
// Chat-completions language model backed by OpenRouter's OpenAI-compatible
// /chat/completions endpoint. Implements the AI SDK LanguageModelV1 spec ("v1").
var OpenRouterChatLanguageModel = class {
  // modelId: OpenRouter model identifier sent as `model` in the request body.
  // settings: per-model options (logitBias, logprobs, user, parallelToolCalls).
  // config: provider wiring (provider name, url builder, headers, compatibility, fetch).
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Object generation defaults to forcing a tool call.
    this.defaultObjectGenerationMode = "tool";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Build the /chat/completions request body from AI SDK call options.
  // The shape of the result depends on the generation mode (`mode.type`).
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    const type = mode.type;
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      logit_bias: this.settings.logitBias,
      // `logprobs: true` is requested when the setting is `true` or a number;
      logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number" ? true : void 0,
      // a numeric setting becomes the top_logprobs count (plain `true` sends 0).
      top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      user: this.settings.user,
      parallel_tool_calls: this.settings.parallelToolCalls,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      seed,
      // messages:
      messages: convertToOpenRouterChatMessages(prompt)
    };
    switch (type) {
      case "regular": {
        // Plain generation, optionally with caller-supplied tools/tool_choice.
        return __spreadValues(__spreadValues({}, baseArgs), prepareToolsAndToolChoice(mode));
      }
      case "object-json": {
        // JSON mode via response_format.
        return __spreadProps(__spreadValues({}, baseArgs), {
          response_format: { type: "json_object" }
        });
      }
      case "object-tool": {
        // Force the model to call the single provided tool.
        return __spreadProps(__spreadValues({}, baseArgs), {
          tool_choice: { type: "function", function: { name: mode.tool.name } },
          tools: [
            {
              type: "function",
              function: {
                name: mode.tool.name,
                description: mode.tool.description,
                parameters: mode.tool.parameters
              }
            }
          ]
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: POST the request, validate the response against
  // openAIChatResponseSchema, and map the first choice to the AI SDK result.
  async doGenerate(options) {
    var _b, _c;
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders(this.config.headers(), options.headers),
      body: args,
      failedResponseHandler: openrouterFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAIChatResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // Split request body into the raw prompt (messages) and remaining settings
    // for rawCall reporting.
    const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
    const choice = response.choices[0];
    if (choice == null) {
      throw new Error("No choice in response");
    }
    return {
      text: (_b = choice.message.content) != null ? _b : void 0,
      // Tool calls missing an id get a generated one.
      toolCalls: (_c = choice.message.tool_calls) == null ? void 0 : _c.map((toolCall) => {
        var _a2;
        return {
          toolCallType: "function",
          toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
          toolName: toolCall.function.name,
          args: toolCall.function.arguments
        };
      }),
      finishReason: mapOpenRouterFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: [],
      logprobs: mapOpenRouterChatLogProbsOutput(choice.logprobs)
    };
  }
  // Streaming generation: POST with stream:true and transform the SSE chunk
  // stream into AI SDK stream parts (text-delta / tool-call-delta / tool-call /
  // error / finish). Tool-call arguments arrive in fragments and are
  // accumulated per `index` in `toolCalls` until they parse as JSON.
  async doStream(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders(this.config.headers(), options.headers),
      body: __spreadProps(__spreadValues({}, args), {
        stream: true,
        // only include stream_options when in strict compatibility mode:
        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
      }),
      failedResponseHandler: openrouterFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openrouterChatChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const _a = args, { messages: rawPrompt } = _a, rawSettings = __objRest(_a, ["messages"]);
    // Accumulators that live across chunks; emitted from flush().
    const toolCalls = [];
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    let logprobs;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
            // Schema-validation failure for this chunk.
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            // In-band API error payload (matches openAIErrorDataSchema).
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            // Usage may arrive on any chunk; keep the latest.
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenRouterFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            const mappedLogprobs = mapOpenRouterChatLogProbsOutput(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              if (logprobs === void 0) logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
            if (delta.tool_calls != null) {
              for (const toolCallDelta of delta.tool_calls) {
                const index = toolCallDelta.index;
                // First fragment for this index: must carry type, id, and name.
                if (toolCalls[index] == null) {
                  if (toolCallDelta.type !== "function") {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function' type.`
                    });
                  }
                  if (toolCallDelta.id == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'id' to be a string.`
                    });
                  }
                  if (((_a2 = toolCallDelta.function) == null ? void 0 : _a2.name) == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function.name' to be a string.`
                    });
                  }
                  toolCalls[index] = {
                    id: toolCallDelta.id,
                    type: "function",
                    function: {
                      name: toolCallDelta.function.name,
                      arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
                    }
                  };
                  const toolCall2 = toolCalls[index];
                  if (toolCall2 == null) {
                    throw new Error("Tool call is missing");
                  }
                  // If the very first fragment already contains complete JSON
                  // arguments, emit both the delta and the finished tool call.
                  if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
                    controller.enqueue({
                      type: "tool-call-delta",
                      toolCallType: "function",
                      toolCallId: toolCall2.id,
                      toolName: toolCall2.function.name,
                      argsTextDelta: toolCall2.function.arguments
                    });
                    controller.enqueue({
                      type: "tool-call",
                      toolCallType: "function",
                      toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
                      toolName: toolCall2.function.name,
                      args: toolCall2.function.arguments
                    });
                  }
                  continue;
                }
                // Subsequent fragment: append argument text to the accumulator.
                const toolCall = toolCalls[index];
                if (toolCall == null) {
                  throw new Error("Tool call is missing");
                }
                if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
                  toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
                }
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.id,
                  toolName: toolCall.function.name,
                  argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
                });
                // Emit the finished tool call once accumulated arguments parse as JSON.
                if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
                  controller.enqueue({
                    type: "tool-call",
                    toolCallType: "function",
                    toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
                    toolName: toolCall.function.name,
                    args: toolCall.function.arguments
                  });
                }
              }
            }
          },
          // End of stream: report the accumulated finish reason, logprobs, and usage.
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
};
469
// Zod schema for a non-streaming /chat/completions response. Limited to the
// fields doGenerate actually reads (choices, message, logprobs, usage).
var openAIChatResponseSchema = z2.object({
  choices: z2.array(
    z2.object({
      message: z2.object({
        role: z2.literal("assistant"),
        content: z2.string().nullable().optional(),
        tool_calls: z2.array(
          z2.object({
            // id may be absent; doGenerate substitutes a generated id.
            id: z2.string().optional().nullable(),
            type: z2.literal("function"),
            function: z2.object({
              name: z2.string(),
              arguments: z2.string()
            })
          })
        ).optional()
      }),
      index: z2.number(),
      logprobs: z2.object({
        content: z2.array(
          z2.object({
            token: z2.string(),
            logprob: z2.number(),
            top_logprobs: z2.array(
              z2.object({
                token: z2.string(),
                logprob: z2.number()
              })
            )
          })
        ).nullable()
      }).nullable().optional(),
      finish_reason: z2.string().optional().nullable()
    })
  ),
  usage: z2.object({
    prompt_tokens: z2.number(),
    completion_tokens: z2.number()
  })
});
509
// Zod schema for one streaming /chat/completions SSE chunk: either a delta
// chunk (choices with partial content/tool_calls, optional usage) or an
// in-band error payload (openAIErrorDataSchema).
var openrouterChatChunkSchema = z2.union([
  z2.object({
    choices: z2.array(
      z2.object({
        delta: z2.object({
          role: z2.enum(["assistant"]).optional(),
          content: z2.string().nullish(),
          tool_calls: z2.array(
            z2.object({
              // index correlates fragments of the same tool call across chunks.
              index: z2.number(),
              id: z2.string().nullish(),
              type: z2.literal("function").optional(),
              function: z2.object({
                name: z2.string().nullish(),
                arguments: z2.string().nullish()
              })
            })
          ).nullish()
        }).nullish(),
        logprobs: z2.object({
          content: z2.array(
            z2.object({
              token: z2.string(),
              logprob: z2.number(),
              top_logprobs: z2.array(
                z2.object({
                  token: z2.string(),
                  logprob: z2.number()
                })
              )
            })
          ).nullable()
        }).nullish(),
        finish_reason: z2.string().nullable().optional(),
        index: z2.number()
      })
    ),
    // Usage is typically only present on the final chunk when requested.
    usage: z2.object({
      prompt_tokens: z2.number(),
      completion_tokens: z2.number()
    }).nullish()
  }),
  openAIErrorDataSchema
]);
553
// Build the `tools` / `tool_choice` request fields for "regular" generation
// mode. Both fields are undefined when no tools are provided.
function prepareToolsAndToolChoice(mode) {
  const hasTools = mode.tools != null && mode.tools.length > 0;
  if (!hasTools) {
    return { tools: void 0, tool_choice: void 0 };
  }
  // Map AI SDK tool definitions onto the OpenAI function-tool shape.
  const mappedTools = mode.tools.map((tool) => ({
    type: "function",
    function: {
      name: tool.name,
      description: tool.description,
      parameters: tool.parameters
    }
  }));
  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
    return { tools: mappedTools, tool_choice: void 0 };
  }
  const choiceType = toolChoice.type;
  // These three pass through as plain strings.
  if (choiceType === "auto" || choiceType === "none" || choiceType === "required") {
    return { tools: mappedTools, tool_choice: choiceType };
  }
  // A specific tool is forced via the function-selector object.
  if (choiceType === "tool") {
    return {
      tools: mappedTools,
      tool_choice: {
        type: "function",
        function: {
          name: toolChoice.toolName
        }
      }
    };
  }
  throw new Error(`Unsupported tool choice type: ${choiceType}`);
}
593
+
594
+ // src/openrouter-completion-language-model.ts
595
+ import {
596
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError3
597
+ } from "@ai-sdk/provider";
598
+ import {
599
+ combineHeaders as combineHeaders2,
600
+ createEventSourceResponseHandler as createEventSourceResponseHandler2,
601
+ createJsonResponseHandler as createJsonResponseHandler2,
602
+ postJsonToApi as postJsonToApi2
603
+ } from "@ai-sdk/provider-utils";
604
+ import { z as z3 } from "zod";
605
+
606
+ // src/convert-to-openrouter-completion-prompt.ts
607
+ import {
608
+ InvalidPromptError,
609
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError2
610
+ } from "@ai-sdk/provider";
611
// Convert an AI SDK prompt into a single text prompt for the /completions
// endpoint.
//
// When `inputFormat` is "prompt" and the prompt is exactly one user message
// with a single text part, that text passes through verbatim. Otherwise the
// messages are rendered as a labeled transcript ("user:" / "assistant:"), a
// leading system message becomes an unlabeled preamble, and a stop sequence
// ("\n{user}:") is returned so generation halts before a fabricated user turn.
//
// Throws InvalidPromptError for a system message that is not first, and
// UnsupportedFunctionalityError for images, tool calls, and tool messages,
// which cannot be represented in a plain-text prompt.
function convertToOpenRouterCompletionPrompt({
  prompt,
  inputFormat,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a bare single-text user prompt is used as-is.
  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0] && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0] && prompt[0].content[0].type === "text") {
    return { prompt: prompt[0].content[0].text };
  }
  let text = "";
  // A leading system message becomes an unlabeled preamble paragraph.
  if (prompt[0] && prompt[0].role === "system") {
    text += `${prompt[0].content}

`;
    prompt = prompt.slice(1);
  }
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        // BUG FIX: this message was a double-quoted string, so "${content}"
        // appeared literally in the error instead of being interpolated.
        // It is now a template literal.
        throw new InvalidPromptError({
          message: `Unexpected system message in prompt: ${content}`,
          prompt
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new UnsupportedFunctionalityError2({
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:
${userMessage}

`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError2({
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:
${assistantMessage}

`;
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError2({
          functionality: "tool messages"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Open the assistant turn the model should complete.
  text += `${assistant}:
`;
  return {
    prompt: text,
    stopSequences: [`
${user}:`]
  };
}
692
+
693
+ // src/map-openrouter-completion-logprobs.ts
694
// Convert completion-endpoint logprobs (parallel arrays of tokens and
// token_logprobs, plus optional per-token top_logprobs records) into the
// AI SDK shape; returns undefined when absent.
function mapOpenRouterCompletionLogProbs(logprobs) {
  if (logprobs == null) {
    return void 0;
  }
  return logprobs.tokens.map((token, index) => {
    // Missing/null per-token logprob falls back to 0.
    const logprob = logprobs.token_logprobs[index] ?? 0;
    const topEntries = logprobs.top_logprobs
      ? Object.entries(logprobs.top_logprobs[index] ?? {}).map(([topToken, topLogprob]) => ({
          token: topToken,
          logprob: topLogprob
        }))
      : [];
    return { token, logprob, topLogprobs: topEntries };
  });
}
709
+
710
+ // src/openrouter-completion-language-model.ts
711
// Text-completions language model backed by OpenRouter's OpenAI-compatible
// /completions endpoint. Implements the AI SDK LanguageModelV1 spec ("v1").
// Object-generation modes and tools are not supported by this endpoint.
var OpenRouterCompletionLanguageModel = class {
  // modelId: OpenRouter model identifier sent as `model` in the request body.
  // settings: per-model options (echo, logitBias, logprobs, suffix, user).
  // config: provider wiring (provider name, url builder, headers, compatibility, fetch).
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Completion models cannot generate objects.
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Build the /completions request body from AI SDK call options.
  // Throws UnsupportedFunctionalityError for tools and all object modes.
  getArgs({
    mode,
    inputFormat,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const { prompt: completionPrompt, stopSequences } = convertToOpenRouterCompletionPrompt({ prompt, inputFormat });
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      // a numeric setting selects how many logprobs to return (plain `true` sends 0).
      logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stopSequences
    };
    switch (type) {
      case "regular": {
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new UnsupportedFunctionalityError3({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
          throw new UnsupportedFunctionalityError3({
            functionality: "toolChoice"
          });
        }
        return baseArgs;
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError3({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError3({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError3({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: POST the request, validate the response against
  // openAICompletionResponseSchema, and map the first choice to the AI SDK result.
  async doGenerate(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi2({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders2(this.config.headers(), options.headers),
      body: args,
      failedResponseHandler: openrouterFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler2(
        openAICompletionResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // Split request body into the raw prompt and remaining settings for rawCall.
    const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
    const choice = response.choices[0];
    if (!choice) {
      throw new Error("No choice in OpenRouter completion response");
    }
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenRouterFinishReason(choice.finish_reason),
      logprobs: mapOpenRouterCompletionLogProbs(choice.logprobs),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
  // Streaming generation: POST with stream:true and transform the SSE chunk
  // stream into AI SDK stream parts (text-delta / error / finish).
  async doStream(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi2({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders2(this.config.headers(), options.headers),
      // FIX: reuse the already-computed `args` instead of calling
      // this.getArgs(options) a second time (which redundantly re-converted
      // the prompt); this also matches the chat model's doStream and the
      // rawSettings derivation below, which are both based on `args`.
      body: __spreadProps(__spreadValues({}, args), {
        stream: true,
        // only include stream_options when in strict compatibility mode:
        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
      }),
      failedResponseHandler: openrouterFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler2(
        openrouterCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const _a = args, { prompt: rawPrompt } = _a, rawSettings = __objRest(_a, ["prompt"]);
    // Accumulators that live across chunks; emitted from flush().
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    let logprobs;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // Schema-validation failure for this chunk.
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            // In-band API error payload (matches openAIErrorDataSchema).
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            // Usage may arrive on any chunk; keep the latest.
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenRouterFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
            const mappedLogprobs = mapOpenRouterCompletionLogProbs(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              if (logprobs === void 0) logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
          },
          // End of stream: report the accumulated finish reason, logprobs, and usage.
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
};
908
// Zod schema for a non-streaming /completions response. Limited to the fields
// doGenerate actually reads (choice text/finish_reason/logprobs, usage).
var openAICompletionResponseSchema = z3.object({
  choices: z3.array(
    z3.object({
      text: z3.string(),
      finish_reason: z3.string(),
      // Completion logprobs are parallel arrays; top_logprobs maps token -> logprob.
      logprobs: z3.object({
        tokens: z3.array(z3.string()),
        token_logprobs: z3.array(z3.number()),
        top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
      }).nullable().optional()
    })
  ),
  usage: z3.object({
    prompt_tokens: z3.number(),
    completion_tokens: z3.number()
  })
});
925
// Zod schema for one streaming /completions SSE chunk: either a text chunk
// (choices with text/finish_reason/logprobs, optional usage) or an in-band
// error payload (openAIErrorDataSchema).
var openrouterCompletionChunkSchema = z3.union([
  z3.object({
    choices: z3.array(
      z3.object({
        text: z3.string(),
        finish_reason: z3.string().nullish(),
        index: z3.number(),
        logprobs: z3.object({
          tokens: z3.array(z3.string()),
          token_logprobs: z3.array(z3.number()),
          top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
        }).nullable().optional()
      })
    ),
    // Usage is typically only present on the final chunk when requested.
    usage: z3.object({
      prompt_tokens: z3.number(),
      completion_tokens: z3.number()
    }).optional().nullable()
  }),
  openAIErrorDataSchema
]);
946
+ export {
947
+ OpenRouterChatLanguageModel,
948
+ OpenRouterCompletionLanguageModel
949
+ };
950
+ //# sourceMappingURL=index.mjs.map