@llmgateway/ai-sdk-provider 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -147
- package/dist/index.js +183 -175
- package/dist/index.js.map +1 -1
- package/dist/{index.cjs → index.mjs} +176 -184
- package/dist/index.mjs.map +1 -0
- package/dist/internal/index.js +176 -169
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/{index.cjs → index.mjs} +170 -177
- package/dist/internal/index.mjs.map +1 -0
- package/package.json +1 -2
- package/dist/index.cjs.map +0 -1
- package/dist/internal/index.cjs.map +0 -1
- package/dist/{index.d.cts → index.d.mts} +0 -0
- package/dist/internal/{index.d.cts → index.d.mts} +0 -0
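The substantive change in this release is the module format of the published bundles: the `dist` output moves from CommonJS (`index.cjs`, wired together with esbuild's `__export`/`__copyProps`/`__toCommonJS` helpers and `module.exports`) to ES modules (`index.mjs` with native `import`/`export` statements for `@ai-sdk/provider`, `@ai-sdk/provider-utils`, and `zod`), and the type declarations are renamed from `.d.cts` to `.d.mts`. The provider logic itself is unchanged. Below is a minimal consumption sketch, not taken from the package's docs: it assumes an ESM project, treats the model id and the extra header as placeholders, and otherwise only uses names that appear in this diff (the three named exports, the `apiKey`/`baseURL`/`headers`/`compatibility` options, the `LLMGATEWAY_API_KEY` environment variable, and the default base URL).

```ts
// Sketch only (not from the package README): the exports and option names below are the
// ones visible in this diff; the model id and the extra header are hypothetical examples,
// and calling the provider as a function follows the usual AI SDK provider convention.
import { createLLMGateway, llmgateway } from "@llmgateway/ai-sdk-provider";

// Default instance created in the bundle with compatibility: "strict"; the API key is
// resolved from the LLMGATEWAY_API_KEY environment variable via loadApiKey.
const model = llmgateway("openai/gpt-4o"); // hypothetical model id

// Custom instance; every option shown here appears in createLLMGateway in the diff.
const gateway = createLLMGateway({
  apiKey: process.env.LLMGATEWAY_API_KEY,
  baseURL: "https://api.llmgateway.io/v1", // default, normalized with withoutTrailingSlash
  headers: { "x-example-header": "value" }, // hypothetical extra header, merged into requests
  compatibility: "strict" // defaults to "compatible" when omitted
});
```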
@@ -1,9 +1,6 @@
-"use strict";
 var __defProp = Object.defineProperty;
 var __defProps = Object.defineProperties;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
-var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getOwnPropSymbols = Object.getOwnPropertySymbols;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __propIsEnum = Object.prototype.propertyIsEnumerable;
@@ -32,65 +29,53 @@ var __objRest = (source, exclude) => {
   }
   return target;
 };
-var __export = (target, all) => {
-  for (var name in all)
-    __defProp(target, name, { get: all[name], enumerable: true });
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
-// src/index.ts
-var index_exports = {};
-__export(index_exports, {
-  LLMGateway: () => LLMGateway,
-  createLLMGateway: () => createLLMGateway,
-  llmgateway: () => llmgateway
-});
-module.exports = __toCommonJS(index_exports);
 
 // src/llmgateway-facade.ts
-
+import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
 
 // src/schemas/reasoning-details.ts
-
-var ReasoningDetailSummarySchema =
-  type:
-  summary:
+import { z } from "zod";
+var ReasoningDetailSummarySchema = z.object({
+  type: z.literal("reasoning.summary" /* Summary */),
+  summary: z.string()
 });
-var ReasoningDetailEncryptedSchema =
-  type:
-  data:
+var ReasoningDetailEncryptedSchema = z.object({
+  type: z.literal("reasoning.encrypted" /* Encrypted */),
+  data: z.string()
 });
-var ReasoningDetailTextSchema =
-  type:
-  text:
-  signature:
+var ReasoningDetailTextSchema = z.object({
+  type: z.literal("reasoning.text" /* Text */),
+  text: z.string().nullish(),
+  signature: z.string().nullish()
 });
-var ReasoningDetailUnionSchema =
+var ReasoningDetailUnionSchema = z.union([
   ReasoningDetailSummarySchema,
   ReasoningDetailEncryptedSchema,
   ReasoningDetailTextSchema
 ]);
-var ReasoningDetailsWithUnknownSchema =
+var ReasoningDetailsWithUnknownSchema = z.union([
   ReasoningDetailUnionSchema,
-
+  z.unknown().transform(() => null)
 ]);
-var ReasoningDetailArraySchema =
+var ReasoningDetailArraySchema = z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));
 
 // src/llmgateway-chat-language-model.ts
-
-
-
+import {
+  InvalidResponseDataError,
+  UnsupportedFunctionalityError
+} from "@ai-sdk/provider";
+import {
+  combineHeaders,
+  createEventSourceResponseHandler,
+  createJsonResponseHandler,
+  generateId,
+  isParsableJson,
+  postJsonToApi
+} from "@ai-sdk/provider-utils";
+import { z as z3 } from "zod";
 
 // src/convert-to-llmgateway-chat-messages.ts
-
+import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
 function getCacheControl(providerMetadata) {
   var _a, _b, _c;
   const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
@@ -136,7 +121,7 @@ function convertToLLMGatewayChatMessages(prompt) {
           return {
             type: "image_url",
             image_url: {
-              url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${
+              url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
                 part.image
               )}`
             },
@@ -150,7 +135,7 @@ function convertToLLMGatewayChatMessages(prompt) {
              filename: String(
                (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.llmgateway) == null ? void 0 : _d.filename
              ),
-              file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${
+              file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
            },
            cache_control: cacheControl
          };
@@ -276,17 +261,17 @@ function mapLLMGatewayFinishReason(finishReason) {
 }
 
 // src/llmgateway-error.ts
-
-
-var LLMGatewayErrorResponseSchema =
-  error:
-    message:
-    type:
-    param:
-    code:
+import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+import { z as z2 } from "zod";
+var LLMGatewayErrorResponseSchema = z2.object({
+  error: z2.object({
+    message: z2.string(),
+    type: z2.string(),
+    param: z2.any().nullable(),
+    code: z2.string().nullable()
   })
 });
-var llmgatewayFailedResponseHandler =
+var llmgatewayFailedResponseHandler = createJsonErrorResponseHandler({
   errorSchema: LLMGatewayErrorResponseSchema,
   errorToMessage: (data) => data.error.message
 });
@@ -377,7 +362,7 @@ var LLMGatewayChatLanguageModel = class {
       // Handle all non-text types with a single default case
      default: {
        const _exhaustiveCheck = type;
-        throw new
+        throw new UnsupportedFunctionalityError({
          functionality: `${_exhaustiveCheck} mode`
        });
      }
@@ -386,15 +371,15 @@ var LLMGatewayChatLanguageModel = class {
   async doGenerate(options) {
     var _b, _c, _d, _e, _f, _g, _h, _i, _j;
     const args = this.getArgs(options);
-    const { responseHeaders, value: response } = await
+    const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: llmgatewayFailedResponseHandler,
-      successfulResponseHandler:
+      successfulResponseHandler: createJsonResponseHandler(
        LLMGatewayNonStreamChatCompletionResponseSchema
      ),
      abortSignal: options.abortSignal,
@@ -484,7 +469,7 @@ var LLMGatewayChatLanguageModel = class {
       var _a2;
       return {
         toolCallType: "function",
-        toolCallId: (_a2 = toolCall.id) != null ? _a2 :
+        toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
        toolName: toolCall.function.name,
        args: toolCall.function.arguments
      };
@@ -500,12 +485,12 @@ var LLMGatewayChatLanguageModel = class {
   async doStream(options) {
     var _a, _c;
     const args = this.getArgs(options);
-    const { responseHeaders, value: response } = await
+    const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders(this.config.headers(), options.headers),
      body: __spreadProps(__spreadValues({}, args), {
        stream: true,
        // only include stream_options when in strict compatibility mode:
@@ -514,7 +499,7 @@ var LLMGatewayChatLanguageModel = class {
        }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
      }),
      failedResponseHandler: llmgatewayFailedResponseHandler,
-      successfulResponseHandler:
+      successfulResponseHandler: createEventSourceResponseHandler(
        LLMGatewayStreamChatCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -655,19 +640,19 @@ var LLMGatewayChatLanguageModel = class {
           const index = toolCallDelta.index;
           if (toolCalls[index] == null) {
             if (toolCallDelta.type !== "function") {
-              throw new
+              throw new InvalidResponseDataError({
                data: toolCallDelta,
                message: `Expected 'function' type.`
              });
            }
            if (toolCallDelta.id == null) {
-              throw new
+              throw new InvalidResponseDataError({
                data: toolCallDelta,
                message: `Expected 'id' to be a string.`
              });
            }
            if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
-              throw new
+              throw new InvalidResponseDataError({
                data: toolCallDelta,
                message: `Expected 'function.name' to be a string.`
              });
@@ -685,7 +670,7 @@ var LLMGatewayChatLanguageModel = class {
             if (toolCall2 == null) {
               throw new Error("Tool call is missing");
             }
-            if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null &&
+            if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
              controller.enqueue({
                type: "tool-call-delta",
                toolCallType: "function",
@@ -696,7 +681,7 @@ var LLMGatewayChatLanguageModel = class {
              controller.enqueue({
                type: "tool-call",
                toolCallType: "function",
-                toolCallId: (_g = toolCall2.id) != null ? _g :
+                toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
                toolName: toolCall2.function.name,
                args: toolCall2.function.arguments
              });
@@ -718,11 +703,11 @@ var LLMGatewayChatLanguageModel = class {
            toolName: toolCall.function.name,
            argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
          });
-          if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null &&
+          if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
            controller.enqueue({
              type: "tool-call",
              toolCallType: "function",
-              toolCallId: (_n = toolCall.id) != null ? _n :
+              toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
              toolName: toolCall.function.name,
              args: toolCall.function.arguments
            });
@@ -739,10 +724,10 @@ var LLMGatewayChatLanguageModel = class {
            controller.enqueue({
              type: "tool-call",
              toolCallType: "function",
-              toolCallId: (_a2 = toolCall.id) != null ? _a2 :
+              toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
              toolName: toolCall.function.name,
              // Coerce invalid arguments to an empty JSON object
-              args:
+              args: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
            });
            toolCall.sent = true;
          }
@@ -770,97 +755,97 @@ var LLMGatewayChatLanguageModel = class {
     };
   }
 };
-var LLMGatewayChatCompletionBaseResponseSchema =
-  id:
-  model:
-  usage:
-    prompt_tokens:
-    prompt_tokens_details:
-      cached_tokens:
+var LLMGatewayChatCompletionBaseResponseSchema = z3.object({
+  id: z3.string().optional(),
+  model: z3.string().optional(),
+  usage: z3.object({
+    prompt_tokens: z3.number(),
+    prompt_tokens_details: z3.object({
+      cached_tokens: z3.number()
    }).nullish(),
-    completion_tokens:
-    completion_tokens_details:
-      reasoning_tokens:
+    completion_tokens: z3.number(),
+    completion_tokens_details: z3.object({
+      reasoning_tokens: z3.number()
    }).nullish(),
-    total_tokens:
-    cost:
+    total_tokens: z3.number(),
+    cost: z3.number().optional()
  }).nullish()
 });
 var LLMGatewayNonStreamChatCompletionResponseSchema = LLMGatewayChatCompletionBaseResponseSchema.extend({
-  choices:
-
-      message:
-        role:
-        content:
-        reasoning:
+  choices: z3.array(
+    z3.object({
+      message: z3.object({
+        role: z3.literal("assistant"),
+        content: z3.string().nullable().optional(),
+        reasoning: z3.string().nullable().optional(),
        reasoning_details: ReasoningDetailArraySchema.nullish(),
-        tool_calls:
-
-            id:
-            type:
-            function:
-              name:
-              arguments:
+        tool_calls: z3.array(
+          z3.object({
+            id: z3.string().optional().nullable(),
+            type: z3.literal("function"),
+            function: z3.object({
+              name: z3.string(),
+              arguments: z3.string()
            })
          })
        ).optional()
      }),
-      index:
-      logprobs:
-        content:
-
-            token:
-            logprob:
-            top_logprobs:
-
-                token:
-                logprob:
+      index: z3.number(),
+      logprobs: z3.object({
+        content: z3.array(
+          z3.object({
+            token: z3.string(),
+            logprob: z3.number(),
+            top_logprobs: z3.array(
+              z3.object({
+                token: z3.string(),
+                logprob: z3.number()
              })
            )
          })
        ).nullable()
      }).nullable().optional(),
-      finish_reason:
+      finish_reason: z3.string().optional().nullable()
    })
  )
 });
-var LLMGatewayStreamChatCompletionChunkSchema =
+var LLMGatewayStreamChatCompletionChunkSchema = z3.union([
   LLMGatewayChatCompletionBaseResponseSchema.extend({
-    choices:
-
-        delta:
-          role:
-          content:
-          reasoning:
+    choices: z3.array(
+      z3.object({
+        delta: z3.object({
+          role: z3.enum(["assistant"]).optional(),
+          content: z3.string().nullish(),
+          reasoning: z3.string().nullish().optional(),
          reasoning_details: ReasoningDetailArraySchema.nullish(),
-          tool_calls:
-
-              index:
-              id:
-              type:
-              function:
-                name:
-                arguments:
+          tool_calls: z3.array(
+            z3.object({
+              index: z3.number(),
+              id: z3.string().nullish(),
+              type: z3.literal("function").optional(),
+              function: z3.object({
+                name: z3.string().nullish(),
+                arguments: z3.string().nullish()
              })
            })
          ).nullish()
        }).nullish(),
-        logprobs:
-          content:
-
-              token:
-              logprob:
-              top_logprobs:
-
-                  token:
-                  logprob:
+        logprobs: z3.object({
+          content: z3.array(
+            z3.object({
+              token: z3.string(),
+              logprob: z3.number(),
+              top_logprobs: z3.array(
+                z3.object({
+                  token: z3.string(),
+                  logprob: z3.number()
                })
              )
            })
          ).nullable()
        }).nullish(),
-        finish_reason:
-        index:
+        finish_reason: z3.string().nullable().optional(),
+        index: z3.number()
      })
    )
  }),
@@ -918,12 +903,20 @@ function prepareToolsAndToolChoice(mode) {
 }
 
 // src/llmgateway-completion-language-model.ts
-
-
-
+import { UnsupportedFunctionalityError as UnsupportedFunctionalityError3 } from "@ai-sdk/provider";
+import {
+  combineHeaders as combineHeaders2,
+  createEventSourceResponseHandler as createEventSourceResponseHandler2,
+  createJsonResponseHandler as createJsonResponseHandler2,
+  postJsonToApi as postJsonToApi2
+} from "@ai-sdk/provider-utils";
+import { z as z4 } from "zod";
 
 // src/convert-to-llmgateway-completion-prompt.ts
-
+import {
+  InvalidPromptError,
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+} from "@ai-sdk/provider";
 function convertToLLMGatewayCompletionPrompt({
   prompt,
   inputFormat,
@@ -943,7 +936,7 @@ function convertToLLMGatewayCompletionPrompt({
   for (const { role, content } of prompt) {
     switch (role) {
       case "system": {
-        throw new
+        throw new InvalidPromptError({
          message: "Unexpected system message in prompt: ${content}",
          prompt
        });
@@ -955,12 +948,12 @@ function convertToLLMGatewayCompletionPrompt({
            return part.text;
          }
          case "image": {
-            throw new
+            throw new UnsupportedFunctionalityError2({
              functionality: "images"
            });
          }
          case "file": {
-            throw new
+            throw new UnsupportedFunctionalityError2({
              functionality: "file attachments"
            });
          }
@@ -985,22 +978,22 @@ ${userMessage}
            return part.text;
          }
          case "tool-call": {
-            throw new
+            throw new UnsupportedFunctionalityError2({
              functionality: "tool-call messages"
            });
          }
          case "reasoning": {
-            throw new
+            throw new UnsupportedFunctionalityError2({
              functionality: "reasoning messages"
            });
          }
          case "redacted-reasoning": {
-            throw new
+            throw new UnsupportedFunctionalityError2({
              functionality: "redacted reasoning messages"
            });
          }
          case "file": {
-            throw new
+            throw new UnsupportedFunctionalityError2({
              functionality: "file attachments"
            });
          }
@@ -1019,7 +1012,7 @@ ${assistantMessage}
        break;
      }
      case "tool": {
-        throw new
+        throw new UnsupportedFunctionalityError2({
          functionality: "tool messages"
        });
      }
@@ -1112,31 +1105,31 @@ var LLMGatewayCompletionLanguageModel = class {
     switch (type) {
       case "regular": {
         if ((_b = mode.tools) == null ? void 0 : _b.length) {
-          throw new
+          throw new UnsupportedFunctionalityError3({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
-          throw new
+          throw new UnsupportedFunctionalityError3({
            functionality: "toolChoice"
          });
        }
        return baseArgs;
      }
      case "object-json": {
-        throw new
+        throw new UnsupportedFunctionalityError3({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
-        throw new
+        throw new UnsupportedFunctionalityError3({
          functionality: "object-tool mode"
        });
      }
      // Handle all non-text types with a single default case
      default: {
        const _exhaustiveCheck = type;
-        throw new
+        throw new UnsupportedFunctionalityError3({
          functionality: `${_exhaustiveCheck} mode`
        });
      }
@@ -1145,15 +1138,15 @@ var LLMGatewayCompletionLanguageModel = class {
   async doGenerate(options) {
     var _b, _c, _d, _e, _f;
     const args = this.getArgs(options);
-    const { responseHeaders, value: response } = await (
+    const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (
+      headers: combineHeaders2(this.config.headers(), options.headers),
      body: args,
      failedResponseHandler: llmgatewayFailedResponseHandler,
-      successfulResponseHandler: (
+      successfulResponseHandler: createJsonResponseHandler2(
        LLMGatewayCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -1187,19 +1180,19 @@ var LLMGatewayCompletionLanguageModel = class {
   }
   async doStream(options) {
     const args = this.getArgs(options);
-    const { responseHeaders, value: response } = await (
+    const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
      }),
-      headers: (
+      headers: combineHeaders2(this.config.headers(), options.headers),
      body: __spreadProps(__spreadValues({}, this.getArgs(options)), {
        stream: true,
        // only include stream_options when in strict compatibility mode:
        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
      }),
      failedResponseHandler: llmgatewayFailedResponseHandler,
-      successfulResponseHandler: (
+      successfulResponseHandler: createEventSourceResponseHandler2(
        LLMGatewayCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -1269,27 +1262,27 @@ var LLMGatewayCompletionLanguageModel = class {
     };
   }
 };
-var LLMGatewayCompletionChunkSchema =
-
-    id:
-    model:
-    choices:
-
-        text:
-        reasoning:
+var LLMGatewayCompletionChunkSchema = z4.union([
+  z4.object({
+    id: z4.string().optional(),
+    model: z4.string().optional(),
+    choices: z4.array(
+      z4.object({
+        text: z4.string(),
+        reasoning: z4.string().nullish().optional(),
        reasoning_details: ReasoningDetailArraySchema.nullish(),
-        finish_reason:
-        index:
-        logprobs:
-          tokens:
-          token_logprobs:
-          top_logprobs:
+        finish_reason: z4.string().nullish(),
+        index: z4.number(),
+        logprobs: z4.object({
+          tokens: z4.array(z4.string()),
+          token_logprobs: z4.array(z4.number()),
+          top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullable()
        }).nullable().optional()
      })
    ),
-    usage:
-      prompt_tokens:
-      completion_tokens:
+    usage: z4.object({
+      prompt_tokens: z4.number(),
+      completion_tokens: z4.number()
    }).optional().nullable()
  }),
  LLMGatewayErrorResponseSchema
@@ -1302,7 +1295,7 @@ var LLMGateway = class {
   */
   constructor(options = {}) {
     var _a, _b;
-    this.baseURL = (_b =
+    this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
    this.apiKey = options.apiKey;
    this.headers = options.headers;
  }
@@ -1310,7 +1303,7 @@ var LLMGateway = class {
     return {
       baseURL: this.baseURL,
       headers: () => __spreadValues({
-        Authorization: `Bearer ${
+        Authorization: `Bearer ${loadApiKey({
          apiKey: this.apiKey,
          environmentVariableName: "LLMGATEWAY_API_KEY",
          description: "LLMGateway"
@@ -1337,13 +1330,13 @@ var LLMGateway = class {
 };
 
 // src/llmgateway-provider.ts
-
+import { loadApiKey as loadApiKey2, withoutTrailingSlash as withoutTrailingSlash2 } from "@ai-sdk/provider-utils";
 function createLLMGateway(options = {}) {
   var _a, _b, _c;
-  const baseURL = (_b = (
+  const baseURL = (_b = withoutTrailingSlash2((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
  const compatibility = (_c = options.compatibility) != null ? _c : "compatible";
  const getHeaders = () => __spreadValues({
-    Authorization: `Bearer ${(
+    Authorization: `Bearer ${loadApiKey2({
    apiKey: options.apiKey,
    environmentVariableName: "LLMGATEWAY_API_KEY",
    description: "LLMGateway"
@@ -1389,10 +1382,9 @@ var llmgateway = createLLMGateway({
   compatibility: "strict"
   // strict for LLMGateway API
 });
-
-0 && (module.exports = {
+export {
   LLMGateway,
   createLLMGateway,
   llmgateway
-}
-//# sourceMappingURL=index.
+};
+//# sourceMappingURL=index.mjs.map
|