@friendliai/ai-provider 0.2.7-alpha.1 → 0.3.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.d.mts +21 -2
- package/dist/index.d.ts +21 -2
- package/dist/index.js +127 -51
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +128 -51
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
@@ -1,4 +1,4 @@
-import { ProviderV2, LanguageModelV2, EmbeddingModelV2, ImageModelV2, TranscriptionModelV1, SpeechModelV1 } from '@ai-sdk/provider';
+import { LanguageModelV1ProviderDefinedTool, ProviderV2, LanguageModelV2, EmbeddingModelV2, ImageModelV2, TranscriptionModelV1, SpeechModelV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod';
 
@@ -6,6 +6,21 @@ declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruc
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
+declare function webUrlBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function webSearchBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function mathCalendarBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function mathStatisticsBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function mathCalculatorBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function codePythonInterpreterBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare const friendliTools: {
+    webSearchBetaTool: typeof webSearchBetaTool;
+    webUrlBetaTool: typeof webUrlBetaTool;
+    mathCalendarBetaTool: typeof mathCalendarBetaTool;
+    mathStatisticsBetaTool: typeof mathStatisticsBetaTool;
+    mathCalculatorBetaTool: typeof mathCalculatorBetaTool;
+    codePythonInterpreterBetaTool: typeof codePythonInterpreterBetaTool;
+};
+
 interface FriendliAIProviderSettings {
     /**
      * FriendliAI API key. (FRIENDLI_TOKEN)
@@ -14,7 +29,7 @@ interface FriendliAIProviderSettings {
     /**
      * Base URL for the API calls.
      */
-    baseURL?: string;
+    baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools';
     /**
      * Custom headers to include in the requests.
      */
@@ -67,6 +82,10 @@ interface FriendliAIProvider extends ProviderV2 {
      * TODO: Implement for Dedicated users
      */
     speech(modelId: string & {}): SpeechModelV1;
+    /**
+     * Friendli-specific tools.
+     */
+    tools: typeof friendliTools;
 }
 /**
 Create an FriendliAI provider instance.
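Note: the `friendliTools` declarations above expose FriendliAI's beta built-in tools (web:url, web:search, math:calendar, math:statistics, math:calculator, code:python-interpreter) directly on the provider instance. A minimal usage sketch, assuming these provider-defined tools can be passed into the AI SDK `tools` map the same way as other providers' built-in tools (the `webSearch` key is an arbitrary choice):

import { generateText } from 'ai';
import { friendli } from '@friendliai/ai-provider';

// Sketch: friendli.tools.webSearchBetaTool() returns a provider-defined tool
// with id "friendli.web:search"; prepareTools (see dist/index.js below) maps it
// to { type: "web:search" } in the request body.
const { text } = await generateText({
  model: friendli('meta-llama-3.3-70b-instruct'),
  tools: { webSearch: friendli.tools.webSearchBetaTool() },
  prompt: 'Search the web for the latest FriendliAI release notes.',
});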
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { ProviderV2, LanguageModelV2, EmbeddingModelV2, ImageModelV2, TranscriptionModelV1, SpeechModelV1 } from '@ai-sdk/provider';
+import { LanguageModelV1ProviderDefinedTool, ProviderV2, LanguageModelV2, EmbeddingModelV2, ImageModelV2, TranscriptionModelV1, SpeechModelV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod';
 
@@ -6,6 +6,21 @@ declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruc
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
+declare function webUrlBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function webSearchBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function mathCalendarBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function mathStatisticsBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function mathCalculatorBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare function codePythonInterpreterBetaTool(): LanguageModelV1ProviderDefinedTool;
+declare const friendliTools: {
+    webSearchBetaTool: typeof webSearchBetaTool;
+    webUrlBetaTool: typeof webUrlBetaTool;
+    mathCalendarBetaTool: typeof mathCalendarBetaTool;
+    mathStatisticsBetaTool: typeof mathStatisticsBetaTool;
+    mathCalculatorBetaTool: typeof mathCalculatorBetaTool;
+    codePythonInterpreterBetaTool: typeof codePythonInterpreterBetaTool;
+};
+
 interface FriendliAIProviderSettings {
     /**
      * FriendliAI API key. (FRIENDLI_TOKEN)
@@ -14,7 +29,7 @@ interface FriendliAIProviderSettings {
     /**
      * Base URL for the API calls.
      */
-    baseURL?: string;
+    baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools';
     /**
      * Custom headers to include in the requests.
      */
@@ -67,6 +82,10 @@ interface FriendliAIProvider extends ProviderV2 {
      * TODO: Implement for Dedicated users
      */
     speech(modelId: string & {}): SpeechModelV1;
+    /**
+     * Friendli-specific tools.
+     */
+    tools: typeof friendliTools;
 }
 /**
 Create an FriendliAI provider instance.
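Note: the widened `baseURL` setting accepts either a full custom URL or one of the shorthand values resolved by `baseURLAutoSelect` in dist/index.js below. A brief configuration sketch:

import { createFriendli } from '@friendliai/ai-provider';

// Shorthand values map to the fixed FriendliAI endpoints:
const toolsProvider = createFriendli({ baseURL: 'serverless-tools' }); // https://api.friendli.ai/serverless/tools/v1
const dedicated = createFriendli({ baseURL: 'dedicated' });            // https://api.friendli.ai/dedicated/v1

// Any other string is still treated as a custom base URL:
const custom = createFriendli({ baseURL: 'https://example.com/v1' });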
package/dist/index.js
CHANGED
@@ -64,6 +64,7 @@ function prepareTools({
   tools,
   toolChoice
 }) {
+  var _a;
   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   if (tools == null) {
@@ -72,7 +73,10 @@ function prepareTools({
   const openaiCompatTools = [];
   for (const tool of tools) {
     if (tool.type === "provider-defined") {
-      toolWarnings.push({ type: "unsupported-tool", tool });
+      openaiCompatTools.push({
+        // NOTE: It would be better to use tool.name, but since ":" is replaced with "_", the following code is used instead
+        type: (_a = tool.id.split(".")[1]) != null ? _a : "unknown"
+      });
     } else {
       openaiCompatTools.push({
         type: "function",
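Note: `prepareTools` now forwards provider-defined tools instead of dropping them with an `unsupported-tool` warning, deriving the wire-level `type` from the tool id (the tool name is unsuitable because `:` is replaced with `_`). For the tools defined in src/friendli-tools.ts, the mapping works out as in this illustrative snippet:

// Illustration only: a provider-defined tool becomes a bare { type } entry in the request.
const tool = { type: 'provider-defined', id: 'friendli.web:search', name: 'web:search', args: {} };
const payloadEntry = { type: tool.id.split('.')[1] ?? 'unknown' }; // { type: 'web:search' }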
@@ -143,18 +147,24 @@ var FriendliAIChatLanguageModel = class {
     topK,
     frequencyPenalty,
     presencePenalty,
-    // providerOptions,
+    providerOptions,
     stopSequences,
     responseFormat,
     seed,
     toolChoice,
-    tools
+    tools,
+    stream
   }) {
     var _a;
     const warnings = [];
     if (topK != null) {
       warnings.push({ type: "unsupported-setting", setting: "topK" });
     }
+    const friendliOptions = await (0, import_provider_utils2.parseProviderOptions)({
+      provider: "friendli",
+      providerOptions,
+      schema: friendliProviderOptionsSchema
+    });
     if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
       warnings.push({
         type: "unsupported-setting",
@@ -177,6 +187,7 @@ var FriendliAIChatLanguageModel = class {
         // model specific settings:
         // user: compatibleOptions.user,
         // standardized settings:
+        stream,
         max_tokens: maxOutputTokens,
         temperature,
         top_p: topP,
@@ -189,7 +200,10 @@ var FriendliAIChatLanguageModel = class {
             name: (_a = responseFormat.name) != null ? _a : "response",
             description: responseFormat.description
           }
-        } : { type: "json_object" } : void 0,
+        } : { type: "json_object" } : (friendliOptions == null ? void 0 : friendliOptions.regex) != null ? {
+          type: "regex",
+          schema: friendliOptions.regex
+        } : void 0,
         stop: stopSequences,
         seed,
         // ...providerOptions?.[this.providerOptionsName],
@@ -198,14 +212,15 @@ var FriendliAIChatLanguageModel = class {
         messages: (0, import_internal.convertToOpenAICompatibleChatMessages)(prompt),
         // tools:
         tools: openaiTools,
-        tool_choice: openaiToolChoice
+        tool_choice: openaiToolChoice,
+        parallel_tool_calls: friendliOptions == null ? void 0 : friendliOptions.parallelToolCalls
       },
       warnings: [...warnings, ...toolWarnings]
     };
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
-    const { args, warnings } = await this.getArgs({ ...options });
+    const { args, warnings } = await this.getArgs({ ...options, stream: false });
     const body = JSON.stringify(args);
     const {
       responseHeaders,
@@ -271,7 +286,7 @@ var FriendliAIChatLanguageModel = class {
   }
   async doStream(options) {
     var _a;
-    const { args, warnings } = await this.getArgs({ ...options });
+    const { args, warnings } = await this.getArgs({ ...options, stream: true });
     const body = {
       ...args,
       stream: true,
@@ -647,6 +662,75 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_zod2.z.union
   }),
   errorSchema
 ]);
+var friendliProviderOptionsSchema = import_zod2.z.object({
+  /**
+   * Whether to enable parallel function calling during tool use. Default to true.
+   */
+  parallelToolCalls: import_zod2.z.boolean().nullish(),
+  /**
+   * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.
+   */
+  // regex: z.instanceof(RegExp).nullish(),
+  regex: import_zod2.z.string().nullish()
+});
+
+// src/friendli-tools.ts
+function webUrlBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.web:url",
+    name: "web:url",
+    args: {}
+  };
+}
+function webSearchBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.web:search",
+    name: "web:search",
+    args: {}
+  };
+}
+function mathCalendarBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.math:calendar",
+    name: "math:calendar",
+    args: {}
+  };
+}
+function mathStatisticsBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.math:statistics",
+    name: "math:statistics",
+    args: {}
+  };
+}
+function mathCalculatorBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.math:calculator",
+    name: "math:calculator",
+    args: {}
+  };
+}
+function codePythonInterpreterBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.code:python-interpreter",
+    name: "code:python-interpreter",
+    args: {}
+  };
+}
+var friendliTools = {
+  webSearchBetaTool,
+  webUrlBetaTool,
+  mathCalendarBetaTool,
+  mathStatisticsBetaTool,
+  mathCalculatorBetaTool,
+  codePythonInterpreterBetaTool
+};
 
 // src/friendli-provider.ts
 function createFriendli(options = {}) {
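Note: the new `friendliProviderOptionsSchema` is parsed under the `friendli` provider key, so the two options flow into the request as `parallel_tool_calls` and a `regex` response format. A hedged usage sketch (the regular expression shown is only an example):

import { generateText } from 'ai';
import { friendli } from '@friendliai/ai-provider';

const { text } = await generateText({
  model: friendli('meta-llama-3.1-8b-instruct'),
  prompt: 'Answer with yes or no.',
  providerOptions: {
    friendli: {
      parallelToolCalls: false, // sent as parallel_tool_calls
      regex: '(yes|no)',        // BETA: sent as response_format { type: "regex", schema: "(yes|no)" }
    },
  },
});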
@@ -659,54 +743,50 @@ function createFriendli(options = {}) {
     "X-Friendli-Team": options.teamId,
     ...options.headers
   });
-  const baseURLAutoSelect = (modelId, endpoint, baseURL, tools) => {
-    const customBaseURL = (0, import_provider_utils3.withoutTrailingSlash)(baseURL);
-    if (typeof customBaseURL === "string") {
-      return { baseURL: customBaseURL, type: "custom" };
-    }
+  const baseURLAutoSelect = (modelId, baseURL) => {
     const FriendliBaseURL = {
-      beta: "https://api.friendli.ai/serverless/beta",
       serverless: "https://api.friendli.ai/serverless/v1",
-      tools: "https://api.friendli.ai/serverless/tools/v1",
+      serverless_tools: "https://api.friendli.ai/serverless/tools/v1",
       dedicated: "https://api.friendli.ai/dedicated/v1"
     };
-    if (endpoint === "beta") {
-      return {
-        baseURL: FriendliBaseURL.beta,
-        type: "beta"
-      };
+    const customBaseURL = (0, import_provider_utils3.withoutTrailingSlash)(baseURL);
+    if (typeof customBaseURL === "string" && customBaseURL !== "dedicated" && customBaseURL !== "serverless" && customBaseURL !== "serverless-tools") {
+      return { baseURL: customBaseURL, type: "custom" };
     }
-    if (
-      // If the endpoint setting is serverless or auto and the model is floating on serverless,
-      endpoint === "serverless" || endpoint === "auto" && Object.values(FriendliAIServerlessModelIds).includes(
-        modelId
-      )
-    ) {
-      if (tools && tools.length > 0) {
+    switch (baseURL) {
+      case "dedicated":
         return {
-          baseURL: FriendliBaseURL.tools,
-          type: "tools"
+          baseURL: FriendliBaseURL.dedicated,
+          type: "dedicated"
         };
-      }
-      return {
-        baseURL: FriendliBaseURL.serverless,
-        type: "serverless"
-      };
-    } else {
-      return {
-        baseURL: FriendliBaseURL.dedicated,
-        type: "dedicated"
-      };
+      case "serverless":
+        return {
+          baseURL: FriendliBaseURL.serverless,
+          type: "serverless"
+        };
+      case "serverless-tools":
+        return {
+          baseURL: FriendliBaseURL.serverless_tools,
+          type: "serverless-tools"
+        };
+      default:
+        if (FriendliAIServerlessModelIds.includes(
+          modelId
+        )) {
+          return {
+            baseURL: FriendliBaseURL.serverless,
+            type: "serverless"
+          };
+        } else {
+          return {
+            baseURL: FriendliBaseURL.dedicated,
+            type: "dedicated"
+          };
+        }
     }
   };
   const createLanguageModel = (modelId) => {
-    const { baseURL, type } = baseURLAutoSelect(
-      modelId,
-      // settings.endpoint || 'auto',
-      "auto",
-      options.baseURL
-      // settings.tools,
-    );
+    const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);
     return new FriendliAIChatLanguageModel(modelId, {
       provider: `friendliai.${type}.chat`,
       url: ({ path }) => `${baseURL}${path}`,
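Note: the rewritten `baseURLAutoSelect` drops the old `endpoint`/`tools` parameters and the `/serverless/beta` URL; the target is now picked from the `baseURL` shorthand (or a custom URL), and otherwise from whether the model id appears in `FriendliAIServerlessModelIds`. A small sketch of the default behaviour (the second model id is hypothetical):

import { createFriendli } from '@friendliai/ai-provider';

const provider = createFriendli(); // no baseURL → auto-select per model id
provider('meta-llama-3.1-8b-instruct'); // listed serverless model → https://api.friendli.ai/serverless/v1
provider('my-dedicated-endpoint-id');   // anything else (hypothetical id) → https://api.friendli.ai/dedicated/v1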
@@ -715,12 +795,7 @@ function createFriendli(options = {}) {
     });
   };
   const createCompletionModel = (modelId) => {
-    const { baseURL, type } = baseURLAutoSelect(
-      modelId,
-      // settings.endpoint || 'auto',
-      "auto",
-      options.baseURL
-    );
+    const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);
     return new import_openai_compatible.OpenAICompatibleCompletionLanguageModel(modelId, {
       provider: `friendliai.${type}.completion`,
       url: ({ path }) => `${baseURL}${path}`,
@@ -750,6 +825,7 @@ function createFriendli(options = {}) {
   provider.imageModel = createImageModel;
   provider.transcription = createTranscriptionModel;
   provider.speech = createSpeechModel;
+  provider.tools = friendliTools;
   return provider;
 }
 var friendli = createFriendli();
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
(The single-line source map JSON — regenerated mappings and embedded TypeScript sources — was replaced; its content is truncated in this view and omitted here.)
O;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AF5uBI,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,UACA,SACA,UAIG;AAEH,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QAAI,OAAO,kBAAkB,UAAU;AACrC,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,UAAM,kBAAkB;AAAA,MACtB,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,MACP,WAAW;AAAA,IACb;AAEA,QAAI,aAAa,QAAQ;AACvB,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAEA;AAAA;AAAA,MAEE,aAAa,gBACZ,aAAa,UACZ,OAAO,OAAO,4BAA4B,EAAE;AAAA,QAC1C;AAAA,MACF;AAAA,MACF;AACA,UAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA;AAAA,MAEA;AAAA,MACA,QAAQ;AAAA;AAAA,IAEV;AAEA,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA;AAAA,MAEA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/friendli-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from './friendli-provider'\nexport type { FriendliAIErrorData } from './friendli-error'\nexport type {\n FriendliAIProvider,\n FriendliAIProviderSettings,\n} from './friendli-provider'\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV1,\n TranscriptionModelV1,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV1\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV1\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | 
undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof 
FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\nimport { regex } from 'zod/v4'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? 
{},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? 
undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n text: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text',\n text: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? 
'',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: 
z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. 
Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 
'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { LanguageModelV1ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AACP,IAAAC,yBAIO;AACP,+BAAwD;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF;;;ACNA,IAAAC,mBAUO;AACP,IAAAC,yBAaO;AACP,sBAIO;AAEP,IAAAC,cAAkB;;;AC/BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AAAA,EAClB,OAAO,aAAE,OAAO,aAAE,IAAI,CAAC;AACzB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,sCAAkC;AAAA,EAC7C;AACF;;;ACnBA,sBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFtGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAnElC;AAkFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,4BAAwB;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAtGtB;AAuGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AA1HL;AA2HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,UAAM,6CAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA;AAAA;AAAA;AAAA,QAMA,cAAU,uDAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAlOjE;AAmOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB
;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,OAAG,qCAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AApU/D;AAqUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAtZvC,gBAAAC,KAAA;AAwZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA
,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAznB5B,gBAAAD,KAAA;AA0nBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAGA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,E
AAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAO,cAAE,OAAO,EAAE,QAAQ;AAC5B,CAAC;;;AGt2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAA
I;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
|
package/dist/index.mjs
CHANGED
|
@@ -26,6 +26,7 @@ import {
   createJsonResponseHandler,
   generateId,
   isParsableJson,
+  parseProviderOptions,
   postJsonToApi
 } from "@ai-sdk/provider-utils";
 import {
@@ -58,6 +59,7 @@ function prepareTools({
   tools,
   toolChoice
 }) {
+  var _a;
   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   if (tools == null) {
@@ -66,7 +68,10 @@ function prepareTools({
   const openaiCompatTools = [];
   for (const tool of tools) {
     if (tool.type === "provider-defined") {
-      toolWarnings.push({ type: "unsupported-tool", tool });
+      openaiCompatTools.push({
+        // NOTE: It would be better to use tool.name, but since ":" is replaced with "_", the following code is used instead
+        type: (_a = tool.id.split(".")[1]) != null ? _a : "unknown"
+      });
     } else {
       openaiCompatTools.push({
         type: "function",
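Note: in the hunks above, prepareTools stops warning about provider-defined tools and instead forwards them to the chat API, deriving the payload's tool type from the segment of tool.id after the provider prefix (so "friendli.web:search" becomes { type: "web:search" }). A minimal TypeScript sketch of that mapping; ProviderDefinedTool here is a simplified stand-in for the SDK's internal tool type, not the actual exported interface:

// Sketch only: mirrors the id -> type mapping added in prepareTools above.
// ProviderDefinedTool is a simplified, assumed shape for illustration.
interface ProviderDefinedTool {
  type: 'provider-defined'
  id: string // e.g. "friendli.web:search"
  name: string
  args: Record<string, unknown>
}

function toFriendliToolPayload(tool: ProviderDefinedTool): { type: string } {
  // "friendli.web:search".split(".")[1] === "web:search"
  return { type: tool.id.split('.')[1] ?? 'unknown' }
}
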
@@ -137,18 +142,24 @@ var FriendliAIChatLanguageModel = class {
     topK,
     frequencyPenalty,
     presencePenalty,
-    // providerOptions,
+    providerOptions,
     stopSequences,
     responseFormat,
     seed,
     toolChoice,
-    tools
+    tools,
+    stream
   }) {
     var _a;
     const warnings = [];
     if (topK != null) {
       warnings.push({ type: "unsupported-setting", setting: "topK" });
     }
+    const friendliOptions = await parseProviderOptions({
+      provider: "friendli",
+      providerOptions,
+      schema: friendliProviderOptionsSchema
+    });
     if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
       warnings.push({
         type: "unsupported-setting",
@@ -171,6 +182,7 @@ var FriendliAIChatLanguageModel = class {
         // model specific settings:
         // user: compatibleOptions.user,
         // standardized settings:
+        stream,
         max_tokens: maxOutputTokens,
         temperature,
         top_p: topP,
@@ -183,7 +195,10 @@ var FriendliAIChatLanguageModel = class {
             name: (_a = responseFormat.name) != null ? _a : "response",
             description: responseFormat.description
           }
-        } : { type: "json_object" } : void 0
+        } : { type: "json_object" } : (friendliOptions == null ? void 0 : friendliOptions.regex) != null ? {
+          type: "regex",
+          schema: friendliOptions.regex
+        } : void 0,
         stop: stopSequences,
         seed,
         // ...providerOptions?.[this.providerOptionsName],
@@ -192,14 +207,15 @@ var FriendliAIChatLanguageModel = class {
         messages: convertToOpenAICompatibleChatMessages(prompt),
         // tools:
         tools: openaiTools,
-        tool_choice: openaiToolChoice
+        tool_choice: openaiToolChoice,
+        parallel_tool_calls: friendliOptions == null ? void 0 : friendliOptions.parallelToolCalls
       },
       warnings: [...warnings, ...toolWarnings]
     };
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
-    const { args, warnings } = await this.getArgs({ ...options });
+    const { args, warnings } = await this.getArgs({ ...options, stream: false });
     const body = JSON.stringify(args);
     const {
       responseHeaders,
@@ -265,7 +281,7 @@ var FriendliAIChatLanguageModel = class {
   }
   async doStream(options) {
     var _a;
-    const { args, warnings } = await this.getArgs({ ...options });
+    const { args, warnings } = await this.getArgs({ ...options, stream: true });
     const body = {
       ...args,
       stream: true,
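Note: getArgs now reads Friendli-specific options from providerOptions under the "friendli" key (validated by friendliProviderOptionsSchema, added further below), threads an explicit stream flag from doGenerate/doStream into the request body, maps regex onto a response_format of type "regex", and forwards parallelToolCalls as parallel_tool_calls. A hedged sketch of the options object this schema accepts; how the object is attached at the call site depends on the AI SDK version in use and is assumed here, not shown in this diff:

// Sketch only: the "friendli" provider options accepted by the new schema.
const providerOptions = {
  friendli: {
    // forwarded to the request body as parallel_tool_calls
    parallelToolCalls: false,
    // BETA: forces output matching this pattern via response_format { type: "regex", schema: ... }
    regex: '\\d{3}-\\d{4}',
  },
}
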
@@ -641,6 +657,75 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z2.union([
   }),
   errorSchema
 ]);
+var friendliProviderOptionsSchema = z2.object({
+  /**
+   * Whether to enable parallel function calling during tool use. Default to true.
+   */
+  parallelToolCalls: z2.boolean().nullish(),
+  /**
+   * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.
+   */
+  // regex: z.instanceof(RegExp).nullish(),
+  regex: z2.string().nullish()
+});
+
+// src/friendli-tools.ts
+function webUrlBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.web:url",
+    name: "web:url",
+    args: {}
+  };
+}
+function webSearchBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.web:search",
+    name: "web:search",
+    args: {}
+  };
+}
+function mathCalendarBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.math:calendar",
+    name: "math:calendar",
+    args: {}
+  };
+}
+function mathStatisticsBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.math:statistics",
+    name: "math:statistics",
+    args: {}
+  };
+}
+function mathCalculatorBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.math:calculator",
+    name: "math:calculator",
+    args: {}
+  };
+}
+function codePythonInterpreterBetaTool() {
+  return {
+    type: "provider-defined",
+    id: "friendli.code:python-interpreter",
+    name: "code:python-interpreter",
+    args: {}
+  };
+}
+var friendliTools = {
+  webSearchBetaTool,
+  webUrlBetaTool,
+  mathCalendarBetaTool,
+  mathStatisticsBetaTool,
+  mathCalculatorBetaTool,
+  codePythonInterpreterBetaTool
+};
 
 // src/friendli-provider.ts
 function createFriendli(options = {}) {
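Note: the added block above also bundles src/friendli-tools.ts. Each helper returns a provider-defined tool descriptor (for example { type: "provider-defined", id: "friendli.web:search", name: "web:search", args: {} }), and the whole collection is exposed as provider.tools later in this diff. A short usage sketch; how the returned descriptors are then wired into a generateText/streamText call follows the AI SDK's usual tools option and is not covered here:

// Sketch only: obtaining the beta tool descriptors from the default provider instance.
import { friendli } from '@friendliai/ai-provider'

const webSearch = friendli.tools.webSearchBetaTool()
const calculator = friendli.tools.mathCalculatorBetaTool()
const pythonInterpreter = friendli.tools.codePythonInterpreterBetaTool()
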
@@ -653,54 +738,50 @@ function createFriendli(options = {}) {
     "X-Friendli-Team": options.teamId,
     ...options.headers
   });
-  const baseURLAutoSelect = (modelId, endpoint, baseURL, tools) => {
-    const customBaseURL = withoutTrailingSlash(baseURL);
-    if (typeof customBaseURL === "string") {
-      return { baseURL: customBaseURL, type: "custom" };
-    }
+  const baseURLAutoSelect = (modelId, baseURL) => {
     const FriendliBaseURL = {
-      beta: "https://api.friendli.ai/serverless/beta",
       serverless: "https://api.friendli.ai/serverless/v1",
-      tools: "https://api.friendli.ai/serverless/tools/v1",
+      serverless_tools: "https://api.friendli.ai/serverless/tools/v1",
       dedicated: "https://api.friendli.ai/dedicated/v1"
     };
-    if (endpoint === "beta") {
-      return {
-        baseURL: FriendliBaseURL.beta,
-        type: "beta"
-      };
+    const customBaseURL = withoutTrailingSlash(baseURL);
+    if (typeof customBaseURL === "string" && customBaseURL !== "dedicated" && customBaseURL !== "serverless" && customBaseURL !== "serverless-tools") {
+      return { baseURL: customBaseURL, type: "custom" };
     }
-    if (
-      // If the endpoint setting is serverless or auto and the model is floating on serverless,
-      endpoint === "serverless" || endpoint === "auto" && Object.values(FriendliAIServerlessModelIds).includes(
-        modelId
-      )
-    ) {
-      if (tools && tools.length > 0) {
+    switch (baseURL) {
+      case "dedicated":
         return {
-          baseURL: FriendliBaseURL.tools,
-          type: "tools"
+          baseURL: FriendliBaseURL.dedicated,
+          type: "dedicated"
         };
-      }
-      return {
-        baseURL: FriendliBaseURL.serverless,
-        type: "serverless"
-      };
-    } else {
-      return {
-        baseURL: FriendliBaseURL.dedicated,
-        type: "dedicated"
-      };
+      case "serverless":
+        return {
+          baseURL: FriendliBaseURL.serverless,
+          type: "serverless"
+        };
+      case "serverless-tools":
+        return {
+          baseURL: FriendliBaseURL.serverless_tools,
+          type: "serverless-tools"
+        };
+      default:
+        if (FriendliAIServerlessModelIds.includes(
+          modelId
+        )) {
+          return {
+            baseURL: FriendliBaseURL.serverless,
+            type: "serverless"
+          };
+        } else {
+          return {
+            baseURL: FriendliBaseURL.dedicated,
+            type: "dedicated"
+          };
+        }
     }
   };
   const createLanguageModel = (modelId) => {
-    const { baseURL, type } = baseURLAutoSelect(
-      modelId,
-      // settings.endpoint || 'auto',
-      "auto",
-      options.baseURL
-      // settings.tools,
-    );
+    const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);
     return new FriendliAIChatLanguageModel(modelId, {
       provider: `friendliai.${type}.chat`,
       url: ({ path }) => `${baseURL}${path}`,
@@ -709,12 +790,7 @@ function createFriendli(options = {}) {
     });
   };
   const createCompletionModel = (modelId) => {
-    const { baseURL, type } = baseURLAutoSelect(
-      modelId,
-      // settings.endpoint || 'auto',
-      "auto",
-      options.baseURL
-    );
+    const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);
     return new OpenAICompatibleCompletionLanguageModel(modelId, {
       provider: `friendliai.${type}.completion`,
       url: ({ path }) => `${baseURL}${path}`,
@@ -744,6 +820,7 @@ function createFriendli(options = {}) {
   provider.imageModel = createImageModel;
   provider.transcription = createTranscriptionModel;
   provider.speech = createSpeechModel;
+  provider.tools = friendliTools;
   return provider;
 }
 var friendli = createFriendli();
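Note: with the rewritten baseURLAutoSelect above, the baseURL setting accepts the shorthands 'dedicated', 'serverless', and 'serverless-tools' in addition to a custom URL; when neither a shorthand nor a URL is given, known serverless model ids resolve to the serverless endpoint and everything else to the dedicated endpoint. A brief sketch of both paths:

// Sketch only: selecting endpoints via the new baseURL shorthands.
import { createFriendli } from '@friendliai/ai-provider'

// Pin every request to the serverless tools endpoint
// (https://api.friendli.ai/serverless/tools/v1):
const friendliServerlessTools = createFriendli({ baseURL: 'serverless-tools' })

// Leave baseURL unset to auto-select by model id: a known serverless id such as
// 'meta-llama-3.1-8b-instruct' maps to the serverless endpoint, other ids to dedicated.
const friendliAuto = createFriendli()
const model = friendliAuto('meta-llama-3.1-8b-instruct')
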
|
package/dist/index.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV1,\n TranscriptionModelV1,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV1\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV1\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n endpoint: 'auto' | 'dedicated' | 'serverless' | 'beta',\n baseURL: string | undefined,\n tools?: Array<unknown>,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'tools' | 'custom' | 'beta'\n } => {\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (typeof customBaseURL === 'string') {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n const FriendliBaseURL = {\n beta: 'https://api.friendli.ai/serverless/beta',\n serverless: 
'https://api.friendli.ai/serverless/v1',\n tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n if (endpoint === 'beta') {\n return {\n baseURL: FriendliBaseURL.beta,\n type: 'beta',\n }\n }\n\n if (\n // If the endpoint setting is serverless or auto and the model is floating on serverless,\n endpoint === 'serverless' ||\n (endpoint === 'auto' &&\n Object.values(FriendliAIServerlessModelIds).includes(\n modelId as FriendliAIServerlessModelId,\n ))\n ) {\n if (tools && tools.length > 0) {\n return {\n baseURL: FriendliBaseURL.tools,\n type: 'tools',\n }\n }\n\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n // settings.endpoint || 'auto',\n 'auto',\n options.baseURL,\n // settings.tools,\n )\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n // settings.endpoint || 'auto',\n 'auto',\n options.baseURL,\n )\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n\n// interface FriendliAISharedSettings {\n// /**\n// * Sets the endpoint to which the request will be sent.\n// * auto: automatically selected based on model_id\n// * dedicated: Fixed to \"/dedicated/v1\"\n// * serverless: automatically selected as one of 
\"/serverless/beta\", \"/serverless/v1\", or \"/serverless/tools/v1\"\n// * Ignored if baseURL is specified.\n// */\n// endpoint?: 'auto' | 'dedicated' | 'serverless'\n// }\n\n// export interface FriendliAIChatSettings\n// extends FriendliAISharedSettings,\n// OpenAICompatibleChatSettings {\n// /**\n// * BETA FEATURE: Include the model's training loss in the response.\n// */\n// tools?: Array<{\n// type:\n// | 'web:url'\n// | 'web:search'\n// | 'math:calendar'\n// | 'math:statistics'\n// | 'math:calculator'\n// | 'code:python-interpreter'\n// }>\n\n// /**\n// * Whether to enable parallel function calling during tool use. Default to true.\n// */\n// parallelToolCalls?: boolean\n\n// /**\n// * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n// */\n// regex?: RegExp\n// }\n\n// export interface FriendliAICompletionSettings\n// extends FriendliAISharedSettings,\n// OpenAICompatibleCompletionSettings {}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = 
config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n // providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n text: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text',\n text: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n 
}),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n // | Array<{\n // type: string\n // files?: string[]\n // }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }> = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF;;;ACNA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC9BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,IAAI,CAAC;AACzB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AF9FO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAjElC;AAgFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AApGtB;AAqGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,I
ACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AArHnD;AAsHI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA;AAAA;AAAA;AAAA,QAMA,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAhNjE;AAiNI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAlT/D;AAmTI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AApYvC,gBAAAC,KAAA;AAsYY,gBAAI,C
AAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAvmB5B,gBAAAD,KAAA;AAwmBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;
AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAGA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GA
AE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AF5uBI,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,UACA,SACA,UAIG;AAEH,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QAAI,OAAO,kBAAkB,UAAU;AACrC,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,UAAM,kBAAkB;AAAA,MACtB,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,MACP,WAAW;AAAA,IACb;AAEA,QAAI,aAAa,QAAQ;AACvB,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAEA;AAAA;AAAA,MAEE,aAAa,gBACZ,aAAa,UACZ,OAAO,OAAO,4BAA4B,EAAE;AAAA,QAC1C;AAAA,MACF;AAAA,MACF;AACA,UAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA;AAAA,MAEA;AAAA,MACA,QAAQ;AAAA;AAAA,IAEV;AAEA,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA;AAAA,MAEA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z"]}
|
|
1
|
+
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV1,\n TranscriptionModelV1,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV1\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV1\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 
'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n 
LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\nimport { regex } from 'zod/v4'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? 
{},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? 
undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n text: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text',\n text: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? 
'',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: 
z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. 
Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 
'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { LanguageModelV1ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF;;;ACNA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC/BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,IAAI,CAAC;AACzB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFtGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAnElC;AAkFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAtGtB;AAuGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AA1HL;AA2HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,MAAM,qBAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA;AAAA;AAAA;AAAA,QAMA,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAlOjE;AAmOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;
AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AApU/D;AAqUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAtZvC,gBAAAC,KAAA;AAwZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY
;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAznB5B,gBAAAD,KAAA;AA0nBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAGA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CA
AC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAC5B,CAAC;;;AGt2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAA
M;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z"]}
|
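The source map added above embeds the full 0.3.0 provider sources, which surface this release's main additions: endpoint selection via baseURL ('auto' | 'dedicated' | 'serverless' | 'serverless-tools' or a custom URL, auto-selected from the model id when omitted), the provider.tools beta tool factories, and the 'friendli' provider options (parallelToolCalls, regex). The sketch below is an illustrative usage example reconstructed from that embedded source, not code shipped in this package; the generateText call from the 'ai' package and the way the beta tools are passed in the tools record are assumptions.

import { generateText } from 'ai'
import { createFriendli } from '@friendliai/ai-provider'

// Endpoint selection: 'dedicated' | 'serverless' | 'serverless-tools' or a custom URL.
// When baseURL is omitted, known serverless model ids route to the serverless endpoint
// and everything else to the dedicated endpoint (see baseURLAutoSelect in the embedded source).
const friendli = createFriendli({
  apiKey: process.env.FRIENDLI_TOKEN, // falls back to the FRIENDLI_TOKEN environment variable
  baseURL: 'serverless-tools',
})

const { text } = await generateText({
  // Chat model backed by the new FriendliAIChatLanguageModel.
  model: friendli('meta-llama-3.3-70b-instruct'),
  prompt: 'What is the weather like in Seoul today?',
  // Provider-defined beta tools exposed via provider.tools (wiring assumed, not verified).
  tools: {
    webSearch: friendli.tools.webSearchBetaTool(),
    python: friendli.tools.codePythonInterpreterBetaTool(),
  },
  providerOptions: {
    // Parsed against friendliProviderOptionsSchema under the 'friendli' key in the chat model.
    friendli: {
      parallelToolCalls: false, // defaults to true per the schema comment
      // regex: '(yes|no)', // BETA: constrain the output with a regular expression
    },
  },
})

console.log(text)

Note on the regex option: per the embedded getArgs implementation, it is only applied when no JSON response format is requested, in which case the request's response_format becomes { type: 'regex', schema: <pattern> }; it is left commented out here because combining it with tool calls is untested in this sketch.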