@friendliai/ai-provider 0.3.0-beta.6 → 0.3.0-beta.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.d.mts +30 -1
- package/dist/index.d.ts +30 -1
- package/dist/index.js +119 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +118 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,12 @@
 # @friendliai/ai-provider
 
+## 0.3.0-beta.7
+
+### Patch Changes
+
+- ac374ab: Add getAvailableModels() for dynamic model discovery
+  ref: https://vercel.com/docs/ai-gateway/models-and-providers#dynamic-model-discovery
+
 ## 0.3.0-beta.6
 
 ### Patch Changes
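For context, a minimal usage sketch of the new API (not taken from the package's documentation; it assumes FRIENDLI_TOKEN is set in the environment, which the compiled implementation further below falls back to when no apiKey is configured):

import { friendli } from '@friendliai/ai-provider'

// List the serverless endpoints that Friendli currently advertises.
const { models } = await friendli.getAvailableModels()
for (const model of models) {
  console.log(model.id, model.warm ? 'warm' : 'cold', model.contextLength)
}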
package/dist/index.d.mts
CHANGED
@@ -6,6 +6,27 @@ declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruc
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
+type Pricing = {
+    inputToken?: number;
+    outputToken?: number;
+    responseTime?: number;
+    unitType?: 'TOKEN' | 'SECOND';
+    currency?: string;
+    unit?: string;
+};
+type FriendliAvailableModel = {
+    id: string;
+    name?: string | null;
+    description?: string | null;
+    pricing?: Pricing;
+    warm?: boolean;
+    cold?: boolean;
+    contextLength?: number | null;
+};
+type FriendliAvailableModelsResponse = {
+    models: FriendliAvailableModel[];
+};
+
 declare function webUrlBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function webSearchBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool;
@@ -65,6 +86,14 @@ interface FriendliAIProvider extends ProviderV2 {
      * Creates a text embedding model for text generation.
      * TODO: Implement for Dedicated users
      */
+    embedding(modelId: string & {}): LanguageModelV2;
+    textEmbeddingModel(modelId: string & {}): LanguageModelV2;
+    /**
+     * Returns the available models and their metadata.
+     */
+    getAvailableModels(options?: {
+        graphqlURL?: string;
+    }): Promise<FriendliAvailableModelsResponse>;
     embedding(modelId: string & {}): EmbeddingModelV2<string>;
     textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>;
     /**
@@ -102,4 +131,4 @@ declare const friendliaiErrorSchema: z.ZodObject<{
 }, z.core.$strip>;
 type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
-export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, createFriendli, friendli };
+export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, type FriendliAvailableModel, type FriendliAvailableModelsResponse, createFriendli, friendli };
package/dist/index.d.ts
CHANGED
@@ -6,6 +6,27 @@ declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruc
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
+type Pricing = {
+    inputToken?: number;
+    outputToken?: number;
+    responseTime?: number;
+    unitType?: 'TOKEN' | 'SECOND';
+    currency?: string;
+    unit?: string;
+};
+type FriendliAvailableModel = {
+    id: string;
+    name?: string | null;
+    description?: string | null;
+    pricing?: Pricing;
+    warm?: boolean;
+    cold?: boolean;
+    contextLength?: number | null;
+};
+type FriendliAvailableModelsResponse = {
+    models: FriendliAvailableModel[];
+};
+
 declare function webUrlBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function webSearchBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool;
@@ -65,6 +86,14 @@ interface FriendliAIProvider extends ProviderV2 {
      * Creates a text embedding model for text generation.
      * TODO: Implement for Dedicated users
      */
+    embedding(modelId: string & {}): LanguageModelV2;
+    textEmbeddingModel(modelId: string & {}): LanguageModelV2;
+    /**
+     * Returns the available models and their metadata.
+     */
+    getAvailableModels(options?: {
+        graphqlURL?: string;
+    }): Promise<FriendliAvailableModelsResponse>;
     embedding(modelId: string & {}): EmbeddingModelV2<string>;
     textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>;
     /**
@@ -102,4 +131,4 @@ declare const friendliaiErrorSchema: z.ZodObject<{
 }, z.core.$strip>;
 type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
-export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, createFriendli, friendli };
+export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, type FriendliAvailableModel, type FriendliAvailableModelsResponse, createFriendli, friendli };
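Both declaration files add the same types, so a single consumption sketch covers them. It assumes the fields are populated as declared; warm, unitType, inputToken, outputToken and unit all come from the new FriendliAvailableModel and Pricing types:

import { friendli, type FriendliAvailableModel } from '@friendliai/ai-provider'

const { models } = await friendli.getAvailableModels()

// Keep only endpoints that are warm and priced per token.
const warmTokenPriced: FriendliAvailableModel[] = models.filter(
  (m) => m.warm === true && m.pricing?.unitType === 'TOKEN',
)

for (const m of warmTokenPriced) {
  console.log(`${m.id}: ${m.pricing?.inputToken} in / ${m.pricing?.outputToken} out per ${m.pricing?.unit ?? 'unit'}`)
}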
package/dist/index.js
CHANGED
@@ -27,7 +27,7 @@ module.exports = __toCommonJS(index_exports);
 
 // src/friendli-provider.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_openai_compatible = require("@ai-sdk/openai-compatible");
 
 // src/friendli-settings.ts
@@ -689,6 +689,113 @@ var friendliProviderOptionsSchema = import_zod2.z.object({
   chat_template_kwargs: import_zod2.z.record(import_zod2.z.string(), import_zod2.z.any()).nullish()
 });
 
+// src/get-available-models.ts
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var DEFAULT_GRAPHQL_URL = "https://api-internal.friendli.ai/api/graphql";
+async function postGraphQL(url, body, headers) {
+  const res = await fetch(url, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    body: JSON.stringify(body)
+  });
+  let json;
+  try {
+    json = await res.json();
+  } catch (err) {
+    console.error(
+      "Failed to parse JSON response from Friendli API:",
+      err,
+      "Status:",
+      res.status,
+      res.statusText
+    );
+    throw new Error(
+      `Failed to parse JSON response from Friendli API: ${err instanceof Error ? err.message : String(err)}`
+    );
+  }
+  return json;
+}
+function normalizePriceUnit(unit) {
+  if (!unit) return void 0;
+  return unit;
+}
+async function getAvailableModelsImpl(options) {
+  var _a, _b, _c, _d, _e, _f;
+  let token;
+  try {
+    token = (_a = options.apiKey) != null ? _a : (0, import_provider_utils3.loadApiKey)({
+      apiKey: void 0,
+      environmentVariableName: "FRIENDLI_TOKEN",
+      description: "FRIENDLI_TOKEN"
+    });
+  } catch (e) {
+    token = void 0;
+  }
+  const headers = {
+    ...token ? { Authorization: `Bearer ${token}` } : {},
+    ...options.teamId ? { "X-Friendli-Team": options.teamId } : {},
+    ...(_b = options.headers) != null ? _b : {}
+  };
+  const url = (_c = options.graphqlURL) != null ? _c : DEFAULT_GRAPHQL_URL;
+  const query = `
+    query Edges {
+      serverlessEndpoints {
+        edges {
+          ... on ServerlessChatEndpointCatalog {
+            id
+            name
+            status
+            price {
+              inputPrice
+              outputPrice
+              unit
+              responseTimePrice
+              priceUnitType
+            }
+            contextLength
+          }
+        }
+      }
+    }
+  `;
+  const resp = await postGraphQL(
+    url,
+    { query, variables: {}, operationName: "Edges" },
+    headers
+  );
+  if (resp.errors && resp.errors.length > 0) {
+    throw new Error(
+      `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join("; ")}`
+    );
+  }
+  const edges = (_f = (_e = (_d = resp.data) == null ? void 0 : _d.serverlessEndpoints) == null ? void 0 : _e.edges) != null ? _f : [];
+  const models = edges.map((e) => {
+    var _a2, _b2, _c2, _d2, _e2, _f2;
+    const warm = e.status === "WARM";
+    const pricing = e.price ? {
+      inputToken: (_a2 = e.price.inputPrice) != null ? _a2 : void 0,
+      outputToken: (_b2 = e.price.outputPrice) != null ? _b2 : void 0,
+      responseTime: (_c2 = e.price.responseTimePrice) != null ? _c2 : void 0,
+      unitType: (_d2 = e.price.priceUnitType) != null ? _d2 : void 0,
+      unit: normalizePriceUnit(e.price.unit),
+      currency: "USD"
+    } : void 0;
+    return {
+      id: e.id,
+      name: (_e2 = e.name) != null ? _e2 : void 0,
+      description: void 0,
+      pricing,
+      warm,
+      cold: warm === false,
+      contextLength: (_f2 = e.contextLength) != null ? _f2 : void 0
+    };
+  });
+  return { models };
+}
+
 // src/friendli-tools.ts
 function webUrlBetaTool() {
   return {
@@ -750,7 +857,7 @@ var friendliTools = {
 // src/friendli-provider.ts
 function createFriendli(options = {}) {
   const getHeaders = () => ({
-    Authorization: `Bearer ${(0,
+    Authorization: `Bearer ${(0, import_provider_utils4.loadApiKey)({
       apiKey: options.apiKey,
       environmentVariableName: "FRIENDLI_TOKEN",
       description: "FRIENDLI_TOKEN"
@@ -764,7 +871,7 @@ function createFriendli(options = {}) {
       serverless_tools: "https://api.friendli.ai/serverless/tools/v1",
       dedicated: "https://api.friendli.ai/dedicated/v1"
     };
-    const customBaseURL = (0,
+    const customBaseURL = (0, import_provider_utils4.withoutTrailingSlash)(baseURL);
     if (typeof customBaseURL === "string" && customBaseURL !== "dedicated" && customBaseURL !== "serverless" && customBaseURL !== "serverless-tools") {
       return { baseURL: customBaseURL, type: "custom" };
     }
@@ -837,6 +944,15 @@ function createFriendli(options = {}) {
   provider.completion = createCompletionModel;
   provider.embedding = createTextEmbeddingModel;
   provider.textEmbeddingModel = createTextEmbeddingModel;
+  provider.getAvailableModels = async (opts) => {
+    var _a;
+    const defaultURL = "https://api-internal.friendli.ai/api/graphql";
+    const graphqlURL = (_a = opts == null ? void 0 : opts.graphqlURL) != null ? _a : defaultURL;
+    const apiKey = options.apiKey;
+    const teamId = options.teamId;
+    const headers = options.headers;
+    return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL });
+  };
   provider.imageModel = createImageModel;
   provider.transcription = createTranscriptionModel;
   provider.speech = createSpeechModel;
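Putting the compiled pieces together, a sketch of calling the new method on a configured provider; the option names mirror the compiled code above, 'my-team-id' is a placeholder, and graphqlURL only needs to be passed to override the DEFAULT_GRAPHQL_URL shown there:

import { createFriendli } from '@friendliai/ai-provider'

const provider = createFriendli({
  apiKey: process.env.FRIENDLI_TOKEN,
  teamId: 'my-team-id', // forwarded as the X-Friendli-Team header
})

// Omitting graphqlURL uses https://api-internal.friendli.ai/api/graphql.
const { models } = await provider.getAvailableModels()
console.log(`Friendli reports ${models.length} serverless endpoints`)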
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
(single-line minified source map; contents omitted)
CL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
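The updated source map added below embeds the new get-available-models.ts module and the getAvailableModels() method on the provider interface. As a minimal sketch (illustrative only, not code shipped in the package, assuming just the typings visible in the embedded sources: createFriendli(), getAvailableModels(), and the FriendliAvailableModel fields), a consumer could list the serverless models like this:

import { createFriendli } from '@friendliai/ai-provider'

async function listModels() {
  // With no apiKey option, the provider reads FRIENDLI_TOKEN from the environment.
  const friendli = createFriendli()

  // graphqlURL is optional; the provider falls back to its built-in default endpoint.
  const { models } = await friendli.getAvailableModels()

  for (const model of models) {
    console.log(model.id, model.warm ? 'warm' : 'cold', model.contextLength ?? 'unknown')
  }
}

listModels().catch(console.error)

Per the FriendliAvailableModel type in the embedded sources, each entry may also carry optional pricing metadata (inputToken, outputToken, unitType, currency, unit).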
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/get-available-models.ts","../src/friendli-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from './friendli-provider'\nexport type { FriendliAIErrorData } from './friendli-error'\nexport type {\n FriendliAIProvider,\n FriendliAIProviderSettings,\n} from './friendli-provider'\nexport type {\n FriendliAvailableModelsResponse,\n FriendliAvailableModel,\n} from './get-available-models'\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { getAvailableModelsImpl } from './get-available-models'\nimport type { FriendliAvailableModelsResponse } from './get-available-models'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): LanguageModelV2\n textEmbeddingModel(modelId: string & {}): LanguageModelV2\n /**\n * Returns the available models and their metadata.\n */\n getAvailableModels(options?: {\n graphqlURL?: string\n }): Promise<FriendliAvailableModelsResponse>\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new 
FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n ;(provider as unknown as FriendliAIProvider).getAvailableModels =\n async (opts?: { graphqlURL?: string }) => {\n const defaultURL = 'https://api-internal.friendli.ai/api/graphql'\n const graphqlURL = opts?.graphqlURL ?? defaultURL\n const apiKey = options.apiKey\n const teamId = options.teamId\n const headers = options.headers\n return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL })\n }\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n // 'getAvailableModels' is declared here.\n return provider as unknown as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // 
LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? 
{},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? { chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? 
undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? 
'',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: 
z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. 
Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 
'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { loadApiKey } from '@ai-sdk/provider-utils'\n\ntype Pricing = {\n inputToken?: number\n outputToken?: number\n responseTime?: number\n unitType?: 'TOKEN' | 'SECOND'\n currency?: string\n unit?: string\n}\n\nexport type FriendliAvailableModel = {\n id: string\n name?: string | null\n description?: string | null\n pricing?: Pricing\n warm?: boolean\n cold?: boolean\n contextLength?: number | null\n}\n\nexport type FriendliAvailableModelsResponse = {\n models: FriendliAvailableModel[]\n}\n\ntype GraphQLResponse<T> = {\n data?: T\n errors?: Array<{ message: string }>\n}\n\nconst DEFAULT_GRAPHQL_URL = 'https://api-internal.friendli.ai/api/graphql'\n\nasync function postGraphQL<T>(\n url: string,\n body: {\n query: string\n variables?: Record<string, unknown>\n operationName?: string\n },\n headers: Record<string, string>,\n): Promise<GraphQLResponse<T>> {\n const res = await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: JSON.stringify(body),\n })\n\n let json: GraphQLResponse<T>\n try {\n json = await res.json()\n } catch (err) {\n console.error(\n 'Failed to parse JSON response from Friendli API:',\n err,\n 'Status:',\n res.status,\n res.statusText,\n )\n throw new Error(\n `Failed to parse JSON response from Friendli API: ${err instanceof Error ? 
err.message : String(err)}`,\n )\n }\n return json\n}\n\ntype ServerlessEndpointEdge = {\n id: string\n name?: string | null\n status?: 'WARM' | 'COLD' | string | null\n price?: {\n inputPrice?: number | null\n outputPrice?: number | null\n unit?: string | null\n responseTimePrice?: number | null\n priceUnitType?: 'TOKEN' | 'SECOND' | null\n } | null\n contextLength?: number | null\n}\n\ntype ServerlessEndpointsQuery = {\n serverlessEndpoints?: {\n edges?: ServerlessEndpointEdge[]\n } | null\n}\n\nfunction normalizePriceUnit(unit?: string | null): string | undefined {\n if (!unit) return undefined\n return unit\n}\n\nexport async function getAvailableModelsImpl(options: {\n apiKey?: string\n teamId?: string\n headers?: Record<string, string>\n graphqlURL?: string\n}): Promise<FriendliAvailableModelsResponse> {\n let token: string | undefined\n try {\n token =\n options.apiKey ??\n loadApiKey({\n apiKey: undefined,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })\n } catch {\n token = undefined\n }\n\n const headers: Record<string, string> = {\n ...(token ? { Authorization: `Bearer ${token}` } : {}),\n ...(options.teamId ? { 'X-Friendli-Team': options.teamId } : {}),\n ...(options.headers ?? {}),\n }\n\n const url = options.graphqlURL ?? DEFAULT_GRAPHQL_URL\n\n const query = `\n query Edges {\n serverlessEndpoints {\n edges {\n ... on ServerlessChatEndpointCatalog {\n id\n name\n status\n price {\n inputPrice\n outputPrice\n unit\n responseTimePrice\n priceUnitType\n }\n contextLength\n }\n }\n }\n }\n `\n\n const resp = await postGraphQL<ServerlessEndpointsQuery>(\n url,\n { query, variables: {}, operationName: 'Edges' },\n headers,\n )\n\n if (resp.errors && resp.errors.length > 0) {\n throw new Error(\n `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join('; ')}`,\n )\n }\n\n const edges = resp.data?.serverlessEndpoints?.edges ?? []\n\n const models: FriendliAvailableModel[] = edges.map((e) => {\n const warm = e.status === 'WARM'\n const pricing: Pricing | undefined = e.price\n ? {\n inputToken: e.price.inputPrice ?? undefined,\n outputToken: e.price.outputPrice ?? undefined,\n responseTime: e.price.responseTimePrice ?? undefined,\n unitType: (e.price.priceUnitType ?? undefined) as\n | 'TOKEN'\n | 'SECOND'\n | undefined,\n unit: normalizePriceUnit(e.price.unit),\n currency: 'USD',\n }\n : undefined\n\n return {\n id: e.id,\n name: e.name ?? undefined,\n description: undefined,\n pricing,\n warm,\n cold: warm === false,\n contextLength: e.contextLength ?? 
undefined,\n }\n })\n\n return { models }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: codePythonInterpreterBetaTool,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AACP,IAAAC,yBAIO;AACP,+BAAwD;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AACF;;;AC5BA,IAAAC,mBAUO;AACP,IAAAC,yBAaO;AACP,sBAIO;AAEP,IAAAC,cAAkB;;;AC/BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AAAA,EAClB,OAAO,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,sCAAkC;AAAA,EAC7C;AACF;;;ACnBA,sBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,4BAAwB;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAA
I,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,UAAM,6CAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,cAAU,uDAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtOjE;AAuOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,OAAG,qCAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBA
AyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB
,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAA
E,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsB,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AG32BD,IAAAE,yBAA2B;AA8B3B,IAAM,sBAAsB;AAE5B,eAAe,YACb,KACA,MAKA,SAC6B;AAC7B,QAAM,MAAM,MAAM,MAAM,KAAK;AAAA,IAC3B,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL;AAAA,IACA,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B,CAAC;AAED,MAAI;AACJ,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,SAAS,KAAK;AACZ,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,IAAI;AAAA,MACJ,IAAI;AAAA,IACN;AACA,UAAM,IAAI;AAAA,MACR,oDAAoD,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IACtG;AAAA,EACF;AACA,SAAO;AACT;AAsBA,SAAS,mBAAmB,MAA0C;AACpE,MAAI,CAAC,KAAM,QAAO;AAClB,SAAO;AACT;AAEA,eAAsB,uBAAuB,SAKA;AAlG7C;AAmGE,MAAI;AACJ,MAAI;AACF,aACE,aAAQ,WAAR,gBACA,mCAAW;AAAA,MACT,QAAQ;AAAA,MACR,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,EACL,SAAQ;AACN,YAAQ;AAAA,EACV;AAEA,QAAM,UAAkC;AAAA,IACtC,GAAI,QAAQ,EAAE,eAAe,UAAU,KAAK,GAAG,IAAI,CAAC;AAAA,IACpD,GAAI,QAAQ,SAAS,EAAE,mBAAmB,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC9D,IAAI,aAAQ,YAAR,YAAmB,CAAC;AAAA,EAC1B;AAEA,QAAM,OAAM,aAAQ,eAAR,YAAsB;AAElC,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBd,QAAM,OAAO,MAAM;AAAA,IACjB;AAAA,IACA,EAAE,OAAO,WAAW,CAAC,GAAG,eAAe,QAAQ;AAAA,IAC/C;AAAA,EACF;AAEA,MAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IACpF;AAAA,EACF;AAEA,QAAM,SAAQ,sBAAK,SAAL,mBAAW,wBAAX,mBAAgC,UAAhC,YAAyC,CAAC;AAExD,QAAM,SAAmC,MAAM,IAAI,CAAC,MAAM;AA5J5D,QAAAC,KAAAC,KAAAC,KAAAC,KAAAC,KAAAC;AA6JI,UAAM,OAAO,EAAE,WAAW;AAC1B,UAAM,UAA+B,EAAE,QACnC;AAAA,MACE,aAAYL,MAAA,EAAE,MAAM,eAAR,OAAAA,MAAsB;AAAA,MAClC,cAAaC,MAAA,EAAE,MAAM,gBAAR,OAAAA,MAAuB;AAAA,MACpC,eAAcC,MAAA,EAAE,MAAM,sBAAR,OAAAA,MAA6B;AAAA,MAC3C,WAAWC,MAAA,EAAE,MAAM,kBAAR,OAAAA,MAAyB;AAAA,MAIpC,MAAM,mBAAmB,EAAE,MAAM,IAAI;AAAA,MACrC,UAAU;AAAA,IACZ,IACA;AAEJ,WAAO;AAAA,MACL,IAAI,EAAE;AAAA,MACN,OAAMC,MAAA,EAAE,SAAF,OAAAA,MAAU;AAAA,MAChB,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,MAAM,SAAS;AAAA,MACf,gBAAeC,MAAA,EAAE,kBAAF,OAAAA,MAAmB;AAAA,IACpC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,OAAO;AAClB;;;ACtLA,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,
IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AN8CO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC7B,EAAC,SAA2C,qBAC3C,OAAO,SAAmC;AAzO9C;AA0OM,UAAM,aAAa;AACnB,UAAM,cAAa,kCAAM,eAAN,YAAoB;AACvC,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ;AACvB,UAAM,UAAU,QAAQ;AACxB,WAAO,uBAAuB,EAAE,QAAQ,QAAQ,SAAS,WAAW,CAAC;AAAA,EACvE;AACF,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAGjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall","import_provider_utils","_a","_b","_c","_d","_e","_f"]}
|
package/dist/index.mjs
CHANGED
|
@@ -3,7 +3,7 @@ import {
|
|
|
3
3
|
NoSuchModelError
|
|
4
4
|
} from "@ai-sdk/provider";
|
|
5
5
|
import {
|
|
6
|
-
loadApiKey,
|
|
6
|
+
loadApiKey as loadApiKey2,
|
|
7
7
|
withoutTrailingSlash
|
|
8
8
|
} from "@ai-sdk/provider-utils";
|
|
9
9
|
import { OpenAICompatibleCompletionLanguageModel } from "@ai-sdk/openai-compatible";
|
|
@@ -684,6 +684,113 @@ var friendliProviderOptionsSchema = z2.object({
|
|
|
684
684
|
chat_template_kwargs: z2.record(z2.string(), z2.any()).nullish()
|
|
685
685
|
});
|
|
686
686
|
|
|
687
|
+
// src/get-available-models.ts
|
|
688
|
+
import { loadApiKey } from "@ai-sdk/provider-utils";
|
|
689
|
+
var DEFAULT_GRAPHQL_URL = "https://api-internal.friendli.ai/api/graphql";
|
|
690
|
+
async function postGraphQL(url, body, headers) {
|
|
691
|
+
const res = await fetch(url, {
|
|
692
|
+
method: "POST",
|
|
693
|
+
headers: {
|
|
694
|
+
"Content-Type": "application/json",
|
|
695
|
+
...headers
|
|
696
|
+
},
|
|
697
|
+
body: JSON.stringify(body)
|
|
698
|
+
});
|
|
699
|
+
let json;
|
|
700
|
+
try {
|
|
701
|
+
json = await res.json();
|
|
702
|
+
} catch (err) {
|
|
703
|
+
console.error(
|
|
704
|
+
"Failed to parse JSON response from Friendli API:",
|
|
705
|
+
err,
|
|
706
|
+
"Status:",
|
|
707
|
+
res.status,
|
|
708
|
+
res.statusText
|
|
709
|
+
);
|
|
710
|
+
throw new Error(
|
|
711
|
+
`Failed to parse JSON response from Friendli API: ${err instanceof Error ? err.message : String(err)}`
|
|
712
|
+
);
|
|
713
|
+
}
|
|
714
|
+
return json;
|
|
715
|
+
}
|
|
716
|
+
function normalizePriceUnit(unit) {
|
|
717
|
+
if (!unit) return void 0;
|
|
718
|
+
return unit;
|
|
719
|
+
}
|
|
720
|
+
async function getAvailableModelsImpl(options) {
|
|
721
|
+
var _a, _b, _c, _d, _e, _f;
|
|
722
|
+
let token;
|
|
723
|
+
try {
|
|
724
|
+
token = (_a = options.apiKey) != null ? _a : loadApiKey({
|
|
725
|
+
apiKey: void 0,
|
|
726
|
+
environmentVariableName: "FRIENDLI_TOKEN",
|
|
727
|
+
description: "FRIENDLI_TOKEN"
|
|
728
|
+
});
|
|
729
|
+
} catch (e) {
|
|
730
|
+
token = void 0;
|
|
731
|
+
}
|
|
732
|
+
const headers = {
|
|
733
|
+
...token ? { Authorization: `Bearer ${token}` } : {},
|
|
734
|
+
...options.teamId ? { "X-Friendli-Team": options.teamId } : {},
|
|
735
|
+
...(_b = options.headers) != null ? _b : {}
|
|
736
|
+
};
|
|
737
|
+
const url = (_c = options.graphqlURL) != null ? _c : DEFAULT_GRAPHQL_URL;
|
|
738
|
+
const query = `
|
|
739
|
+
query Edges {
|
|
740
|
+
serverlessEndpoints {
|
|
741
|
+
edges {
|
|
742
|
+
... on ServerlessChatEndpointCatalog {
|
|
743
|
+
id
|
|
744
|
+
name
|
|
745
|
+
status
|
|
746
|
+
price {
|
|
747
|
+
inputPrice
|
|
748
|
+
outputPrice
|
|
749
|
+
unit
|
|
750
|
+
responseTimePrice
|
|
751
|
+
priceUnitType
|
|
752
|
+
}
|
|
753
|
+
contextLength
|
|
754
|
+
}
|
|
755
|
+
}
|
|
756
|
+
}
|
|
757
|
+
}
|
|
758
|
+
`;
|
|
759
|
+
const resp = await postGraphQL(
|
|
760
|
+
url,
|
|
761
|
+
{ query, variables: {}, operationName: "Edges" },
|
|
762
|
+
headers
|
|
763
|
+
);
|
|
764
|
+
if (resp.errors && resp.errors.length > 0) {
|
|
765
|
+
throw new Error(
|
|
766
|
+
`getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join("; ")}`
|
|
767
|
+
);
|
|
768
|
+
}
|
|
769
|
+
const edges = (_f = (_e = (_d = resp.data) == null ? void 0 : _d.serverlessEndpoints) == null ? void 0 : _e.edges) != null ? _f : [];
|
|
770
|
+
const models = edges.map((e) => {
|
|
771
|
+
var _a2, _b2, _c2, _d2, _e2, _f2;
|
|
772
|
+
const warm = e.status === "WARM";
|
|
773
|
+
const pricing = e.price ? {
|
|
774
|
+
inputToken: (_a2 = e.price.inputPrice) != null ? _a2 : void 0,
|
|
775
|
+
outputToken: (_b2 = e.price.outputPrice) != null ? _b2 : void 0,
|
|
776
|
+
responseTime: (_c2 = e.price.responseTimePrice) != null ? _c2 : void 0,
|
|
777
|
+
unitType: (_d2 = e.price.priceUnitType) != null ? _d2 : void 0,
|
|
778
|
+
unit: normalizePriceUnit(e.price.unit),
|
|
779
|
+
currency: "USD"
|
|
780
|
+
} : void 0;
|
|
781
|
+
return {
|
|
782
|
+
id: e.id,
|
|
783
|
+
name: (_e2 = e.name) != null ? _e2 : void 0,
|
|
784
|
+
description: void 0,
|
|
785
|
+
pricing,
|
|
786
|
+
warm,
|
|
787
|
+
cold: warm === false,
|
|
788
|
+
contextLength: (_f2 = e.contextLength) != null ? _f2 : void 0
|
|
789
|
+
};
|
|
790
|
+
});
|
|
791
|
+
return { models };
|
|
792
|
+
}
|
|
793
|
+
|
|
687
794
|
// src/friendli-tools.ts
|
|
688
795
|
function webUrlBetaTool() {
|
|
689
796
|
return {
|
|
@@ -745,7 +852,7 @@ var friendliTools = {
|
|
|
745
852
|
// src/friendli-provider.ts
|
|
746
853
|
function createFriendli(options = {}) {
|
|
747
854
|
const getHeaders = () => ({
|
|
748
|
-
Authorization: `Bearer ${
|
|
855
|
+
Authorization: `Bearer ${loadApiKey2({
|
|
749
856
|
apiKey: options.apiKey,
|
|
750
857
|
environmentVariableName: "FRIENDLI_TOKEN",
|
|
751
858
|
description: "FRIENDLI_TOKEN"
|
|
@@ -832,6 +939,15 @@ function createFriendli(options = {}) {
|
|
|
832
939
|
provider.completion = createCompletionModel;
|
|
833
940
|
provider.embedding = createTextEmbeddingModel;
|
|
834
941
|
provider.textEmbeddingModel = createTextEmbeddingModel;
|
|
942
|
+
provider.getAvailableModels = async (opts) => {
|
|
943
|
+
var _a;
|
|
944
|
+
const defaultURL = "https://api-internal.friendli.ai/api/graphql";
|
|
945
|
+
const graphqlURL = (_a = opts == null ? void 0 : opts.graphqlURL) != null ? _a : defaultURL;
|
|
946
|
+
const apiKey = options.apiKey;
|
|
947
|
+
const teamId = options.teamId;
|
|
948
|
+
const headers = options.headers;
|
|
949
|
+
return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL });
|
|
950
|
+
};
|
|
835
951
|
provider.imageModel = createImageModel;
|
|
836
952
|
provider.transcription = createTranscriptionModel;
|
|
837
953
|
provider.speech = createSpeechModel;
|
package/dist/index.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 
'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 
'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? 
{ chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: 
z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AACF;;;AC5BA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC/BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,MAAM,qBAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,S
AC6D;AAtOjE;AAuOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAq
B,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA
,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsBA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AGz2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AA
C/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z"]}
|
|
1
|
+
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/get-available-models.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { getAvailableModelsImpl } from './get-available-models'\nimport type { FriendliAvailableModelsResponse } from './get-available-models'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): LanguageModelV2\n textEmbeddingModel(modelId: string & {}): LanguageModelV2\n /**\n * Returns the available models and their metadata.\n */\n getAvailableModels(options?: {\n graphqlURL?: string\n }): Promise<FriendliAvailableModelsResponse>\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n 
})}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n ;(provider as unknown as FriendliAIProvider).getAvailableModels =\n async (opts?: { graphqlURL?: string }) => {\n const defaultURL = 'https://api-internal.friendli.ai/api/graphql'\n const graphqlURL = opts?.graphqlURL ?? 
defaultURL\n const apiKey = options.apiKey\n const teamId = options.teamId\n const headers = options.headers\n return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL })\n }\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n // 'getAvailableModels' is declared here.\n return provider as unknown as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n 
// readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? 
{ chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: 
z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { loadApiKey } from '@ai-sdk/provider-utils'\n\ntype Pricing = {\n inputToken?: number\n outputToken?: number\n responseTime?: number\n unitType?: 'TOKEN' | 'SECOND'\n currency?: string\n unit?: string\n}\n\nexport type FriendliAvailableModel = {\n id: string\n name?: string | null\n description?: string | null\n pricing?: Pricing\n warm?: boolean\n cold?: boolean\n contextLength?: number | null\n}\n\nexport type FriendliAvailableModelsResponse = {\n models: FriendliAvailableModel[]\n}\n\ntype GraphQLResponse<T> = {\n data?: T\n errors?: Array<{ message: string }>\n}\n\nconst DEFAULT_GRAPHQL_URL = 'https://api-internal.friendli.ai/api/graphql'\n\nasync function postGraphQL<T>(\n url: string,\n body: {\n query: string\n variables?: Record<string, unknown>\n operationName?: string\n },\n headers: Record<string, string>,\n): Promise<GraphQLResponse<T>> {\n const res = await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: JSON.stringify(body),\n })\n\n let json: GraphQLResponse<T>\n try {\n json = await res.json()\n } catch (err) {\n console.error(\n 'Failed to parse JSON response from Friendli API:',\n err,\n 'Status:',\n res.status,\n res.statusText,\n )\n throw new Error(\n `Failed to parse JSON response from Friendli API: ${err instanceof Error ? err.message : String(err)}`,\n )\n }\n return json\n}\n\ntype ServerlessEndpointEdge = {\n id: string\n name?: string | null\n status?: 'WARM' | 'COLD' | string | null\n price?: {\n inputPrice?: number | null\n outputPrice?: number | null\n unit?: string | null\n responseTimePrice?: number | null\n priceUnitType?: 'TOKEN' | 'SECOND' | null\n } | null\n contextLength?: number | null\n}\n\ntype ServerlessEndpointsQuery = {\n serverlessEndpoints?: {\n edges?: ServerlessEndpointEdge[]\n } | null\n}\n\nfunction normalizePriceUnit(unit?: string | null): string | undefined {\n if (!unit) return undefined\n return unit\n}\n\nexport async function getAvailableModelsImpl(options: {\n apiKey?: string\n teamId?: string\n headers?: Record<string, string>\n graphqlURL?: string\n}): Promise<FriendliAvailableModelsResponse> {\n let token: string | undefined\n try {\n token =\n options.apiKey ??\n loadApiKey({\n apiKey: undefined,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })\n } catch {\n token = undefined\n }\n\n const headers: Record<string, string> = {\n ...(token ? { Authorization: `Bearer ${token}` } : {}),\n ...(options.teamId ? { 'X-Friendli-Team': options.teamId } : {}),\n ...(options.headers ?? {}),\n }\n\n const url = options.graphqlURL ?? DEFAULT_GRAPHQL_URL\n\n const query = `\n query Edges {\n serverlessEndpoints {\n edges {\n ... 
on ServerlessChatEndpointCatalog {\n id\n name\n status\n price {\n inputPrice\n outputPrice\n unit\n responseTimePrice\n priceUnitType\n }\n contextLength\n }\n }\n }\n }\n `\n\n const resp = await postGraphQL<ServerlessEndpointsQuery>(\n url,\n { query, variables: {}, operationName: 'Edges' },\n headers,\n )\n\n if (resp.errors && resp.errors.length > 0) {\n throw new Error(\n `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join('; ')}`,\n )\n }\n\n const edges = resp.data?.serverlessEndpoints?.edges ?? []\n\n const models: FriendliAvailableModel[] = edges.map((e) => {\n const warm = e.status === 'WARM'\n const pricing: Pricing | undefined = e.price\n ? {\n inputToken: e.price.inputPrice ?? undefined,\n outputToken: e.price.outputPrice ?? undefined,\n responseTime: e.price.responseTimePrice ?? undefined,\n unitType: (e.price.priceUnitType ?? undefined) as\n | 'TOKEN'\n | 'SECOND'\n | undefined,\n unit: normalizePriceUnit(e.price.unit),\n currency: 'USD',\n }\n : undefined\n\n return {\n id: e.id,\n name: e.name ?? undefined,\n description: undefined,\n pricing,\n warm,\n cold: warm === false,\n contextLength: e.contextLength ?? undefined,\n }\n })\n\n return { models }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE,cAAAA;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AACF;;;AC5BA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC/BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,MAAM,qBAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,
WACJ,SAC6D;AAtOjE;AAuOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM
,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAA
Q;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsBA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AG32BD,SAAS,kBAAkB;AA8B3B,IAAM,sBAAsB;AAE5B,eAAe,YACb,KACA,MAKA,SAC6B;AAC7B,QAAM,MAAM,MAAM,MAAM,KAAK;AAAA,IAC3B,QAAQ;AAAA,I
ACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL;AAAA,IACA,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B,CAAC;AAED,MAAI;AACJ,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,SAAS,KAAK;AACZ,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,IAAI;AAAA,MACJ,IAAI;AAAA,IACN;AACA,UAAM,IAAI;AAAA,MACR,oDAAoD,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IACtG;AAAA,EACF;AACA,SAAO;AACT;AAsBA,SAAS,mBAAmB,MAA0C;AACpE,MAAI,CAAC,KAAM,QAAO;AAClB,SAAO;AACT;AAEA,eAAsB,uBAAuB,SAKA;AAlG7C;AAmGE,MAAI;AACJ,MAAI;AACF,aACE,aAAQ,WAAR,YACA,WAAW;AAAA,MACT,QAAQ;AAAA,MACR,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,EACL,SAAQ;AACN,YAAQ;AAAA,EACV;AAEA,QAAM,UAAkC;AAAA,IACtC,GAAI,QAAQ,EAAE,eAAe,UAAU,KAAK,GAAG,IAAI,CAAC;AAAA,IACpD,GAAI,QAAQ,SAAS,EAAE,mBAAmB,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC9D,IAAI,aAAQ,YAAR,YAAmB,CAAC;AAAA,EAC1B;AAEA,QAAM,OAAM,aAAQ,eAAR,YAAsB;AAElC,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBd,QAAM,OAAO,MAAM;AAAA,IACjB;AAAA,IACA,EAAE,OAAO,WAAW,CAAC,GAAG,eAAe,QAAQ;AAAA,IAC/C;AAAA,EACF;AAEA,MAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IACpF;AAAA,EACF;AAEA,QAAM,SAAQ,sBAAK,SAAL,mBAAW,wBAAX,mBAAgC,UAAhC,YAAyC,CAAC;AAExD,QAAM,SAAmC,MAAM,IAAI,CAAC,MAAM;AA5J5D,QAAAC,KAAAC,KAAAC,KAAAC,KAAAC,KAAAC;AA6JI,UAAM,OAAO,EAAE,WAAW;AAC1B,UAAM,UAA+B,EAAE,QACnC;AAAA,MACE,aAAYL,MAAA,EAAE,MAAM,eAAR,OAAAA,MAAsB;AAAA,MAClC,cAAaC,MAAA,EAAE,MAAM,gBAAR,OAAAA,MAAuB;AAAA,MACpC,eAAcC,MAAA,EAAE,MAAM,sBAAR,OAAAA,MAA6B;AAAA,MAC3C,WAAWC,MAAA,EAAE,MAAM,kBAAR,OAAAA,MAAyB;AAAA,MAIpC,MAAM,mBAAmB,EAAE,MAAM,IAAI;AAAA,MACrC,UAAU;AAAA,IACZ,IACA;AAEJ,WAAO;AAAA,MACL,IAAI,EAAE;AAAA,MACN,OAAMC,MAAA,EAAE,SAAF,OAAAA,MAAU;AAAA,MAChB,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,MAAM,SAAS;AAAA,MACf,gBAAeC,MAAA,EAAE,kBAAF,OAAAA,MAAmB;AAAA,IACpC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,OAAO;AAClB;;;ACtLA,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AN8CO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAUC,YAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,
IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC7B,EAAC,SAA2C,qBAC3C,OAAO,SAAmC;AAzO9C;AA0OM,UAAM,aAAa;AACnB,UAAM,cAAa,kCAAM,eAAN,YAAoB;AACvC,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ;AACvB,UAAM,UAAU,QAAQ;AACxB,WAAO,uBAAuB,EAAE,QAAQ,QAAQ,SAAS,WAAW,CAAC;AAAA,EACvE;AACF,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAGjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["loadApiKey","createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z","_a","_b","_c","_d","_e","_f","loadApiKey"]}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@friendliai/ai-provider",
-  "version": "0.3.0-beta.6",
+  "version": "0.3.0-beta.7",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -25,9 +25,9 @@
     }
   },
   "dependencies": {
-    "@ai-sdk/openai-compatible": "1.0.
+    "@ai-sdk/openai-compatible": "1.0.10",
     "@ai-sdk/provider": "2.0.0",
-    "@ai-sdk/provider-utils": "3.0.
+    "@ai-sdk/provider-utils": "3.0.4"
   },
   "devDependencies": {
     "@edge-runtime/vm": "^5.0.0",