@friendliai/ai-provider 0.2.1 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/README.md +21 -21
- package/dist/index.d.mts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js +4 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +4 -3
- package/dist/index.mjs.map +1 -1
- package/package.json +9 -9
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,23 @@
 # @friendliai/ai-provider
 
+## 0.2.4
+
+### Patch Changes
+
+- 3be71b2: add deepseek r1 model
+
+## 0.2.3
+
+### Patch Changes
+
+- 3085e03: update serverless model list
+
+## 0.2.2
+
+### Patch Changes
+
+- b3e1fb1: Add Model Llama 3.3 70B Serverless
+
 ## 0.2.1
 
 ### Patch Changes
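For context, a minimal usage sketch (not part of the published package) for the `deepseek-r1` serverless model added in 0.2.4, following the `generateText` pattern from the package README below; the prompt text is illustrative only:

```ts
import { friendli } from '@friendliai/ai-provider'
import { generateText } from 'ai'

// 'deepseek-r1' joined the serverless model list in 0.2.4.
const { text } = await generateText({
  model: friendli('deepseek-r1'),
  prompt: 'Summarize the main idea of reinforcement learning in two sentences.',
})

console.log(text)
```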
package/README.md
CHANGED
@@ -25,7 +25,7 @@ Check the [FriendliAI documentation](https://friendli.ai/docs/guides/personal_ac
 ## Provider Instance
 
 ```ts
-import { friendli } from "@friendliai/ai-provider";
+import { friendli } from '@friendliai/ai-provider'
 ```
 
 ## Language Models
@@ -34,7 +34,7 @@ You can create [FriendliAI models](https://friendli.ai/docs/guides/serverless_en
 The first argument is the model id, e.g. `meta-llama-3.1-8b-instruct`.
 
 ```ts
-const model = friendli("meta-llama-3.1-8b-instruct");
+const model = friendli('meta-llama-3.1-8b-instruct')
 ```
 
 ### Example: Generating text
@@ -56,18 +56,18 @@ const { text } = await generateText({
 Specify a specific pattern (e.g., CSV), character sets, or specific language characters (e.g., Korean Hangul characters) for your LLM's output.
 
 ```ts
-import { friendli } from "@friendliai/ai-provider";
-import { generateText } from "ai";
+import { friendli } from '@friendliai/ai-provider'
+import { generateText } from 'ai'
 
 const { text } = await generateText({
-  model: friendli("meta-llama-3.1-8b-instruct", {
-    regex: "[\n ,.?!0-9\uac00-\ud7af]*",
+  model: friendli('meta-llama-3.1-8b-instruct', {
+    regex: '[\n ,.?!0-9\uac00-\ud7af]*',
   }),
   maxTokens: 40,
-  prompt: "who is the first king of the Joseon Dynasty?",
-});
+  prompt: 'who is the first king of the Joseon Dynasty?',
+})
 
-console.log(text);
+console.log(text)
 ```
 
 ### Example: Using built-in tools (Beta)
@@ -77,24 +77,24 @@ If you use `@friendliai/ai-provider`, you can use the [built-in tools](https://f
 Built-in tools allow models to use tools to generate better answers. For example, a `web:search` tool can provide up-to-date answers to current questions.
 
 ```ts highlight="1,8,9,10,11,12,13,14,15"
-import { friendli } from "@friendliai/ai-provider";
-import { convertToCoreMessages, streamText } from "ai";
+import { friendli } from '@friendliai/ai-provider'
+import { convertToCoreMessages, streamText } from 'ai'
 
 export async function POST(req: Request) {
-  const { messages } = await req.json();
+  const { messages } = await req.json()
 
   const result = await streamText({
-    model: friendli("meta-llama-3.1-8b-instruct", {
+    model: friendli('meta-llama-3.1-8b-instruct', {
       tools: [
-        { type: "web:search" },
-        { type: "math:calculator" },
-        { type: "code:python-interpreter" }, // and more tools..!!
+        { type: 'web:search' },
+        { type: 'math:calculator' },
+        { type: 'code:python-interpreter' }, // and more tools..!!
       ],
     }),
     messages: convertToCoreMessages(messages),
-  });
+  })
 
-  return result.toDataStreamResponse();
+  return result.toDataStreamResponse()
 }
 ```
 
@@ -106,10 +106,10 @@ FriendliAI language models can also be used in the `streamText`, `generateObject
 We can also use `@ai-sdk/openai` with OpenAI compatibility.
 
 ```ts
-import { createOpenAI } from "@ai-sdk/openai";
+import { createOpenAI } from '@ai-sdk/openai'
 
 const friendli = createOpenAI({
-  baseURL: "https://api.friendli.ai/serverless/v1",
+  baseURL: 'https://api.friendli.ai/serverless/v1',
   apiKey: process.env.FRIENDLI_TOKEN,
-});
+})
 ```
package/dist/index.d.mts
CHANGED
@@ -3,10 +3,10 @@ import { FetchFunction } from '@ai-sdk/provider-utils';
 import { OpenAICompatibleChatSettings } from '@ai-sdk/openai-compatible';
 import { z } from 'zod';
 
-declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.1-70b-instruct", "mixtral-8x7b-instruct-v0-1"];
+declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.1-70b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1"];
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
-type FriendliAIBetaChatModelId = "llama-3.2-11b-vision-instruct" | (string & {});
+type FriendliAIBetaChatModelId = 'llama-3.2-11b-vision-instruct' | (string & {});
 interface FriendliAISharedSettings {
 /**
 * Sets the endpoint to which the request will be sent.
@@ -15,14 +15,14 @@ interface FriendliAISharedSettings {
 * serverless: automatically selected as one of "/serverless/beta", "/serverless/v1", or "/serverless/tools/v1"
 * Ignored if baseURL is specified.
 */
-endpoint?: "auto" | "dedicated" | "serverless";
+endpoint?: 'auto' | 'dedicated' | 'serverless';
 }
 interface FriendliAIChatSettings extends FriendliAISharedSettings, OpenAICompatibleChatSettings {
 /**
 * BETA FEATURE: Include the model's training loss in the response.
 */
 tools?: Array<{
-type: "web:url" | "web:search" | "math:calendar" | "math:statistics" | "math:calculator" | "code:python-interpreter";
+type: 'web:url' | 'web:search' | 'math:calendar' | 'math:statistics' | 'math:calculator' | 'code:python-interpreter';
 }>;
 /**
 * Whether to enable parallel function calling during tool use. Default to true.
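The settings surfaced in this declaration file (`endpoint`, `tools`, `parallelToolCalls`) are passed as the provider's second argument; a minimal sketch assuming the option shapes exactly as typed above, with illustrative values:

```ts
import { friendli } from '@friendliai/ai-provider'

// Route explicitly to the serverless endpoints and enable built-in tools;
// per the bundled provider source, a non-empty tools array makes requests
// go to the serverless tools endpoint.
const model = friendli('meta-llama-3.1-8b-instruct', {
  endpoint: 'serverless',
  tools: [{ type: 'web:search' }, { type: 'math:calculator' }],
  parallelToolCalls: true,
})
```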
package/dist/index.d.ts
CHANGED
@@ -3,10 +3,10 @@ import { FetchFunction } from '@ai-sdk/provider-utils';
 import { OpenAICompatibleChatSettings } from '@ai-sdk/openai-compatible';
 import { z } from 'zod';
 
-declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.1-70b-instruct", "mixtral-8x7b-instruct-v0-1"];
+declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.1-70b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1"];
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
-type FriendliAIBetaChatModelId = "llama-3.2-11b-vision-instruct" | (string & {});
+type FriendliAIBetaChatModelId = 'llama-3.2-11b-vision-instruct' | (string & {});
 interface FriendliAISharedSettings {
 /**
 * Sets the endpoint to which the request will be sent.
@@ -15,14 +15,14 @@ interface FriendliAISharedSettings {
 * serverless: automatically selected as one of "/serverless/beta", "/serverless/v1", or "/serverless/tools/v1"
 * Ignored if baseURL is specified.
 */
-endpoint?: "auto" | "dedicated" | "serverless";
+endpoint?: 'auto' | 'dedicated' | 'serverless';
 }
 interface FriendliAIChatSettings extends FriendliAISharedSettings, OpenAICompatibleChatSettings {
 /**
 * BETA FEATURE: Include the model's training loss in the response.
 */
 tools?: Array<{
-type: "web:url" | "web:search" | "math:calendar" | "math:statistics" | "math:calculator" | "code:python-interpreter";
+type: 'web:url' | 'web:search' | 'math:calendar' | 'math:statistics' | 'math:calculator' | 'code:python-interpreter';
 }>;
 /**
 * Whether to enable parallel function calling during tool use. Default to true.
package/dist/index.js
CHANGED
@@ -34,7 +34,8 @@ var import_openai_compatible = require("@ai-sdk/openai-compatible");
 var FriendliAIServerlessModelIds = [
 "meta-llama-3.1-8b-instruct",
 "meta-llama-3.1-70b-instruct",
-"mixtral-8x7b-instruct-v0-1"
+"meta-llama-3.3-70b-instruct",
+"deepseek-r1"
 ];
 
 // src/friendli-chat-language-model.ts
@@ -303,7 +304,7 @@ var FriendliAIChatLanguageModel = class {
 toolCallType: "function",
 toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
 toolName: toolCall.function.name,
-args: toolCall.function.arguments
+args: typeof toolCall.function.arguments === "string" ? toolCall.function.arguments : JSON.stringify(toolCall.function.arguments)
 };
 }),
 finishReason: (0, import_internal.mapOpenAICompatibleFinishReason)(choice.finish_reason),
@@ -528,7 +529,7 @@ var friendliAIChatResponseSchema = import_zod2.z.object({
 type: import_zod2.z.literal("function"),
 function: import_zod2.z.object({
 name: import_zod2.z.string(),
-arguments: import_zod2.z.string()
+arguments: import_zod2.z.union([import_zod2.z.string(), import_zod2.z.any()]).nullish()
 })
 })
 ).nullish()
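The `args` and `arguments` changes above let tool-call arguments arrive either as a JSON string or as an already-parsed object, and normalize them to a string before handing them to the AI SDK; a standalone sketch of that guard, with a hypothetical `toolCall` value:

```ts
// Hypothetical tool call payload; real responses may carry `arguments`
// as a JSON string or as a parsed object.
const toolCall = {
  function: {
    name: 'web:search',
    arguments: { query: 'FriendliAI serverless models' } as string | object,
  },
}

// Same guard as the patched dist code: always pass a string downstream.
const args =
  typeof toolCall.function.arguments === 'string'
    ? toolCall.function.arguments
    : JSON.stringify(toolCall.function.arguments)

console.log(args) // {"query":"FriendliAI serverless models"}
```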
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from \"./friendli-provider\";\nexport type { FriendliAIErrorData } from \"./friendli-error\";\nexport type {\n FriendliAIProvider,\n FriendliAIProviderSettings,\n} from \"./friendli-provider\";\n","import { LanguageModelV1, NoSuchModelError } from \"@ai-sdk/provider\";\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from \"@ai-sdk/provider-utils\";\nimport { OpenAICompatibleCompletionLanguageModel } from \"@ai-sdk/openai-compatible\";\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIBetaChatModelId,\n FriendliAIChatSettings,\n FriendliAICompletionSettings,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from \"./friendli-settings\";\nimport { FriendliAIChatLanguageModel } from \"./friendli-chat-language-model\";\nimport { friendliaiErrorStructure } from \"./friendli-error\";\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI__TOKEN)\n */\n apiKey?: string;\n /**\n * Base URL for the API calls.\n */\n baseURL?: string;\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>;\n /**\n * FriendliAI Team ID.\n */\n teamId?: string;\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\nexport interface FriendliAIProvider {\n /**\n * Creates a model for text generation.\n */\n (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * A model that has not yet been officially released\n */\n beta(\n modelId: FriendliAIBetaChatModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * Creates a chat model for text generation.\n */\n chat(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n chatModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * Creates a completion model for text generation.\n */\n completion(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n completionModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * Creates a text embedding model for text generation.\n */\n embedding(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n textEmbeddingModel(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: \"FRIENDLI_TOKEN\",\n description: \"FRIENDLI_TOKEN\",\n })}`,\n \"X-Friendli-Team\": options.teamId,\n ...options.headers,\n });\n\n const baseURLAutoSelect = (\n modelId: string,\n endpoint: \"auto\" | \"dedicated\" | \"serverless\" | \"beta\",\n baseURL: string | undefined,\n tools?: Array<any>,\n ): {\n baseURL: string;\n type: \"dedicated\" | \"serverless\" | \"tools\" | \"custom\" | \"beta\";\n } => {\n // Ignore options if baseURL is specified\n const 
customBaseURL = withoutTrailingSlash(baseURL);\n if (typeof customBaseURL === \"string\") {\n return { baseURL: customBaseURL, type: \"custom\" };\n }\n\n const FriendliBaseURL = {\n beta: \"https://api.friendli.ai/serverless/beta\",\n serverless: \"https://api.friendli.ai/serverless/v1\",\n tools: \"https://api.friendli.ai/serverless/tools/v1\",\n dedicated: \"https://api.friendli.ai/dedicated/v1\",\n };\n\n if (endpoint === \"beta\") {\n return {\n baseURL: FriendliBaseURL.beta,\n type: \"beta\",\n };\n }\n\n if (\n // If the endpoint setting is serverless or auto and the model is floating on serverless,\n endpoint === \"serverless\" ||\n (endpoint === \"auto\" &&\n Object.values(FriendliAIServerlessModelIds).includes(\n modelId as FriendliAIServerlessModelId,\n ))\n ) {\n if (tools && tools.length > 0) {\n return {\n baseURL: FriendliBaseURL.tools,\n type: \"tools\",\n };\n }\n\n return {\n baseURL: FriendliBaseURL.serverless,\n type: \"serverless\",\n };\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: \"dedicated\",\n };\n }\n };\n\n const createChatModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || \"auto\",\n options.baseURL,\n settings.tools,\n );\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: \"json\",\n });\n };\n\n const createCompletionModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAICompletionSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || \"auto\",\n options.baseURL,\n );\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n });\n };\n\n const createBetaModel = (\n modelId: FriendliAIBetaChatModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n \"beta\",\n options.baseURL,\n );\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: \"json\",\n });\n };\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: \"textEmbeddingModel\" });\n };\n\n const provider = function (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ) {\n return createChatModel(modelId, settings);\n };\n\n provider.beta = createBetaModel;\n\n provider.chat = createChatModel;\n provider.chatModel = createChatModel;\n\n provider.completion = createCompletionModel;\n provider.completionModel = createCompletionModel;\n\n provider.embedding = createTextEmbeddingModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as FriendliAIProvider;\n}\n\nexport const friendli = createFriendli({});\n","import {\n OpenAICompatibleChatSettings,\n OpenAICompatibleCompletionSettings,\n} from \"@ai-sdk/openai-compatible\";\n\n// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 
\"meta-llama-3.1-8b-instruct\",\n \"meta-llama-3.1-70b-instruct\",\n \"mixtral-8x7b-instruct-v0-1\",\n] as const;\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number];\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {});\n\nexport type FriendliAIBetaChatModelId =\n | \"llama-3.2-11b-vision-instruct\"\n | (string & {});\n\ninterface FriendliAISharedSettings {\n /**\n * Sets the endpoint to which the request will be sent.\n * auto: automatically selected based on model_id\n * dedicated: Fixed to \"/dedicated/v1\"\n * serverless: automatically selected as one of \"/serverless/beta\", \"/serverless/v1\", or \"/serverless/tools/v1\"\n * Ignored if baseURL is specified.\n */\n endpoint?: \"auto\" | \"dedicated\" | \"serverless\";\n}\n\nexport interface FriendliAIChatSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleChatSettings {\n /**\n * BETA FEATURE: Include the model's training loss in the response.\n */\n tools?: Array<{\n type:\n | \"web:url\"\n | \"web:search\"\n | \"math:calendar\"\n | \"math:statistics\"\n | \"math:calculator\"\n | \"code:python-interpreter\";\n }>;\n\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls?: boolean;\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n regex?: RegExp;\n}\n\nexport interface FriendliAICompletionSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleCompletionSettings {}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport {\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n postJsonToApi,\n} from \"@ai-sdk/provider-utils\";\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n OpenAICompatibleChatConfig,\n} from \"@ai-sdk/openai-compatible/internal\";\n\nimport { z } from \"zod\";\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIChatSettings,\n} from \"./friendli-settings\";\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n friendliaiFailedResponseHandler,\n} from \"./friendli-error\";\nimport { prepareTools } from \"./friendli-prepare-tools\";\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = \"v1\";\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: FriendliAILanguageModelId;\n readonly settings: FriendliAIChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n\n constructor(\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n );\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true;\n }\n\n get defaultObjectGenerationMode(): \"json\" | \"tool\" {\n return this.config.defaultObjectGenerationMode ?? 
\"json\";\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (\n responseFormat?.type === \"json\" &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"responseFormat\",\n details:\n \"JSON response format schema is only supported with structuredOutputs\",\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n parallel_tool_calls: this.settings.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n top_k: topK,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === \"json\"\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: \"json_schema\",\n json_schema: {\n schema: responseFormat.schema,\n description: responseFormat.description,\n },\n }\n : { type: \"json_object\" }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n if (this.settings.regex != null && type !== \"regular\") {\n throw new UnsupportedFunctionalityError({\n functionality:\n \"egular expression is only supported with regular mode (generateText, streamText)\",\n });\n }\n\n switch (type) {\n case \"regular\": {\n if (this.settings.regex != null) {\n if (this.settings.tools != null || mode.tools != null) {\n throw new UnsupportedFunctionalityError({\n functionality:\n \"Regular expression and tools cannot be used together. Use either regular expression or tools.\",\n });\n }\n\n return {\n args: {\n ...baseArgs,\n response_format: {\n type: \"regex\",\n schema: this.settings.regex.source,\n },\n },\n warnings,\n };\n }\n\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n tools: this.settings.tools,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case \"object-json\": {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? 
{\n type: \"json_schema\",\n json_schema: {\n schema: mode.schema,\n description: mode.description,\n },\n }\n : { type: \"json_object\" },\n },\n warnings,\n };\n }\n\n case \"object-tool\": {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: \"function\",\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: \"function\",\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: false });\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: \"/chat/completions\",\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: false,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n friendliAIChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map((toolCall) => ({\n toolCallType: \"function\",\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n response: getResponseMetadata(response),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1[\"doStream\"]>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: \"/chat/completions\",\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n stream_options: { include_usage: true },\n },\n failedResponseHandler: friendliaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n friendliaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: \"function\";\n function: {\n name: string;\n arguments: string;\n };\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = \"unknown\";\n let usage: {\n promptTokens: number | undefined;\n completionTokens: number | undefined;\n } = {\n promptTokens: undefined,\n completionTokens: undefined,\n };\n let isFirstChunk = true;\n\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined;\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof friendliaiChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = \"error\";\n controller.enqueue({ type: \"error\", error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // hosted tool execution case\n if (\"status\" in value) {\n switch (value.status) {\n case \"STARTED\":\n break;\n\n case \"UPDATING\":\n break;\n\n case \"ENDED\":\n break;\n\n case \"ERRORED\":\n finishReason = \"error\";\n break;\n\n default:\n finishReason = \"error\";\n controller.enqueue({\n type: \"error\",\n error: new Error(\n `Unsupported tool call status: ${value.status}`,\n ),\n });\n }\n return;\n }\n\n // handle error chunks:\n if (\"message\" in value) {\n console.error(\"Error chunk:\", value);\n finishReason = \"error\";\n controller.enqueue({ type: \"error\", error: value.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: \"response-metadata\",\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens ?? undefined,\n completionTokens: value.usage.completion_tokens ?? undefined,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: \"text-delta\",\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n // Tool call start. 
FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== \"function\") {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: \"function\",\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? \"\",\n },\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: \"tool-call-delta\",\n toolCallType: \"function\",\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n\n continue;\n }\n\n // existing tool call, merge\n const toolCall = toolCalls[index];\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? \"\";\n }\n\n // send delta\n controller.enqueue({\n type: \"tool-call-delta\",\n toolCallType: \"function\",\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? \"\",\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: \"finish\",\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal(\"assistant\").nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal(\"function\"),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum([\"assistant\"]).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal(\"function\").optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum([\"ENDED\", \"STARTED\", \"ERRORED\", \"UPDATING\"]),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum([\"INVALID_PARAMETER\", \"UNKNOWN\"]),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n]);\n","import { z } from \"zod\";\nimport { ProviderErrorStructure } from \"@ai-sdk/openai-compatible\";\nimport { createJsonErrorResponseHandler } from \"@ai-sdk/provider-utils\";\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n});\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n };\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n);\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\n\nimport { FriendliAIChatSettings } from \"./friendli-settings\";\n\nexport function prepareTools({\n mode,\n tools: hostedTools,\n}: {\n mode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n type: \"regular\";\n };\n\n tools?: FriendliAIChatSettings[\"tools\"];\n}): {\n tools:\n | undefined\n | Array<{\n type: string;\n files?: string[];\n }>\n | Array<{\n 
type: \"function\";\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: \"function\"; function: { name: string } }\n | \"auto\"\n | \"none\"\n | \"required\"\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null && hostedTools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const mappedTools: Array<{\n type: \"function\";\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n if (tools) {\n for (const tool of tools) {\n if (tool.type === \"provider-defined\") {\n toolWarnings.push({ type: \"unsupported-tool\", tool });\n } else {\n mappedTools.push({\n type: \"function\",\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n }\n\n const mappedHostedTools = hostedTools?.map((tool) => {\n return {\n type: tool.type,\n };\n });\n\n if (toolChoice == null) {\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case \"auto\":\n case \"none\":\n case \"required\":\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: type,\n toolWarnings,\n };\n case \"tool\":\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: {\n type: \"function\",\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAAkD;AAClD,IAAAC,yBAIO;AACP,+BAAwD;;;ACCjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF;;;ACXA,IAAAC,mBASO;AACP,IAAAC,yBAUO;AACP,sBAKO;AAEP,IAAAC,cAAkB;;;AC5BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AACpB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,sCAAkC;AAAA,EAC7C;AACF;;;AClBA,sBAIO;AAIA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA,OAAO;AACT,GA4BE;AAvCF;AAyCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,QAAQ,eAAe,MAAM;AACxC,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,cAOD,CAAC;AAEN,MAAI,OAAO;AACT,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,oBAAoB;AACpC,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,MACtD,OAAO;AACL,oBAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,KAAK;AAAA,YACX,aAAa,KAAK;AAAA,YAClB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,2CAAa,IAAI,CAAC,SAAS;AACnD,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,MAC5D,aAAa;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AF9EO,IAAM,8BAAN,MAA6D;AAAA,EAWlE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AA1ClC;AAyDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAEd,SAAK,4BAAwB;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA+C;AApErD;AAqEI,YAAO,UAAK,OAAO,gCAAZ,YAA2C;AAAA,EACpD;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,qBAAqB,KAAK,SAAS;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA;AAAA,MAGA,cAAU,uDAAsC,MAAM;AAAA,IACxD;AAEA,QAAI,KAAK,SAAS,SAAS,QAAQ,SAAS,WAAW;AACrD,YAAM,IAAI,+CAA8B;AAAA,QACtC,eACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,YAAI,KAAK,SAAS,SAAS,MAAM;AAC/B,cAAI,KAAK,SAAS,SAAS,QAAQ,KAAK,SAAS,MAAM;AACrD,kBAAM,IAAI,+CAA8B;AAAA,cACtC,eACE;AAAA,YACJ,CAAC;AAAA,UACH;AAEA,iBAAO;AAAA,YACL,MAAM;AAAA,cACJ,GAAG;AAAA,cACH,iBAAiB;AAAA,gBACf,MAAM;AAAA,gBACN,QAAQ,KAAK,SAAS,MAAM;AAAA,cAC9B;AAAA,YACF;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,OAAO,KAAK,SAAS;AAAA,QACvB,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,i
BACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAzOjE;AA0OI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,MAAM,CAAC;AAEtD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,CAAC,aAAU;AArQ3D,YAAAC;AAqQ+D;AAAA,UACvD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,cAAU,qCAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AAErD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,QACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,MACxC;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI,eAAe;AAEnB,QAAI;AACJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA9UvC;AAgVY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,YAAY,OAAO;AACrB,sBAAQ,MAAM,QAAQ;AAAA,gBACpB,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH,iCAAe;AACf;AAAA,gBAEF;AACE,iCAAe;AACf,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,OAAO,IAAI;AAAA,sBACT,iCAAiC,MAAM,MAAM;AAAA,oBAC/C;AAAA,kBACF,CAAC;AAAA,cACL;AACA;AAAA,YACF;AAGA,gBAAI,aAAa,OAAO;AACtB,sBAAQ,MAAM,gBAAgB,KAAK;AACnC,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,QAAQ,CAAC;AAC1D;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AAAA,gBAC3C,mBAAkB,WAAM,MAAM,sBAAZ,YAAiC;AAAA,cACrD;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MA
AM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AAAA,oBACH;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAzgB5B;AA0gBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAGA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,
QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;;;AF5hBM,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,UACA,SACA,UAIG;AAEH,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QAAI,OAAO,kBAAkB,UAAU;AACrC,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,UAAM,kBAAkB;AAAA,MACtB,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,MACP,WAAW;AAAA,IACb;AAEA,QAAI,aAAa,QAAQ;AACvB,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAEA;AAAA;AAAA,MAEE,aAAa,gBACZ,aAAa,UACZ,OAAO,OAAO,4BAA4B,EAAE;AAAA,QAC1C;AAAA,MACF;AAAA,MACF;AACA,UAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAyC,CAAC,MACvC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,iEAAwC,SAAS,UAAU;AAAA,MACpE,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AAEA,QAAM,WAAW,SACf,SACA,UACA;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,WAAS,OAAO;AAChB,WAAS,YAAY;AAErB,WAAS,aAAa;AACtB,WAAS,kBAAkB;AAE3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,WAAW,eAAe,CAAC,CAAC;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
+
{"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from './friendli-provider'\nexport type { FriendliAIErrorData } from './friendli-error'\nexport type {\n FriendliAIProvider,\n FriendliAIProviderSettings,\n} from './friendli-provider'\n","import { LanguageModelV1, NoSuchModelError } from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIBetaChatModelId,\n FriendliAIChatSettings,\n FriendliAICompletionSettings,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI__TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider {\n /**\n * Creates a model for text generation.\n */\n (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * A model that has not yet been officially released\n */\n beta(\n modelId: FriendliAIBetaChatModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * Creates a chat model for text generation.\n */\n chat(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n chatModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * Creates a completion model for text generation.\n */\n completion(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n completionModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * Creates a text embedding model for text generation.\n */\n embedding(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n textEmbeddingModel(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n endpoint: 'auto' | 'dedicated' | 'serverless' | 'beta',\n baseURL: string | undefined,\n tools?: Array<unknown>,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'tools' | 'custom' | 'beta'\n } => {\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (typeof 
customBaseURL === 'string') {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n const FriendliBaseURL = {\n beta: 'https://api.friendli.ai/serverless/beta',\n serverless: 'https://api.friendli.ai/serverless/v1',\n tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n if (endpoint === 'beta') {\n return {\n baseURL: FriendliBaseURL.beta,\n type: 'beta',\n }\n }\n\n if (\n // If the endpoint setting is serverless or auto and the model is floating on serverless,\n endpoint === 'serverless' ||\n (endpoint === 'auto' &&\n Object.values(FriendliAIServerlessModelIds).includes(\n modelId as FriendliAIServerlessModelId,\n ))\n ) {\n if (tools && tools.length > 0) {\n return {\n baseURL: FriendliBaseURL.tools,\n type: 'tools',\n }\n }\n\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n\n const createChatModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || 'auto',\n options.baseURL,\n settings.tools,\n )\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: 'json',\n })\n }\n\n const createCompletionModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAICompletionSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || 'auto',\n options.baseURL,\n )\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createBetaModel = (\n modelId: FriendliAIBetaChatModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n 'beta',\n options.baseURL,\n )\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: 'json',\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n\n const provider = function (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ) {\n return createChatModel(modelId, settings)\n }\n\n provider.beta = createBetaModel\n\n provider.chat = createChatModel\n provider.chatModel = createChatModel\n\n provider.completion = createCompletionModel\n provider.completionModel = createCompletionModel\n\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n\n return provider as FriendliAIProvider\n}\n\nexport const friendli = createFriendli({})\n","import {\n OpenAICompatibleChatSettings,\n OpenAICompatibleCompletionSettings,\n} from '@ai-sdk/openai-compatible'\n\n// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.1-70b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n] 
as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n\nexport type FriendliAIBetaChatModelId =\n | 'llama-3.2-11b-vision-instruct'\n | (string & {})\n\ninterface FriendliAISharedSettings {\n /**\n * Sets the endpoint to which the request will be sent.\n * auto: automatically selected based on model_id\n * dedicated: Fixed to \"/dedicated/v1\"\n * serverless: automatically selected as one of \"/serverless/beta\", \"/serverless/v1\", or \"/serverless/tools/v1\"\n * Ignored if baseURL is specified.\n */\n endpoint?: 'auto' | 'dedicated' | 'serverless'\n}\n\nexport interface FriendliAIChatSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleChatSettings {\n /**\n * BETA FEATURE: Include the model's training loss in the response.\n */\n tools?: Array<{\n type:\n | 'web:url'\n | 'web:search'\n | 'math:calendar'\n | 'math:statistics'\n | 'math:calculator'\n | 'code:python-interpreter'\n }>\n\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls?: boolean\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n regex?: RegExp\n}\n\nexport interface FriendliAICompletionSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleCompletionSettings {}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\nimport {\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n OpenAICompatibleChatConfig,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIChatSettings,\n} from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n\n constructor(\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n this.settings = settings\n this.config = config\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' {\n return this.config.defaultObjectGenerationMode ?? 
'json'\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type\n\n const warnings: LanguageModelV1CallWarning[] = []\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n parallel_tool_calls: this.settings.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n top_k: topK,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n }\n\n if (this.settings.regex != null && type !== 'regular') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'egular expression is only supported with regular mode (generateText, streamText)',\n })\n }\n\n switch (type) {\n case 'regular': {\n if (this.settings.regex != null) {\n if (this.settings.tools != null || mode.tools != null) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Regular expression and tools cannot be used together. Use either regular expression or tools.',\n })\n }\n\n return {\n args: {\n ...baseArgs,\n response_format: {\n type: 'regex',\n schema: this.settings.regex.source,\n },\n },\n warnings,\n }\n }\n\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n tools: this.settings.tools,\n })\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? 
{\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n }\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n }\n }\n\n default: {\n const _exhaustiveCheck: never = type\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`)\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options })\n\n const body = JSON.stringify({ ...args, stream: false })\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: false,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n friendliAIChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const { messages: rawPrompt, ...rawSettings } = args\n const choice = response.choices[0]\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map((toolCall) => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args:\n typeof toolCall.function.arguments! === 'string'\n ? toolCall.function.arguments!\n : JSON.stringify(toolCall.function.arguments),\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n response: getResponseMetadata(response),\n warnings,\n request: { body },\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs({ ...options })\n\n const body = JSON.stringify({ ...args, stream: true })\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n stream_options: { include_usage: true },\n },\n failedResponseHandler: friendliaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n friendliaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const { messages: rawPrompt, ...rawSettings } = args\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n }> = []\n\n let finishReason: LanguageModelV1FinishReason = 'unknown'\n let usage: {\n promptTokens: number | undefined\n completionTokens: number | undefined\n } = {\n promptTokens: undefined,\n completionTokens: undefined,\n }\n let isFirstChunk = true\n\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof friendliaiChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n\n const value = chunk.value\n\n // hosted tool execution case\n if ('status' in value) {\n switch (value.status) {\n case 'STARTED':\n break\n\n case 'UPDATING':\n break\n\n case 'ENDED':\n break\n\n case 'ERRORED':\n finishReason = 'error'\n break\n\n default:\n finishReason = 'error'\n controller.enqueue({\n type: 'error',\n error: new Error(\n `Unsupported tool call status: ${value.status}`,\n ),\n })\n }\n return\n }\n\n // handle error chunks:\n if ('message' in value) {\n console.error('Error chunk:', value)\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens ?? undefined,\n completionTokens: value.usage.completion_tokens ?? undefined,\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n\n // Tool call start. 
FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n }\n }\n\n continue\n }\n\n // existing tool call, merge\n const toolCall = toolCalls[index]\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n })\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nimport { FriendliAIChatSettings } from './friendli-settings'\n\nexport function prepareTools({\n mode,\n tools: hostedTools,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular'\n }\n\n tools?: FriendliAIChatSettings['tools']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: 
{\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV1CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? mode.tools : undefined\n const toolWarnings: LanguageModelV1CallWarning[] = []\n\n if (tools == null && hostedTools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings }\n }\n\n const toolChoice = mode.toolChoice\n\n const mappedTools: Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }> = []\n\n if (tools) {\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool })\n } else {\n mappedTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })\n }\n }\n }\n\n const mappedHostedTools = hostedTools?.map((tool) => {\n return {\n type: tool.type,\n }\n })\n\n if (toolChoice == null) {\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: undefined,\n toolWarnings,\n }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: type,\n toolWarnings,\n }\n case 'tool':\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n })\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAAkD;AAClD,IAAAC,yBAIO;AACP,+BAAwD;;;ACCjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACZA,IAAAC,mBASO;AACP,IAAAC,yBAUO;AACP,sBAKO;AAEP,IAAAC,cAAkB;;;AC5BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AACpB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,sCAAkC;AAAA,EAC7C;AACF;;;AClBA,sBAIO;AAIA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA,OAAO;AACT,GA4BE;AAvCF;AAyCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,QAAQ,eAAe,MAAM;AACxC,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,cAOD,CAAC;AAEN,MAAI,OAAO;AACT,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,oBAAoB;AACpC,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,MACtD,OAAO;AACL,oBAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,KAAK;AAAA,YACX,aAAa,KAAK;AAAA,YAClB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,2CAAa,IAAI,CAAC,SAAS;AACnD,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,MAC5D,aAAa;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AF9EO,IAAM,8BAAN,MAA6D;AAAA,EAWlE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AA1ClC;AAyDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAEd,SAAK,4BAAwB;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA+C;AApErD;AAqEI,YAAO,UAAK,OAAO,gCAAZ,YAA2C;AAAA,EACpD;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,qBAAqB,KAAK,SAAS;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA;AAAA,MAGA,cAAU,uDAAsC,MAAM;AAAA,IACxD;AAEA,QAAI,KAAK,SAAS,SAAS,QAAQ,SAAS,WAAW;AACrD,YAAM,IAAI,+CAA8B;AAAA,QACtC,eACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,YAAI,KAAK,SAAS,SAAS,MAAM;AAC/B,cAAI,KAAK,SAAS,SAAS,QAAQ,KAAK,SAAS,MAAM;AACrD,kBAAM,IAAI,+CAA8B;AAAA,cACtC,eACE;AAAA,YACJ,CAAC;AAAA,UACH;AAEA,iBAAO;AAAA,YACL,MAAM;AAAA,cACJ,GAAG;AAAA,cACH,iBAAiB;AAAA,gBACf,MAAM;AAAA,gBACN,QAAQ,KAAK,SAAS,MAAM;AAAA,cAC9B;AAAA,YACF;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,OAAO,KAAK,SAAS;AAAA,QACvB,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;A
AAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAzOjE;AA0OI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,MAAM,CAAC;AAEtD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,CAAC,aAAU;AArQ3D,YAAAC;AAqQ+D;AAAA,UACvD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MACE,OAAO,SAAS,SAAS,cAAe,WACpC,SAAS,SAAS,YAClB,KAAK,UAAU,SAAS,SAAS,SAAS;AAAA,QAClD;AAAA;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,cAAU,qCAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AAErD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,QACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,MACxC;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI,eAAe;AAEnB,QAAI;AACJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAjVvC;AAmVY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,YAAY,OAAO;AACrB,sBAAQ,MAAM,QAAQ;AAAA,gBACpB,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH,iCAAe;AACf;AAAA,gBAEF;AACE,iCAAe;AACf,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,OAAO,IAAI;AAAA,sBACT,iCAAiC,MAAM,MAAM;AAAA,oBAC/C;AAAA,kBACF,CAAC;AAAA,cACL;AACA;AAAA,YACF;AAGA,gBAAI,aAAa,OAAO;AACtB,sBAAQ,MAAM,gBAAgB,KAAK;AACnC,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,QAAQ,CAAC;AAC1D;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AAAA,gBAC3C,mBAAkB,WAAM,MAAM,sBAAZ,YAAiC;AAAA,cACrD;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,
iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AAAA,oBACH;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AA5gB5B;AA6gBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAGA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,M
AAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;;;AF/hBM,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,UACA,SACA,UAIG;AAEH,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QAAI,OAAO,kBAAkB,UAAU;AACrC,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,UAAM,kBAAkB;AAAA,MACtB,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,MACP,WAAW;AAAA,IACb;AAEA,QAAI,aAAa,QAAQ;AACvB,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAEA;AAAA;AAAA,MAEE,aAAa,gBACZ,aAAa,UACZ,OAAO,OAAO,4BAA4B,EAAE;AAAA,QAC1C;AAAA,MACF;AAAA,MACF;AACA,UAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAyC,CAAC,MACvC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,iEAAwC,SAAS,UAAU;AAAA,MACpE,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AAEA,QAAM,WAAW,SACf,SACA,UACA;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,WAAS,OAAO;AAChB,WAAS,YAAY;AAErB,WAAS,aAAa;AACtB,WAAS,kBAAkB;AAE3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,WAAW,eAAe,CAAC,CAAC;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
|
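The source map above embeds the full provider source, including `baseURLAutoSelect`, which routes each request to the beta, serverless, tools, or dedicated base URL depending on the `endpoint` setting and the model id. For orientation only, here is a minimal TypeScript sketch of steering that selection toward a dedicated endpoint; the endpoint id and the `FRIENDLI_TEAM_ID` environment variable are placeholders, not part of the package.

```ts
import { createFriendli } from '@friendliai/ai-provider'
import { generateText } from 'ai'

// The provider reads FRIENDLI_TOKEN from the environment when apiKey is omitted.
const friendli = createFriendli({
  teamId: process.env.FRIENDLI_TEAM_ID, // optional; sent as the X-Friendli-Team header
})

// endpoint: 'dedicated' skips the serverless model-id lookup and routes the call
// to https://api.friendli.ai/dedicated/v1. The endpoint id below is a placeholder.
const model = friendli.chat('your-dedicated-endpoint-id', { endpoint: 'dedicated' })

async function main() {
  const { text } = await generateText({
    model,
    prompt: 'Write a one-line status update.',
  })
  console.log(text)
}

main().catch(console.error)
```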
package/dist/index.mjs
CHANGED
|
@@ -10,7 +10,8 @@ import { OpenAICompatibleCompletionLanguageModel } from "@ai-sdk/openai-compatib
|
|
|
10
10
|
var FriendliAIServerlessModelIds = [
|
|
11
11
|
"meta-llama-3.1-8b-instruct",
|
|
12
12
|
"meta-llama-3.1-70b-instruct",
|
|
13
|
-
"
|
|
13
|
+
"meta-llama-3.3-70b-instruct",
|
|
14
|
+
"deepseek-r1"
|
|
14
15
|
];
|
|
15
16
|
|
|
16
17
|
// src/friendli-chat-language-model.ts
|
|
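The hunk above extends `FriendliAIServerlessModelIds`, the list consulted by the default `endpoint: 'auto'` selection: model ids in this list are sent to the serverless base URL. A minimal sketch of picking up one of the newly listed models follows; the prompt is illustrative only.

```ts
import { friendli } from '@friendliai/ai-provider'
import { generateText } from 'ai'

async function main() {
  // 'deepseek-r1' is now in FriendliAIServerlessModelIds, so the default
  // endpoint ('auto') resolves to https://api.friendli.ai/serverless/v1.
  const { text } = await generateText({
    model: friendli('deepseek-r1'),
    prompt: 'In one sentence, what is a serverless endpoint?',
  })
  console.log(text)
}

main().catch(console.error)
```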
@@ -296,7 +297,7 @@ var FriendliAIChatLanguageModel = class {
|
|
|
296
297
|
toolCallType: "function",
|
|
297
298
|
toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
|
|
298
299
|
toolName: toolCall.function.name,
|
|
299
|
-
args: toolCall.function.arguments
|
|
300
|
+
args: typeof toolCall.function.arguments === "string" ? toolCall.function.arguments : JSON.stringify(toolCall.function.arguments)
|
|
300
301
|
};
|
|
301
302
|
}),
|
|
302
303
|
finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),
|
|
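The change above makes `doGenerate` tolerate tool-call arguments that arrive as an already-parsed object rather than a JSON string, presumably to match the relaxed response schema in the next hunk; non-strings are re-serialized so `args` is always a string. The same normalization shown in isolation (the type here is reduced for illustration, not the package's internal type):

```ts
// Minimal shape covering only what the normalization needs.
type RawToolCall = { function: { name: string; arguments: string | unknown } }

function normalizeToolCallArgs(toolCall: RawToolCall): string {
  // Pass JSON strings through untouched; serialize anything else.
  return typeof toolCall.function.arguments === 'string'
    ? toolCall.function.arguments
    : JSON.stringify(toolCall.function.arguments)
}

console.log(
  normalizeToolCallArgs({ function: { name: 'web:search', arguments: { query: 'friendliai' } } }),
)
// -> {"query":"friendliai"}
```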
@@ -521,7 +522,7 @@ var friendliAIChatResponseSchema = z2.object({
|
|
|
521
522
|
type: z2.literal("function"),
|
|
522
523
|
function: z2.object({
|
|
523
524
|
name: z2.string(),
|
|
524
|
-
arguments: z2.string()
|
|
525
|
+
arguments: z2.union([z2.string(), z2.any()]).nullish()
|
|
525
526
|
})
|
|
526
527
|
})
|
|
527
528
|
).nullish()
|
package/dist/index.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts"],"sourcesContent":["import { LanguageModelV1, NoSuchModelError } from \"@ai-sdk/provider\";\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from \"@ai-sdk/provider-utils\";\nimport { OpenAICompatibleCompletionLanguageModel } from \"@ai-sdk/openai-compatible\";\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIBetaChatModelId,\n FriendliAIChatSettings,\n FriendliAICompletionSettings,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from \"./friendli-settings\";\nimport { FriendliAIChatLanguageModel } from \"./friendli-chat-language-model\";\nimport { friendliaiErrorStructure } from \"./friendli-error\";\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI__TOKEN)\n */\n apiKey?: string;\n /**\n * Base URL for the API calls.\n */\n baseURL?: string;\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>;\n /**\n * FriendliAI Team ID.\n */\n teamId?: string;\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\nexport interface FriendliAIProvider {\n /**\n * Creates a model for text generation.\n */\n (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * A model that has not yet been officially released\n */\n beta(\n modelId: FriendliAIBetaChatModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * Creates a chat model for text generation.\n */\n chat(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n chatModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * Creates a completion model for text generation.\n */\n completion(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n completionModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n /**\n * Creates a text embedding model for text generation.\n */\n embedding(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n textEmbeddingModel(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1;\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: \"FRIENDLI_TOKEN\",\n description: \"FRIENDLI_TOKEN\",\n })}`,\n \"X-Friendli-Team\": options.teamId,\n ...options.headers,\n });\n\n const baseURLAutoSelect = (\n modelId: string,\n endpoint: \"auto\" | \"dedicated\" | \"serverless\" | \"beta\",\n baseURL: string | undefined,\n tools?: Array<any>,\n ): {\n baseURL: string;\n type: \"dedicated\" | \"serverless\" | \"tools\" | \"custom\" | \"beta\";\n } => {\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL);\n if (typeof customBaseURL === \"string\") {\n return { baseURL: customBaseURL, type: \"custom\" };\n }\n\n const FriendliBaseURL = {\n beta: \"https://api.friendli.ai/serverless/beta\",\n serverless: 
\"https://api.friendli.ai/serverless/v1\",\n tools: \"https://api.friendli.ai/serverless/tools/v1\",\n dedicated: \"https://api.friendli.ai/dedicated/v1\",\n };\n\n if (endpoint === \"beta\") {\n return {\n baseURL: FriendliBaseURL.beta,\n type: \"beta\",\n };\n }\n\n if (\n // If the endpoint setting is serverless or auto and the model is floating on serverless,\n endpoint === \"serverless\" ||\n (endpoint === \"auto\" &&\n Object.values(FriendliAIServerlessModelIds).includes(\n modelId as FriendliAIServerlessModelId,\n ))\n ) {\n if (tools && tools.length > 0) {\n return {\n baseURL: FriendliBaseURL.tools,\n type: \"tools\",\n };\n }\n\n return {\n baseURL: FriendliBaseURL.serverless,\n type: \"serverless\",\n };\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: \"dedicated\",\n };\n }\n };\n\n const createChatModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || \"auto\",\n options.baseURL,\n settings.tools,\n );\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: \"json\",\n });\n };\n\n const createCompletionModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAICompletionSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || \"auto\",\n options.baseURL,\n );\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n });\n };\n\n const createBetaModel = (\n modelId: FriendliAIBetaChatModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n \"beta\",\n options.baseURL,\n );\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: \"json\",\n });\n };\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: \"textEmbeddingModel\" });\n };\n\n const provider = function (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ) {\n return createChatModel(modelId, settings);\n };\n\n provider.beta = createBetaModel;\n\n provider.chat = createChatModel;\n provider.chatModel = createChatModel;\n\n provider.completion = createCompletionModel;\n provider.completionModel = createCompletionModel;\n\n provider.embedding = createTextEmbeddingModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as FriendliAIProvider;\n}\n\nexport const friendli = createFriendli({});\n","import {\n OpenAICompatibleChatSettings,\n OpenAICompatibleCompletionSettings,\n} from \"@ai-sdk/openai-compatible\";\n\n// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n \"meta-llama-3.1-8b-instruct\",\n \"meta-llama-3.1-70b-instruct\",\n \"mixtral-8x7b-instruct-v0-1\",\n] as const;\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number];\n\nexport type FriendliAILanguageModelId =\n | 
FriendliAIServerlessModelId\n | (string & {});\n\nexport type FriendliAIBetaChatModelId =\n | \"llama-3.2-11b-vision-instruct\"\n | (string & {});\n\ninterface FriendliAISharedSettings {\n /**\n * Sets the endpoint to which the request will be sent.\n * auto: automatically selected based on model_id\n * dedicated: Fixed to \"/dedicated/v1\"\n * serverless: automatically selected as one of \"/serverless/beta\", \"/serverless/v1\", or \"/serverless/tools/v1\"\n * Ignored if baseURL is specified.\n */\n endpoint?: \"auto\" | \"dedicated\" | \"serverless\";\n}\n\nexport interface FriendliAIChatSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleChatSettings {\n /**\n * BETA FEATURE: Include the model's training loss in the response.\n */\n tools?: Array<{\n type:\n | \"web:url\"\n | \"web:search\"\n | \"math:calendar\"\n | \"math:statistics\"\n | \"math:calculator\"\n | \"code:python-interpreter\";\n }>;\n\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls?: boolean;\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n regex?: RegExp;\n}\n\nexport interface FriendliAICompletionSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleCompletionSettings {}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport {\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n postJsonToApi,\n} from \"@ai-sdk/provider-utils\";\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n OpenAICompatibleChatConfig,\n} from \"@ai-sdk/openai-compatible/internal\";\n\nimport { z } from \"zod\";\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIChatSettings,\n} from \"./friendli-settings\";\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n friendliaiFailedResponseHandler,\n} from \"./friendli-error\";\nimport { prepareTools } from \"./friendli-prepare-tools\";\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = \"v1\";\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: FriendliAILanguageModelId;\n readonly settings: FriendliAIChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n\n constructor(\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n );\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true;\n }\n\n get defaultObjectGenerationMode(): \"json\" | \"tool\" {\n return this.config.defaultObjectGenerationMode ?? 
\"json\";\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (\n responseFormat?.type === \"json\" &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"responseFormat\",\n details:\n \"JSON response format schema is only supported with structuredOutputs\",\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n parallel_tool_calls: this.settings.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n top_k: topK,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === \"json\"\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: \"json_schema\",\n json_schema: {\n schema: responseFormat.schema,\n description: responseFormat.description,\n },\n }\n : { type: \"json_object\" }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n if (this.settings.regex != null && type !== \"regular\") {\n throw new UnsupportedFunctionalityError({\n functionality:\n \"egular expression is only supported with regular mode (generateText, streamText)\",\n });\n }\n\n switch (type) {\n case \"regular\": {\n if (this.settings.regex != null) {\n if (this.settings.tools != null || mode.tools != null) {\n throw new UnsupportedFunctionalityError({\n functionality:\n \"Regular expression and tools cannot be used together. Use either regular expression or tools.\",\n });\n }\n\n return {\n args: {\n ...baseArgs,\n response_format: {\n type: \"regex\",\n schema: this.settings.regex.source,\n },\n },\n warnings,\n };\n }\n\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n tools: this.settings.tools,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case \"object-json\": {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? 
{\n type: \"json_schema\",\n json_schema: {\n schema: mode.schema,\n description: mode.description,\n },\n }\n : { type: \"json_object\" },\n },\n warnings,\n };\n }\n\n case \"object-tool\": {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: \"function\",\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: \"function\",\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1[\"doGenerate\"]>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: false });\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: \"/chat/completions\",\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: false,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n friendliAIChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map((toolCall) => ({\n toolCallType: \"function\",\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n response: getResponseMetadata(response),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1[\"doStream\"]>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: \"/chat/completions\",\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n stream_options: { include_usage: true },\n },\n failedResponseHandler: friendliaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n friendliaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: \"function\";\n function: {\n name: string;\n arguments: string;\n };\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = \"unknown\";\n let usage: {\n promptTokens: number | undefined;\n completionTokens: number | undefined;\n } = {\n promptTokens: undefined,\n completionTokens: undefined,\n };\n let isFirstChunk = true;\n\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined;\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof friendliaiChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = \"error\";\n controller.enqueue({ type: \"error\", error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // hosted tool execution case\n if (\"status\" in value) {\n switch (value.status) {\n case \"STARTED\":\n break;\n\n case \"UPDATING\":\n break;\n\n case \"ENDED\":\n break;\n\n case \"ERRORED\":\n finishReason = \"error\";\n break;\n\n default:\n finishReason = \"error\";\n controller.enqueue({\n type: \"error\",\n error: new Error(\n `Unsupported tool call status: ${value.status}`,\n ),\n });\n }\n return;\n }\n\n // handle error chunks:\n if (\"message\" in value) {\n console.error(\"Error chunk:\", value);\n finishReason = \"error\";\n controller.enqueue({ type: \"error\", error: value.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: \"response-metadata\",\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens ?? undefined,\n completionTokens: value.usage.completion_tokens ?? undefined,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: \"text-delta\",\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n // Tool call start. 
FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== \"function\") {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: \"function\",\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? \"\",\n },\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: \"tool-call-delta\",\n toolCallType: \"function\",\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n\n continue;\n }\n\n // existing tool call, merge\n const toolCall = toolCalls[index];\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? \"\";\n }\n\n // send delta\n controller.enqueue({\n type: \"tool-call-delta\",\n toolCallType: \"function\",\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? \"\",\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: \"finish\",\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal(\"assistant\").nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal(\"function\"),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum([\"assistant\"]).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal(\"function\").optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum([\"ENDED\", \"STARTED\", \"ERRORED\", \"UPDATING\"]),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum([\"INVALID_PARAMETER\", \"UNKNOWN\"]),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n]);\n","import { z } from \"zod\";\nimport { ProviderErrorStructure } from \"@ai-sdk/openai-compatible\";\nimport { createJsonErrorResponseHandler } from \"@ai-sdk/provider-utils\";\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n});\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n };\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n);\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\n\nimport { FriendliAIChatSettings } from \"./friendli-settings\";\n\nexport function prepareTools({\n mode,\n tools: hostedTools,\n}: {\n mode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n type: \"regular\";\n };\n\n tools?: FriendliAIChatSettings[\"tools\"];\n}): {\n tools:\n | undefined\n | Array<{\n type: string;\n files?: string[];\n }>\n | Array<{\n 
type: \"function\";\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: \"function\"; function: { name: string } }\n | \"auto\"\n | \"none\"\n | \"required\"\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null && hostedTools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const mappedTools: Array<{\n type: \"function\";\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n if (tools) {\n for (const tool of tools) {\n if (tool.type === \"provider-defined\") {\n toolWarnings.push({ type: \"unsupported-tool\", tool });\n } else {\n mappedTools.push({\n type: \"function\",\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n }\n\n const mappedHostedTools = hostedTools?.map((tool) => {\n return {\n type: tool.type,\n };\n });\n\n if (toolChoice == null) {\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case \"auto\":\n case \"none\":\n case \"required\":\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: type,\n toolWarnings,\n };\n case \"tool\":\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: {\n type: \"function\",\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n 
}\n}\n"],"mappings":";AAAA,SAA0B,wBAAwB;AAClD;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACCjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF;;;ACXA;AAAA,EAEE;AAAA,EAMA,iCAAAA;AAAA,OACK;AACP;AAAA,EAGE;AAAA,EACA;AAAA,EACA,kCAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AAEP,SAAS,KAAAC,UAAS;;;AC5BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AACpB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;AClBA;AAAA,EAGE;AAAA,OACK;AAIA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA,OAAO;AACT,GA4BE;AAvCF;AAyCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,QAAQ,eAAe,MAAM;AACxC,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,cAOD,CAAC;AAEN,MAAI,OAAO;AACT,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,oBAAoB;AACpC,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,MACtD,OAAO;AACL,oBAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,KAAK;AAAA,YACX,aAAa,KAAK;AAAA,YAClB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,2CAAa,IAAI,CAAC,SAAS;AACnD,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,MAC5D,aAAa;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AF9EO,IAAM,8BAAN,MAA6D;AAAA,EAWlE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AA1ClC;AAyDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAEd,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA+C;AApErD;AAqEI,YAAO,UAAK,OAAO,gCAAZ,YAA2C;AAAA,EACpD;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,qBAAqB,KAAK,SAAS;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA;AAAA,MAGA,UAAU,sCAAsC,MAAM;AAAA,IACxD;AAEA,QAAI,KAAK,SAAS,SAAS,QAAQ,SAAS,WAAW;AACrD,YAAM,IAAIC,+BAA8B;AAAA,QACtC,eACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,YAAI,KAAK,SAAS,SAAS,MAAM;AAC/B,cAAI,KAAK,SAAS,SAAS,QAAQ,KAAK,SAAS,MAAM;AACrD,kBAAM,IAAIA,+BAA8B;AAAA,cACtC,eACE;AAAA,YACJ,CAAC;AAAA,UACH;AAEA,iBAAO;AAAA,YACL,MAAM;AAAA,cACJ,GAAG;AAAA,cACH,iBAAiB;AAAA,gBACf,MAAM;AAAA,gBACN,QAAQ,KAAK,SAAS,MAAM;AAAA,cAC9B;AAAA,YACF;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,OAAO,KAAK,SAAS;AAAA,QACvB,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY
;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAzOjE;AA0OI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,MAAM,CAAC;AAEtD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,CAAC,aAAU;AArQ3D,YAAAC;AAqQ+D;AAAA,UACvD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AAErD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,QACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,MACxC;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI,eAAe;AAEnB,QAAI;AACJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA9UvC;AAgVY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,YAAY,OAAO;AACrB,sBAAQ,MAAM,QAAQ;AAAA,gBACpB,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH,iCAAe;AACf;AAAA,gBAEF;AACE,iCAAe;AACf,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,OAAO,IAAI;AAAA,sBACT,iCAAiC,MAAM,MAAM;AAAA,oBAC/C;AAAA,kBACF,CAAC;AAAA,cACL;AACA;AAAA,YACF;AAGA,gBAAI,aAAa,OAAO;AACtB,sBAAQ,MAAM,gBAAgB,KAAK;AACnC,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,QAAQ,CAAC;AAC1D;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AAAA,gBAC3C,mBAAkB,WAAM,MAAM,sBAAZ,YAAiC;AAAA,cACrD;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MA
AM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AAAA,oBACH;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAzgB5B;AA0gBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAGA,IAAM,+BAA+BC,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAA
S;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;;;AF5hBM,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,UACA,SACA,UAIG;AAEH,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QAAI,OAAO,kBAAkB,UAAU;AACrC,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,UAAM,kBAAkB;AAAA,MACtB,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,MACP,WAAW;AAAA,IACb;AAEA,QAAI,aAAa,QAAQ;AACvB,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAEA;AAAA;AAAA,MAEE,aAAa,gBACZ,aAAa,UACZ,OAAO,OAAO,4BAA4B,EAAE;AAAA,QAC1C;AAAA,MACF;AAAA,MACF;AACA,UAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAyC,CAAC,MACvC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,wCAAwC,SAAS,UAAU;AAAA,MACpE,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AAEA,QAAM,WAAW,SACf,SACA,UACA;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,WAAS,OAAO;AAChB,WAAS,YAAY;AAErB,WAAS,aAAa;AACtB,WAAS,kBAAkB;AAE3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,WAAW,eAAe,CAAC,CAAC;","names":["UnsupportedFunctionalityError","createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","UnsupportedFunctionalityError","_a","toolCall","z"]}
|
|
1
|
+
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts"],"sourcesContent":["import { LanguageModelV1, NoSuchModelError } from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIBetaChatModelId,\n FriendliAIChatSettings,\n FriendliAICompletionSettings,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI__TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider {\n /**\n * Creates a model for text generation.\n */\n (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * A model that has not yet been officially released\n */\n beta(\n modelId: FriendliAIBetaChatModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * Creates a chat model for text generation.\n */\n chat(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n chatModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * Creates a completion model for text generation.\n */\n completion(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n completionModel(\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n /**\n * Creates a text embedding model for text generation.\n */\n embedding(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n textEmbeddingModel(\n modelId: string & {},\n settings?: FriendliAIChatSettings,\n ): LanguageModelV1\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n endpoint: 'auto' | 'dedicated' | 'serverless' | 'beta',\n baseURL: string | undefined,\n tools?: Array<unknown>,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'tools' | 'custom' | 'beta'\n } => {\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (typeof customBaseURL === 'string') {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n const FriendliBaseURL = {\n beta: 'https://api.friendli.ai/serverless/beta',\n serverless: 'https://api.friendli.ai/serverless/v1',\n tools: 
'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n if (endpoint === 'beta') {\n return {\n baseURL: FriendliBaseURL.beta,\n type: 'beta',\n }\n }\n\n if (\n // If the endpoint setting is serverless or auto and the model is floating on serverless,\n endpoint === 'serverless' ||\n (endpoint === 'auto' &&\n Object.values(FriendliAIServerlessModelIds).includes(\n modelId as FriendliAIServerlessModelId,\n ))\n ) {\n if (tools && tools.length > 0) {\n return {\n baseURL: FriendliBaseURL.tools,\n type: 'tools',\n }\n }\n\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n\n const createChatModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || 'auto',\n options.baseURL,\n settings.tools,\n )\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: 'json',\n })\n }\n\n const createCompletionModel = (\n modelId: FriendliAILanguageModelId,\n settings: FriendliAICompletionSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n settings.endpoint || 'auto',\n options.baseURL,\n )\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createBetaModel = (\n modelId: FriendliAIBetaChatModelId,\n settings: FriendliAIChatSettings = {},\n ) => {\n const { baseURL, type } = baseURLAutoSelect(\n modelId,\n 'beta',\n options.baseURL,\n )\n\n return new FriendliAIChatLanguageModel(modelId, settings, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: 'json',\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n\n const provider = function (\n modelId: FriendliAILanguageModelId,\n settings?: FriendliAIChatSettings,\n ) {\n return createChatModel(modelId, settings)\n }\n\n provider.beta = createBetaModel\n\n provider.chat = createChatModel\n provider.chatModel = createChatModel\n\n provider.completion = createCompletionModel\n provider.completionModel = createCompletionModel\n\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n\n return provider as FriendliAIProvider\n}\n\nexport const friendli = createFriendli({})\n","import {\n OpenAICompatibleChatSettings,\n OpenAICompatibleCompletionSettings,\n} from '@ai-sdk/openai-compatible'\n\n// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.1-70b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n\nexport type 
FriendliAIBetaChatModelId =\n | 'llama-3.2-11b-vision-instruct'\n | (string & {})\n\ninterface FriendliAISharedSettings {\n /**\n * Sets the endpoint to which the request will be sent.\n * auto: automatically selected based on model_id\n * dedicated: Fixed to \"/dedicated/v1\"\n * serverless: automatically selected as one of \"/serverless/beta\", \"/serverless/v1\", or \"/serverless/tools/v1\"\n * Ignored if baseURL is specified.\n */\n endpoint?: 'auto' | 'dedicated' | 'serverless'\n}\n\nexport interface FriendliAIChatSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleChatSettings {\n /**\n * BETA FEATURE: Include the model's training loss in the response.\n */\n tools?: Array<{\n type:\n | 'web:url'\n | 'web:search'\n | 'math:calendar'\n | 'math:statistics'\n | 'math:calculator'\n | 'code:python-interpreter'\n }>\n\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls?: boolean\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n regex?: RegExp\n}\n\nexport interface FriendliAICompletionSettings\n extends FriendliAISharedSettings,\n OpenAICompatibleCompletionSettings {}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\nimport {\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n OpenAICompatibleChatConfig,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIChatSettings,\n} from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n\n constructor(\n modelId: FriendliAILanguageModelId,\n settings: FriendliAIChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n this.settings = settings\n this.config = config\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' {\n return this.config.defaultObjectGenerationMode ?? 
'json'\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type\n\n const warnings: LanguageModelV1CallWarning[] = []\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n parallel_tool_calls: this.settings.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n top_k: topK,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n }\n\n if (this.settings.regex != null && type !== 'regular') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'egular expression is only supported with regular mode (generateText, streamText)',\n })\n }\n\n switch (type) {\n case 'regular': {\n if (this.settings.regex != null) {\n if (this.settings.tools != null || mode.tools != null) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Regular expression and tools cannot be used together. Use either regular expression or tools.',\n })\n }\n\n return {\n args: {\n ...baseArgs,\n response_format: {\n type: 'regex',\n schema: this.settings.regex.source,\n },\n },\n warnings,\n }\n }\n\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n tools: this.settings.tools,\n })\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? 
{\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n }\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n }\n }\n\n default: {\n const _exhaustiveCheck: never = type\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`)\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options })\n\n const body = JSON.stringify({ ...args, stream: false })\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: false,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n friendliAIChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const { messages: rawPrompt, ...rawSettings } = args\n const choice = response.choices[0]\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map((toolCall) => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args:\n typeof toolCall.function.arguments! === 'string'\n ? toolCall.function.arguments!\n : JSON.stringify(toolCall.function.arguments),\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n response: getResponseMetadata(response),\n warnings,\n request: { body },\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs({ ...options })\n\n const body = JSON.stringify({ ...args, stream: true })\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n stream_options: { include_usage: true },\n },\n failedResponseHandler: friendliaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n friendliaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const { messages: rawPrompt, ...rawSettings } = args\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n }> = []\n\n let finishReason: LanguageModelV1FinishReason = 'unknown'\n let usage: {\n promptTokens: number | undefined\n completionTokens: number | undefined\n } = {\n promptTokens: undefined,\n completionTokens: undefined,\n }\n let isFirstChunk = true\n\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof friendliaiChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n\n const value = chunk.value\n\n // hosted tool execution case\n if ('status' in value) {\n switch (value.status) {\n case 'STARTED':\n break\n\n case 'UPDATING':\n break\n\n case 'ENDED':\n break\n\n case 'ERRORED':\n finishReason = 'error'\n break\n\n default:\n finishReason = 'error'\n controller.enqueue({\n type: 'error',\n error: new Error(\n `Unsupported tool call status: ${value.status}`,\n ),\n })\n }\n return\n }\n\n // handle error chunks:\n if ('message' in value) {\n console.error('Error chunk:', value)\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens ?? undefined,\n completionTokens: value.usage.completion_tokens ?? undefined,\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n\n // Tool call start. 
FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n }\n }\n\n continue\n }\n\n // existing tool call, merge\n const toolCall = toolCalls[index]\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n })\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nimport { FriendliAIChatSettings } from './friendli-settings'\n\nexport function prepareTools({\n mode,\n tools: hostedTools,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular'\n }\n\n tools?: FriendliAIChatSettings['tools']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: 
{\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV1CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? mode.tools : undefined\n const toolWarnings: LanguageModelV1CallWarning[] = []\n\n if (tools == null && hostedTools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings }\n }\n\n const toolChoice = mode.toolChoice\n\n const mappedTools: Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }> = []\n\n if (tools) {\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool })\n } else {\n mappedTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })\n }\n }\n }\n\n const mappedHostedTools = hostedTools?.map((tool) => {\n return {\n type: tool.type,\n }\n })\n\n if (toolChoice == null) {\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: undefined,\n toolWarnings,\n }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: type,\n toolWarnings,\n }\n case 'tool':\n return {\n tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n })\n }\n 
}\n}\n"],"mappings":";AAAA,SAA0B,wBAAwB;AAClD;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACCjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACZA;AAAA,EAEE;AAAA,EAMA,iCAAAA;AAAA,OACK;AACP;AAAA,EAGE;AAAA,EACA;AAAA,EACA,kCAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AAEP,SAAS,KAAAC,UAAS;;;AC5BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AACpB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;AClBA;AAAA,EAGE;AAAA,OACK;AAIA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA,OAAO;AACT,GA4BE;AAvCF;AAyCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,QAAQ,eAAe,MAAM;AACxC,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,cAOD,CAAC;AAEN,MAAI,OAAO;AACT,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,oBAAoB;AACpC,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,MACtD,OAAO;AACL,oBAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,KAAK;AAAA,YACX,aAAa,KAAK;AAAA,YAClB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,2CAAa,IAAI,CAAC,SAAS;AACnD,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,MAC5D,aAAa;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,CAAC,GAAI,oCAAe,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAAA,QAC5D,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AF9EO,IAAM,8BAAN,MAA6D;AAAA,EAWlE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AA1ClC;AAyDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAEd,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA+C;AApErD;AAqEI,YAAO,UAAK,OAAO,gCAAZ,YAA2C;AAAA,EACpD;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,qBAAqB,KAAK,SAAS;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA;AAAA,MAGA,UAAU,sCAAsC,MAAM;AAAA,IACxD;AAEA,QAAI,KAAK,SAAS,SAAS,QAAQ,SAAS,WAAW;AACrD,YAAM,IAAIC,+BAA8B;AAAA,QACtC,eACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,YAAI,KAAK,SAAS,SAAS,MAAM;AAC/B,cAAI,KAAK,SAAS,SAAS,QAAQ,KAAK,SAAS,MAAM;AACrD,kBAAM,IAAIA,+BAA8B;AAAA,cACtC,eACE;AAAA,YACJ,CAAC;AAAA,UACH;AAEA,iBAAO;AAAA,YACL,MAAM;AAAA,cACJ,GAAG;AAAA,cACH,iBAAiB;AAAA,gBACf,MAAM;AAAA,gBACN,QAAQ,KAAK,SAAS,MAAM;AAAA,cAC9B;AAAA,YACF;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,OAAO,KAAK,SAAS;AAAA,QACvB,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU
,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAzOjE;AA0OI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,MAAM,CAAC;AAEtD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,CAAC,aAAU;AArQ3D,YAAAC;AAqQ+D;AAAA,UACvD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MACE,OAAO,SAAS,SAAS,cAAe,WACpC,SAAS,SAAS,YAClB,KAAK,UAAU,SAAS,SAAS,SAAS;AAAA,QAClD;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AAErD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,QACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,MACxC;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI,eAAe;AAEnB,QAAI;AACJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAjVvC;AAmVY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,YAAY,OAAO;AACrB,sBAAQ,MAAM,QAAQ;AAAA,gBACpB,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH;AAAA,gBAEF,KAAK;AACH,iCAAe;AACf;AAAA,gBAEF;AACE,iCAAe;AACf,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,OAAO,IAAI;AAAA,sBACT,iCAAiC,MAAM,MAAM;AAAA,oBAC/C;AAAA,kBACF,CAAC;AAAA,cACL;AACA;AAAA,YACF;AAGA,gBAAI,aAAa,OAAO;AACtB,sBAAQ,MAAM,gBAAgB,KAAK;AACnC,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,QAAQ,CAAC;AAC1D;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AAAA,gBAC3C,mBAAkB,WAAM,MAAM,sBAAZ,YAAiC;AAAA,cACrD;A
AAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AAAA,oBACH;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AA5gB5B;AA6gBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAGA,IAAM,+BAA+BC,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,
YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;;;AF/hBM,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,UACA,SACA,UAIG;AAEH,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QAAI,OAAO,kBAAkB,UAAU;AACrC,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,UAAM,kBAAkB;AAAA,MACtB,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,MACP,WAAW;AAAA,IACb;AAEA,QAAI,aAAa,QAAQ;AACvB,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAEA;AAAA;AAAA,MAEE,aAAa,gBACZ,aAAa,UACZ,OAAO,OAAO,4BAA4B,EAAE;AAAA,QAC1C;AAAA,MACF;AAAA,MACF;AACA,UAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,SAAS,gBAAgB;AAAA,QACzB,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAyC,CAAC,MACvC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA,SAAS,YAAY;AAAA,MACrB,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,wCAAwC,SAAS,UAAU;AAAA,MACpE,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AACH,UAAM,EAAE,SAAS,KAAK,IAAI;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO,IAAI,4BAA4B,SAAS,UAAU;AAAA,MACxD,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AAEA,QAAM,WAAW,SACf,SACA,UACA;AACA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,WAAS,OAAO;AAChB,WAAS,YAAY;AAErB,WAAS,aAAa;AACtB,WAAS,kBAAkB;AAE3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,WAAW,eAAe,CAAC,CAAC;","names":["UnsupportedFunctionalityError","createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","UnsupportedFunctionalityError","_a","toolCall","z"]}
|
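The updated sources embedded in this map list `'deepseek-r1'` in `FriendliAIServerlessModelIds`, so that id is routed to the serverless endpoint by `baseURLAutoSelect` when no custom `baseURL` is given. Below is a minimal, illustrative sketch of calling the new id through the default `friendli` instance; it assumes the `ai` package's `generateText` helper is installed and `FRIENDLI_TOKEN` is set in the environment, and the prompt text is purely hypothetical.

```ts
// Sketch only — not part of the published diff.
// Assumes FRIENDLI_TOKEN is set and the `ai` package is available.
import { generateText } from 'ai'
import { friendli } from '@friendliai/ai-provider'

// 'deepseek-r1' is one of the serverless model ids in the 0.2.4 sources,
// so the request goes to the serverless chat completions endpoint.
const { text } = await generateText({
  model: friendli('deepseek-r1'),
  prompt: 'Explain the difference between streaming and non-streaming chat completions.',
})

console.log(text)
```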
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@friendliai/ai-provider",
|
|
3
|
-
"version": "0.2.
|
|
3
|
+
"version": "0.2.4",
|
|
4
4
|
"license": "Apache-2.0",
|
|
5
5
|
"sideEffects": false,
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -25,17 +25,17 @@
|
|
|
25
25
|
}
|
|
26
26
|
},
|
|
27
27
|
"dependencies": {
|
|
28
|
-
"@ai-sdk/openai-compatible": "^0.
|
|
29
|
-
"@ai-sdk/provider": "1.0.
|
|
30
|
-
"@ai-sdk/provider-utils": "2.
|
|
28
|
+
"@ai-sdk/openai-compatible": "^0.1.11",
|
|
29
|
+
"@ai-sdk/provider": "1.0.8",
|
|
30
|
+
"@ai-sdk/provider-utils": "2.1.9"
|
|
31
31
|
},
|
|
32
32
|
"devDependencies": {
|
|
33
33
|
"@edge-runtime/vm": "^5.0.0",
|
|
34
|
-
"@types/node": "^22.
|
|
35
|
-
"
|
|
36
|
-
"
|
|
37
|
-
"
|
|
38
|
-
"
|
|
34
|
+
"@types/node": "^22.13.5",
|
|
35
|
+
"globals": "^16.0.0",
|
|
36
|
+
"tsup": "^8.3.6",
|
|
37
|
+
"typescript": "5.7.3",
|
|
38
|
+
"zod": "3.24.2"
|
|
39
39
|
},
|
|
40
40
|
"peerDependencies": {
|
|
41
41
|
"zod": "^3.0.0"
|