@friendliai/ai-provider 0.3.0-beta.7 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
  # @friendliai/ai-provider

+ ## 0.3.1
+
+ ### Patch Changes
+
+ - d6a924b: For convenience, parse_reasoning is set to true by default.
+
+ ## 0.3.0
+
+ ### Patch Changes
+
+ - 10ab7aa: add chat_template_kwargs option
+ - 10ab7aa: Added supported type model to friendli-provider
+ - 10ab7aa: bump v5 package latest
+ - 10ab7aa: dump deps (alpha.6)
+ - 10ab7aa: Initial build for ai sdk v5 support (broken Friendli compatibility)
+ - 10ab7aa: As of 25.08.19, the latest serverless list and model list are synchronized.
+ - 10ab7aa: enable regex on v5 (beta)
+ - 10ab7aa: Add getAvailableModels() for dynamic model discovery
+   ref: https://vercel.com/docs/ai-gateway/models-and-providers#dynamic-model-discovery
+ - 10ab7aa: enable support Tool Assisted API
+
  ## 0.3.0-beta.7

  ### Patch Changes
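The 0.3.x entries above describe new runtime behavior (`parse_reasoning` on by default, a `chat_template_kwargs` option) and a `getAvailableModels()` helper. Below is a minimal sketch, not taken from the package docs, of how these might surface to an AI SDK v5 caller; the provider-option names and the `getAvailableModels()` signature are read from the `dist/index.js.map` source further down, and the `enable_thinking` kwarg is only an illustrative placeholder.

```ts
import { friendli } from '@friendliai/ai-provider';
import { generateText } from 'ai';

// parse_reasoning is now sent as true by default (0.3.1), so reasoning output is
// parsed without extra configuration. chat_template_kwargs (0.3.0) is forwarded to
// the model's chat template; `enable_thinking` here is a hypothetical kwarg.
const { text } = await generateText({
  model: friendli('Qwen/Qwen3-32B'),
  prompt: 'Explain quicksort in one sentence.',
  providerOptions: {
    friendli: { chat_template_kwargs: { enable_thinking: false } },
  },
});
console.log(text);

// getAvailableModels() (0.3.0) lists serverless models with pricing/context metadata.
const { models } = await friendli.getAvailableModels();
console.log(models.map((m) => m.id));
```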
package/README.md CHANGED
@@ -1,115 +1,34 @@
+ <!-- header start -->
+ <p align="center">
+ <img src="https://huggingface.co/datasets/FriendliAI/documentation-images/resolve/main/model-card-assets/friendliai.png" width="100%" alt="FriendliAI Logo">
+ </p>
+ <!-- header end -->
+
  # @friendliai/ai-provider

- Learn how to use the FriendliAI provider for the Vercel AI SDK.
+ [![npm](https://img.shields.io/npm/v/@friendliai/ai-provider)](https://www.npmjs.com/package/@friendliai/ai-provider)
+ [![npm](https://img.shields.io/npm/dt/@friendliai/ai-provider)](https://www.npmjs.com/package/@friendliai/ai-provider)

- ## Installation
+ A provider to use FriendliAI models with the Vercel AI SDK and OpenAI-compatible APIs.

- You can install the package via npm:
+ ## Installation

  ```bash
  npm i @friendliai/ai-provider
  ```

- ## Credentials
-
- The tokens required for model usage can be obtained from the [Friendli suite](https://friendli.ai/suite/).
-
- To use the provider, you need to set the `FRIENDLI_TOKEN` environment variable with your personal access token.
-
- ```bash
- export FRIENDLI_TOKEN="YOUR_FRIENDLI_TOKEN"
- ```
-
- Check the [FriendliAI documentation](https://friendli.ai/docs/guides/personal_access_tokens) for more information.
-
- ## Provider Instance
+ ## Example

  ```ts
- import { friendli } from '@friendliai/ai-provider'
- ```
-
- ## Language Models
-
- You can create [FriendliAI models](https://friendli.ai/docs/guides/serverless_endpoints/text_generation#model-supports) using a provider instance.
- The first argument is the model id, e.g. `meta-llama-3.1-8b-instruct`.
-
- ```ts
- const model = friendli('meta-llama-3.1-8b-instruct')
- ```
-
- ### Example: Generating text
-
- You can use FriendliAI language models to generate text with the `generateText` function:
-
- ```ts
- import { friendli } from "@friendliai/ai-provider";
- import { generateText } from 'ai'
+ import { friendli } from '@friendliai/ai-provider';
+ import { generateText } from 'ai';

  const { text } = await generateText({
- model: friendli('meta-llama-3.1-8b-instruct')
+ model: friendli('meta-llama-3.3-70b-instruct'),
  prompt: 'What is the meaning of life?',
- })
- ```
-
- ### Example: Using Enforcing Patterns (Regex)
-
- Specify a specific pattern (e.g., CSV), character sets, or specific language characters (e.g., Korean Hangul characters) for your LLM's output.
-
- ```ts
- import { friendli } from '@friendliai/ai-provider'
- import { generateText } from 'ai'
-
- const { text } = await generateText({
- model: friendli('meta-llama-3.1-8b-instruct', {
- regex: '[\n ,.?!0-9\uac00-\ud7af]*',
- }),
- maxTokens: 40,
- prompt: 'who is the first king of the Joseon Dynasty?',
- })
-
- console.log(text)
+ maxTokens: 20,
+ });
+ console.log(text);
  ```

- ### Example: Using built-in tools (Beta)
-
- If you use `@friendliai/ai-provider`, you can use the [built-in tools](https://friendli.ai/docs/guides/serverless_endpoints/tools/built_in_tools) via the `tools` option.
-
- Built-in tools allow models to use tools to generate better answers. For example, a `web:search` tool can provide up-to-date answers to current questions.
-
- ```ts highlight="1,8,9,10,11,12,13,14,15"
- import { friendli } from '@friendliai/ai-provider'
- import { convertToCoreMessages, streamText } from 'ai'
-
- export async function POST(req: Request) {
- const { messages } = await req.json()
-
- const result = await streamText({
- model: friendli('meta-llama-3.1-8b-instruct', {
- tools: [
- { type: 'web:search' },
- { type: 'math:calculator' },
- { type: 'code:python-interpreter' }, // and more tools..!!
- ],
- }),
- messages: convertToCoreMessages(messages),
- })
-
- return result.toDataStreamResponse()
- }
- ```
-
- FriendliAI language models can also be used in the `streamText`, `generateObject`, `streamObject`, and `streamUI` functions.
- (see [AI SDK Core](/docs/ai-sdk-core) and [AI SDK RSC](/docs/ai-sdk-rsc)).
-
- ## OpenAI Compatibility
-
- We can also use `@ai-sdk/openai` with OpenAI compatibility.
-
- ```ts
- import { createOpenAI } from '@ai-sdk/openai'
-
- const friendli = createOpenAI({
- baseURL: 'https://api.friendli.ai/serverless/v1',
- apiKey: process.env.FRIENDLI_TOKEN,
- })
- ```
+ For more details, see the [official documentation](https://friendli.ai/docs/).
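The README rewrite above drops the regex and built-in-tools walkthroughs in favor of one short example plus a pointer to the docs. The regex constraint still exists in the v5 code path as a `friendli` provider option (see `friendliProviderOptionsSchema` in the source map below); the following is a hedged sketch of the equivalent call, where the option shape is an assumption based on that source rather than documented API.

```ts
import { friendli } from '@friendliai/ai-provider';
import { generateText } from 'ai';

// Constrain output to the character set used in the removed README example.
// `regex` is read from providerOptions.friendli in friendli-chat-language-model.ts;
// the exact surface here is inferred from that source, not from the README.
const { text } = await generateText({
  model: friendli('meta-llama-3.3-70b-instruct'),
  prompt: 'Who was the first king of the Joseon Dynasty?',
  providerOptions: {
    friendli: { regex: '[\n ,.?!0-9\uac00-\ud7af]*' },
  },
});
console.log(text);
```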
package/dist/index.js CHANGED
@@ -72,9 +72,7 @@ var friendliaiErrorStructure = {
  errorSchema: friendliaiErrorSchema,
  errorToMessage: (data) => data.message
  };
- var friendliaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(
- friendliaiErrorStructure
- );
+ var friendliaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(friendliaiErrorStructure);

  // src/friendli-prepare-tools.ts
  var import_provider = require("@ai-sdk/provider");
@@ -142,12 +140,8 @@ var FriendliAIChatLanguageModel = class {
  this.modelId = modelId;
  this.config = config;
  const errorStructure = friendliaiErrorStructure;
- this.chunkSchema = createOpenAICompatibleChatChunkSchema(
- errorStructure.errorSchema
- );
- this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(
- friendliaiErrorStructure
- );
+ this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);
+ this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(friendliaiErrorStructure);
  this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : true;
  }
  get provider() {
@@ -200,10 +194,10 @@ var FriendliAIChatLanguageModel = class {
  });
  return {
  args: {
- // model id:
+ // >>> hard-coded default options >>>
+ parse_reasoning: true,
+ // <<< hard-coded default options <<<
  model: this.modelId,
- // model specific settings:
- // user: compatibleOptions.user,
  // standardized settings:
  stream,
  max_tokens: maxOutputTokens,
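This hunk is the functional change behind the 0.3.1 changelog entry: `parse_reasoning: true` is now injected into every chat completions request. A rough sketch of the JSON body the provider posts to `/chat/completions` after this change; the field list is abridged from the `args` object above and the concrete values are placeholders, not output captured from the package.

```ts
// Illustrative request body only; fields limited to those visible in the hunk/source.
const exampleBody = {
  parse_reasoning: true, // hard-coded default added in 0.3.1
  model: 'meta-llama-3.3-70b-instruct',
  stream: false,
  max_tokens: 256,
  messages: [{ role: 'user', content: 'Hello' }],
};
console.log(JSON.stringify(exampleBody, null, 2));
```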
@@ -253,9 +247,7 @@ var FriendliAIChatLanguageModel = class {
  headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: this.failedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
- OpenAICompatibleChatResponseSchema
- ),
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(OpenAICompatibleChatResponseSchema),
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
@@ -320,9 +312,7 @@ var FriendliAIChatLanguageModel = class {
  headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: this.failedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
- this.chunkSchema
- ),
+ successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(this.chunkSchema),
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
@@ -397,9 +387,7 @@ var FriendliAIChatLanguageModel = class {
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
- finishReason = (0, import_internal.mapOpenAICompatibleFinishReason)(
- choice.finish_reason
- );
+ finishReason = (0, import_internal.mapOpenAICompatibleFinishReason)(choice.finish_reason);
  }
  if ((choice == null ? void 0 : choice.delta) == null) {
  return;
@@ -892,9 +880,7 @@ function createFriendli(options = {}) {
  type: "serverless-tools"
  };
  default:
- if (FriendliAIServerlessModelIds.includes(
- modelId
- )) {
+ if (FriendliAIServerlessModelIds.includes(modelId)) {
  return {
  baseURL: FriendliBaseURL.serverless,
  type: "serverless"
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/get-available-models.ts","../src/friendli-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from './friendli-provider'\nexport type { FriendliAIErrorData } from './friendli-error'\nexport type {\n FriendliAIProvider,\n FriendliAIProviderSettings,\n} from './friendli-provider'\nexport type {\n FriendliAvailableModelsResponse,\n FriendliAvailableModel,\n} from './get-available-models'\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { getAvailableModelsImpl } from './get-available-models'\nimport type { FriendliAvailableModelsResponse } from './get-available-models'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): LanguageModelV2\n textEmbeddingModel(modelId: string & {}): LanguageModelV2\n /**\n * Returns the available models and their metadata.\n */\n getAvailableModels(options?: {\n graphqlURL?: string\n }): Promise<FriendliAvailableModelsResponse>\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new 
FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n ;(provider as unknown as FriendliAIProvider).getAvailableModels =\n async (opts?: { graphqlURL?: string }) => {\n const defaultURL = 'https://api-internal.friendli.ai/api/graphql'\n const graphqlURL = opts?.graphqlURL ?? defaultURL\n const apiKey = options.apiKey\n const teamId = options.teamId\n const headers = options.headers\n return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL })\n }\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n // 'getAvailableModels' is declared here.\n return provider as unknown as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // 
LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? 
{},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? { chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? 
undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? 
'',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: 
z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. 
Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 
'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { loadApiKey } from '@ai-sdk/provider-utils'\n\ntype Pricing = {\n inputToken?: number\n outputToken?: number\n responseTime?: number\n unitType?: 'TOKEN' | 'SECOND'\n currency?: string\n unit?: string\n}\n\nexport type FriendliAvailableModel = {\n id: string\n name?: string | null\n description?: string | null\n pricing?: Pricing\n warm?: boolean\n cold?: boolean\n contextLength?: number | null\n}\n\nexport type FriendliAvailableModelsResponse = {\n models: FriendliAvailableModel[]\n}\n\ntype GraphQLResponse<T> = {\n data?: T\n errors?: Array<{ message: string }>\n}\n\nconst DEFAULT_GRAPHQL_URL = 'https://api-internal.friendli.ai/api/graphql'\n\nasync function postGraphQL<T>(\n url: string,\n body: {\n query: string\n variables?: Record<string, unknown>\n operationName?: string\n },\n headers: Record<string, string>,\n): Promise<GraphQLResponse<T>> {\n const res = await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: JSON.stringify(body),\n })\n\n let json: GraphQLResponse<T>\n try {\n json = await res.json()\n } catch (err) {\n console.error(\n 'Failed to parse JSON response from Friendli API:',\n err,\n 'Status:',\n res.status,\n res.statusText,\n )\n throw new Error(\n `Failed to parse JSON response from Friendli API: ${err instanceof Error ? 
err.message : String(err)}`,\n )\n }\n return json\n}\n\ntype ServerlessEndpointEdge = {\n id: string\n name?: string | null\n status?: 'WARM' | 'COLD' | string | null\n price?: {\n inputPrice?: number | null\n outputPrice?: number | null\n unit?: string | null\n responseTimePrice?: number | null\n priceUnitType?: 'TOKEN' | 'SECOND' | null\n } | null\n contextLength?: number | null\n}\n\ntype ServerlessEndpointsQuery = {\n serverlessEndpoints?: {\n edges?: ServerlessEndpointEdge[]\n } | null\n}\n\nfunction normalizePriceUnit(unit?: string | null): string | undefined {\n if (!unit) return undefined\n return unit\n}\n\nexport async function getAvailableModelsImpl(options: {\n apiKey?: string\n teamId?: string\n headers?: Record<string, string>\n graphqlURL?: string\n}): Promise<FriendliAvailableModelsResponse> {\n let token: string | undefined\n try {\n token =\n options.apiKey ??\n loadApiKey({\n apiKey: undefined,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })\n } catch {\n token = undefined\n }\n\n const headers: Record<string, string> = {\n ...(token ? { Authorization: `Bearer ${token}` } : {}),\n ...(options.teamId ? { 'X-Friendli-Team': options.teamId } : {}),\n ...(options.headers ?? {}),\n }\n\n const url = options.graphqlURL ?? DEFAULT_GRAPHQL_URL\n\n const query = `\n query Edges {\n serverlessEndpoints {\n edges {\n ... on ServerlessChatEndpointCatalog {\n id\n name\n status\n price {\n inputPrice\n outputPrice\n unit\n responseTimePrice\n priceUnitType\n }\n contextLength\n }\n }\n }\n }\n `\n\n const resp = await postGraphQL<ServerlessEndpointsQuery>(\n url,\n { query, variables: {}, operationName: 'Edges' },\n headers,\n )\n\n if (resp.errors && resp.errors.length > 0) {\n throw new Error(\n `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join('; ')}`,\n )\n }\n\n const edges = resp.data?.serverlessEndpoints?.edges ?? []\n\n const models: FriendliAvailableModel[] = edges.map((e) => {\n const warm = e.status === 'WARM'\n const pricing: Pricing | undefined = e.price\n ? {\n inputToken: e.price.inputPrice ?? undefined,\n outputToken: e.price.outputPrice ?? undefined,\n responseTime: e.price.responseTimePrice ?? undefined,\n unitType: (e.price.priceUnitType ?? undefined) as\n | 'TOKEN'\n | 'SECOND'\n | undefined,\n unit: normalizePriceUnit(e.price.unit),\n currency: 'USD',\n }\n : undefined\n\n return {\n id: e.id,\n name: e.name ?? undefined,\n description: undefined,\n pricing,\n warm,\n cold: warm === false,\n contextLength: e.contextLength ?? 
undefined,\n }\n })\n\n return { models }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: codePythonInterpreterBetaTool,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AACP,IAAAC,yBAIO;AACP,+BAAwD;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AACF;;;AC5BA,IAAAC,mBAUO;AACP,IAAAC,yBAaO;AACP,sBAIO;AAEP,IAAAC,cAAkB;;;AC/BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AAAA,EAClB,OAAO,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,sCAAkC;AAAA,EAC7C;AACF;;;ACnBA,sBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,4BAAwB;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAA
I,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,UAAM,6CAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,cAAU,uDAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtOjE;AAuOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,OAAG,qCAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBA
AyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB
,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAA
E,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsB,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AG32BD,IAAAE,yBAA2B;AA8B3B,IAAM,sBAAsB;AAE5B,eAAe,YACb,KACA,MAKA,SAC6B;AAC7B,QAAM,MAAM,MAAM,MAAM,KAAK;AAAA,IAC3B,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL;AAAA,IACA,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B,CAAC;AAED,MAAI;AACJ,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,SAAS,KAAK;AACZ,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,IAAI;AAAA,MACJ,IAAI;AAAA,IACN;AACA,UAAM,IAAI;AAAA,MACR,oDAAoD,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IACtG;AAAA,EACF;AACA,SAAO;AACT;AAsBA,SAAS,mBAAmB,MAA0C;AACpE,MAAI,CAAC,KAAM,QAAO;AAClB,SAAO;AACT;AAEA,eAAsB,uBAAuB,SAKA;AAlG7C;AAmGE,MAAI;AACJ,MAAI;AACF,aACE,aAAQ,WAAR,gBACA,mCAAW;AAAA,MACT,QAAQ;AAAA,MACR,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,EACL,SAAQ;AACN,YAAQ;AAAA,EACV;AAEA,QAAM,UAAkC;AAAA,IACtC,GAAI,QAAQ,EAAE,eAAe,UAAU,KAAK,GAAG,IAAI,CAAC;AAAA,IACpD,GAAI,QAAQ,SAAS,EAAE,mBAAmB,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC9D,IAAI,aAAQ,YAAR,YAAmB,CAAC;AAAA,EAC1B;AAEA,QAAM,OAAM,aAAQ,eAAR,YAAsB;AAElC,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBd,QAAM,OAAO,MAAM;AAAA,IACjB;AAAA,IACA,EAAE,OAAO,WAAW,CAAC,GAAG,eAAe,QAAQ;AAAA,IAC/C;AAAA,EACF;AAEA,MAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IACpF;AAAA,EACF;AAEA,QAAM,SAAQ,sBAAK,SAAL,mBAAW,wBAAX,mBAAgC,UAAhC,YAAyC,CAAC;AAExD,QAAM,SAAmC,MAAM,IAAI,CAAC,MAAM;AA5J5D,QAAAC,KAAAC,KAAAC,KAAAC,KAAAC,KAAAC;AA6JI,UAAM,OAAO,EAAE,WAAW;AAC1B,UAAM,UAA+B,EAAE,QACnC;AAAA,MACE,aAAYL,MAAA,EAAE,MAAM,eAAR,OAAAA,MAAsB;AAAA,MAClC,cAAaC,MAAA,EAAE,MAAM,gBAAR,OAAAA,MAAuB;AAAA,MACpC,eAAcC,MAAA,EAAE,MAAM,sBAAR,OAAAA,MAA6B;AAAA,MAC3C,WAAWC,MAAA,EAAE,MAAM,kBAAR,OAAAA,MAAyB;AAAA,MAIpC,MAAM,mBAAmB,EAAE,MAAM,IAAI;AAAA,MACrC,UAAU;AAAA,IACZ,IACA;AAEJ,WAAO;AAAA,MACL,IAAI,EAAE;AAAA,MACN,OAAMC,MAAA,EAAE,SAAF,OAAAA,MAAU;AAAA,MAChB,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,MAAM,SAAS;AAAA,MACf,gBAAeC,MAAA,EAAE,kBAAF,OAAAA,MAAmB;AAAA,IACpC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,OAAO;AAClB;;;ACtLA,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,
IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AN8CO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC7B,EAAC,SAA2C,qBAC3C,OAAO,SAAmC;AAzO9C;AA0OM,UAAM,aAAa;AACnB,UAAM,cAAa,kCAAM,eAAN,YAAoB;AACvC,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ;AACvB,UAAM,UAAU,QAAQ;AACxB,WAAO,uBAAuB,EAAE,QAAQ,QAAQ,SAAS,WAAW,CAAC;AAAA,EACvE;AACF,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAGjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall","import_provider_utils","_a","_b","_c","_d","_e","_f"]}
1
+ {"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/get-available-models.ts","../src/friendli-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from './friendli-provider';\nexport type { FriendliAIErrorData } from './friendli-error';\nexport type { FriendliAIProvider, FriendliAIProviderSettings } from './friendli-provider';\nexport type {\n FriendliAvailableModelsResponse,\n FriendliAvailableModel,\n} from './get-available-models';\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible';\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings';\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model';\nimport { friendliaiErrorStructure } from './friendli-error';\nimport { getAvailableModelsImpl } from './get-available-models';\nimport type { FriendliAvailableModelsResponse } from './get-available-models';\nimport { friendliTools } from './friendli-tools';\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string;\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools';\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>;\n /**\n * FriendliAI Team ID.\n */\n teamId?: string;\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): LanguageModelV2;\n textEmbeddingModel(modelId: string & {}): LanguageModelV2;\n /**\n * Returns the available models and their metadata.\n */\n getAvailableModels(options?: { graphqlURL?: string }): Promise<FriendliAvailableModelsResponse>;\n embedding(modelId: string & {}): EmbeddingModelV2<string>;\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>;\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2;\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2;\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2;\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools;\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(options: FriendliAIProviderSettings = {}): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n });\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL: string | 'dedicated' | 'serverless' | 'serverless-tools' | undefined\n ): {\n baseURL: string;\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom';\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n };\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL);\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' };\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n };\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n };\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n };\n default:\n if (FriendliAIServerlessModelIds.includes(modelId as FriendliAIServerlessModelId)) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n };\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n };\n }\n }\n };\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);\n\n return new 
FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n };\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n });\n };\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });\n };\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' });\n };\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' });\n };\n\n const provider = (modelId: FriendliAILanguageModelId) => createLanguageModel(modelId);\n\n provider.languageModel = createLanguageModel;\n provider.chat = createLanguageModel;\n provider.completion = createCompletionModel;\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n (provider as unknown as FriendliAIProvider).getAvailableModels = async (opts?: {\n graphqlURL?: string;\n }) => {\n const defaultURL = 'https://api-internal.friendli.ai/api/graphql';\n const graphqlURL = opts?.graphqlURL ?? defaultURL;\n const apiKey = options.apiKey;\n const teamId = options.teamId;\n const headers = options.headers;\n return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL });\n };\n provider.imageModel = createImageModel;\n provider.transcription = createTranscriptionModel;\n provider.speech = createSpeechModel;\n\n provider.tools = friendliTools;\n\n // 'getAvailableModels' is declared here.\n return provider as unknown as FriendliAIProvider;\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli();\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const;\n\nexport type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];\n\nexport type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n 
LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal';\n\nimport { z } from 'zod';\n\nimport { FriendliAILanguageModelId } from './friendli-settings';\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error';\nimport { prepareTools } from './friendli-prepare-tools';\nimport { MetadataExtractor, ProviderErrorStructure } from '@ai-sdk/openai-compatible';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: FriendliAILanguageModelId;\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(modelId: FriendliAILanguageModelId, config: OpenAICompatibleChatConfig) {\n this.modelId = modelId;\n // this.settings = settings\n this.config = config;\n\n const errorStructure = friendliaiErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);\n\n this.failedResponseHandler = createJsonErrorResponseHandler(friendliaiErrorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean;\n }) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? 
{},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n });\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // >>> hard-coded default options >>>\n parse_reasoning: true,\n // <<< hard-coded default options <<<\n\n model: this.modelId,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true && responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? { chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(OpenAICompatibleChatResponseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ?? undefined,\n cachedInputTokens: responseBody.usage?.prompt_tokens_details?.cached_tokens ?? undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage ? 
{ include_usage: true } : undefined,\n };\n\n const metadataExtractor = this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(this.chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n totalTokens: number | undefined;\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n };\n let isFirstChunk = true;\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai';\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n usage.totalTokens = total_tokens ?? 
undefined;\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (completion_tokens_details?.accepted_prediction_tokens != null) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (completion_tokens_details?.rejected_prediction_tokens != null) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens = prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (toolCall.function?.name != null && toolCall.function?.arguments != null) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments += toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? 
'',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.acceptedPredictionTokens != null) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (usage.completionTokensDetails.rejectedPredictionTokens != null) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens: usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens: usage.promptTokensDetails.cachedTokens ?? undefined,\n },\n providerMetadata,\n });\n },\n })\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n })\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n })\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n })\n ),\n result: z.string().nullable(),\n 
error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n]);\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n })\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n })\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. 
Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n});\n\nexport type FriendliProviderOptions = z.infer<typeof friendliProviderOptionsSchema>;\n","import { z } from 'zod';\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n});\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> = {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n};\n\nexport const friendliaiFailedResponseHandler =\n createJsonErrorResponseHandler(friendliaiErrorStructure);\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: string;\n files?: string[];\n }>\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }\n | {\n type: string;\n }\n > = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 
'unknown',\n });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { loadApiKey } from '@ai-sdk/provider-utils';\n\ntype Pricing = {\n inputToken?: number;\n outputToken?: number;\n responseTime?: number;\n unitType?: 'TOKEN' | 'SECOND';\n currency?: string;\n unit?: string;\n};\n\nexport type FriendliAvailableModel = {\n id: string;\n name?: string | null;\n description?: string | null;\n pricing?: Pricing;\n warm?: boolean;\n cold?: boolean;\n contextLength?: number | null;\n};\n\nexport type FriendliAvailableModelsResponse = {\n models: FriendliAvailableModel[];\n};\n\ntype GraphQLResponse<T> = {\n data?: T;\n errors?: Array<{ message: string }>;\n};\n\nconst DEFAULT_GRAPHQL_URL = 'https://api-internal.friendli.ai/api/graphql';\n\nasync function postGraphQL<T>(\n url: string,\n body: {\n query: string;\n variables?: Record<string, unknown>;\n operationName?: string;\n },\n headers: Record<string, string>\n): Promise<GraphQLResponse<T>> {\n const res = await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: JSON.stringify(body),\n });\n\n let json: GraphQLResponse<T>;\n try {\n json = await res.json();\n } catch (err) {\n console.error(\n 'Failed to parse JSON response from Friendli API:',\n err,\n 'Status:',\n res.status,\n res.statusText\n );\n throw new Error(\n `Failed to parse JSON response from Friendli API: ${err instanceof Error ? 
err.message : String(err)}`\n );\n }\n return json;\n}\n\ntype ServerlessEndpointEdge = {\n id: string;\n name?: string | null;\n status?: 'WARM' | 'COLD' | string | null;\n price?: {\n inputPrice?: number | null;\n outputPrice?: number | null;\n unit?: string | null;\n responseTimePrice?: number | null;\n priceUnitType?: 'TOKEN' | 'SECOND' | null;\n } | null;\n contextLength?: number | null;\n};\n\ntype ServerlessEndpointsQuery = {\n serverlessEndpoints?: {\n edges?: ServerlessEndpointEdge[];\n } | null;\n};\n\nfunction normalizePriceUnit(unit?: string | null): string | undefined {\n if (!unit) return undefined;\n return unit;\n}\n\nexport async function getAvailableModelsImpl(options: {\n apiKey?: string;\n teamId?: string;\n headers?: Record<string, string>;\n graphqlURL?: string;\n}): Promise<FriendliAvailableModelsResponse> {\n let token: string | undefined;\n try {\n token =\n options.apiKey ??\n loadApiKey({\n apiKey: undefined,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n });\n } catch {\n token = undefined;\n }\n\n const headers: Record<string, string> = {\n ...(token ? { Authorization: `Bearer ${token}` } : {}),\n ...(options.teamId ? { 'X-Friendli-Team': options.teamId } : {}),\n ...(options.headers ?? {}),\n };\n\n const url = options.graphqlURL ?? DEFAULT_GRAPHQL_URL;\n\n const query = `\n query Edges {\n serverlessEndpoints {\n edges {\n ... on ServerlessChatEndpointCatalog {\n id\n name\n status\n price {\n inputPrice\n outputPrice\n unit\n responseTimePrice\n priceUnitType\n }\n contextLength\n }\n }\n }\n }\n `;\n\n const resp = await postGraphQL<ServerlessEndpointsQuery>(\n url,\n { query, variables: {}, operationName: 'Edges' },\n headers\n );\n\n if (resp.errors && resp.errors.length > 0) {\n throw new Error(\n `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join('; ')}`\n );\n }\n\n const edges = resp.data?.serverlessEndpoints?.edges ?? [];\n\n const models: FriendliAvailableModel[] = edges.map((e) => {\n const warm = e.status === 'WARM';\n const pricing: Pricing | undefined = e.price\n ? {\n inputToken: e.price.inputPrice ?? undefined,\n outputToken: e.price.outputPrice ?? undefined,\n responseTime: e.price.responseTimePrice ?? undefined,\n unitType: (e.price.priceUnitType ?? undefined) as 'TOKEN' | 'SECOND' | undefined,\n unit: normalizePriceUnit(e.price.unit),\n currency: 'USD',\n }\n : undefined;\n\n return {\n id: e.id,\n name: e.name ?? undefined,\n description: undefined,\n pricing,\n warm,\n cold: warm === false,\n contextLength: e.contextLength ?? 
undefined,\n };\n });\n\n return { models };\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider';\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n };\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n };\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n };\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n };\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n };\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n };\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: codePythonInterpreterBetaTool,\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AACP,IAAAC,yBAAgE;AAChE,+BAAwD;;;ACRjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AACF;;;AC5BA,IAAAC,mBAUO;AACP,IAAAC,yBAaO;AACP,sBAIO;AAEP,IAAAC,cAAkB;;;AC/BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AAAA,EAClB,OAAO,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BAAwE;AAAA,EACnF,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEO,IAAM,sCACX,sDAA+B,wBAAwB;;;ACjBzD,sBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AF1GO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YAAY,SAAoC,QAAoC;AAXpF,SAAS,uBAAuB;AA/DlC;AA2EI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc,sCAAsC,eAAe,WAAW;AAEnF,SAAK,4BAAwB,uDAA+B,wBAAwB;AAEpF,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IA
AI,gBAAgB;AA3FtB;AA4FI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AA/GL;AAgHI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,UAAM,6CAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,iBAAiB;AAAA;AAAA,QAGjB,OAAO,KAAK;AAAA;AAAA,QAGZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QAAQ,eAAe,UAAU,OAClE;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,cAAU,uDAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA1NjE;AA2NI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B,kDAA0B,kCAAkC;AAAA,MACvF,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YAAmE;AAAA,QACrE,oBAAmB,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MACjF;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,OAAG,qCAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvT/D;AAwTI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eAAe,EAAE,eAAe,KAAK,IAAI;AAAA,IACvE;AAEA,UAAM,qBAAoB,UAAK,OAAO,sBAAZ,mBAA+B;AAEzD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B,yDAAiC,KAAK,WAAW;AAAA,MAC5E,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,y
BAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AApYvC,gBAAAC,KAAA;AAsYY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,uEAA2B,+BAA8B,MAAM;AACjE,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,uEAA2B,+BAA8B,MAAM;AACjE,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eAAe,+DAAuB;AAAA,cAClE;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe,iDAAgC,OAAO,aAAa;AAAA,YACrE;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBAAI,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAAQ,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAAM;AAE3E,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cAAa,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACvE;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxlB5B,gBAAAD,KAAA;AAylBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,4BAA4B,MAAM;AAClE,+BAAiB,mBAAmB,E
AAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,wBAAwB,4BAA4B,MAAM;AAClE,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBAAiB,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBAClE,oBAAmB,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC/D;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;
AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsB,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AGr0BD,IAAAE,yBAA2B;AA8B3B,IAAM,sBAAsB;AAE5B,eAAe,YACb,KACA,MAKA,SAC6B;AAC7B,QAAM,MAAM,MAAM,MAAM,KAAK;AAAA,IAC3B,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL;AAAA,IACA,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B,CAAC;AAED,MAAI;AACJ,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,SAAS,KAAK;AACZ,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,IAAI;AAAA,MACJ,IAAI;AAAA,IACN;AACA,UAAM,IAAI;AAAA,MACR,oDAAoD,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IACtG;AAAA,EACF;AACA,SAAO;AACT;AAsBA,SAAS,mBAAmB,MAA0C;AACpE,MAAI,CAAC,KAAM,QAAO;AAClB,SAAO;AACT;AAEA,eAAsB,uBAAuB,SAKA;AAlG7C;AAmGE,MAAI;AACJ,MAAI;AACF,aACE,aAAQ,WAAR,gBACA,mCAAW;AAAA,MACT,QAAQ;AAAA,MACR,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,EACL,SAAQ;AACN,YAAQ;AAAA,EACV;AAEA,QAAM,UAAkC;AAAA,IACtC,GAAI,QAAQ,EAAE,eAAe,UAAU,KAAK,GAAG,IAAI,CAAC;AAAA,IACpD,GAAI,QAAQ,SAAS,EAAE,mBAAmB,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC9D,IAAI,aAAQ,YAAR,YAAmB,CAAC;AAAA,EAC1B;AAEA,QAAM,OAAM,aAAQ,eAAR,YAAsB;AAElC,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBd,QAAM,OAAO,MAAM;AAAA,IACjB;AAAA,IACA,EAAE,OAAO,WAAW,CAAC,GAAG,eAAe,QAAQ;AAAA,IAC/C;AAAA,EACF;AAEA,MAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IACpF;AAAA,EACF;AAEA,QAAM,SAAQ,sBAAK,SAAL,mBAAW,wBAAX,mBAAgC,UAAhC,YAAyC,CAAC;AAExD,QAAM,SAAmC,MAAM,IAAI,CAAC,MAAM;AA5J5D,QAAAC,KAAAC,KAAAC,KAAAC,KAAAC,KAAAC;AA6JI,UAAM,OAAO,EAAE,WAAW;AAC1B,UAAM,UAA+B,EAAE,QACnC;AAAA,MACE,aAAYL,MAAA,EAAE,MAAM,eAAR,OAAAA,MAAsB;AAAA,MAClC,cAAaC,MAAA,EAAE,MAAM,gBAAR,OAAAA,MAAuB;AAAA,MACpC,eAAcC,MAAA,EAAE,MAAM,sBAAR,OAAAA,MAA6B;AAAA,MAC3C,WAAWC,MAAA,EAAE,MAAM,kBAAR,OAAAA,MAAyB;AAAA,MACpC,MAAM,mBAAmB,EAAE,MAAM,IAAI;AAAA,MACrC,UAAU;AAAA,IACZ,IACA;AAEJ,WAAO;AAAA,MACL,IAAI,EAAE;AAAA,MACN,OAAMC,MAAA,EAAE,SAAF,OAAAA,MAAU;AAAA,MAChB,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,MAAM,SAAS;AAAA,MACf,gBAAeC,MAAA,EAAE,kBAAF,OAAAA,MAAmB;AAAA,IACpC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,OAAO;AAClB;;;ACnLA,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MA
AM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ANwCO,SAAS,eAAe,UAAsC,CAAC,GAAuB;AAC3F,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YAIG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YAAI,6BAA6B,SAAS,OAAsC,GAAG;AACjF,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAAuC,oBAAoB,OAAO;AAEpF,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,EAAC,SAA2C,qBAAqB,OAAO,SAElE;AAxNR;AAyNI,UAAM,aAAa;AACnB,UAAM,cAAa,kCAAM,eAAN,YAAoB;AACvC,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ;AACvB,UAAM,UAAU,QAAQ;AACxB,WAAO,uBAAuB,EAAE,QAAQ,QAAQ,SAAS,WAAW,CAAC;AAAA,EACvE;AACA,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAGjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall","import_provider_utils","_a","_b","_c","_d","_e","_f"]}
package/dist/index.mjs CHANGED
@@ -2,10 +2,7 @@
  import {
  NoSuchModelError
  } from "@ai-sdk/provider";
- import {
- loadApiKey as loadApiKey2,
- withoutTrailingSlash
- } from "@ai-sdk/provider-utils";
+ import { loadApiKey as loadApiKey2, withoutTrailingSlash } from "@ai-sdk/provider-utils";
  import { OpenAICompatibleCompletionLanguageModel } from "@ai-sdk/openai-compatible";

  // src/friendli-settings.ts
@@ -65,9 +62,7 @@ var friendliaiErrorStructure = {
  errorSchema: friendliaiErrorSchema,
  errorToMessage: (data) => data.message
  };
- var friendliaiFailedResponseHandler = createJsonErrorResponseHandler(
- friendliaiErrorStructure
- );
+ var friendliaiFailedResponseHandler = createJsonErrorResponseHandler(friendliaiErrorStructure);

  // src/friendli-prepare-tools.ts
  import {
@@ -137,12 +132,8 @@ var FriendliAIChatLanguageModel = class {
  this.modelId = modelId;
  this.config = config;
  const errorStructure = friendliaiErrorStructure;
- this.chunkSchema = createOpenAICompatibleChatChunkSchema(
- errorStructure.errorSchema
- );
- this.failedResponseHandler = createJsonErrorResponseHandler2(
- friendliaiErrorStructure
- );
+ this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);
+ this.failedResponseHandler = createJsonErrorResponseHandler2(friendliaiErrorStructure);
  this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : true;
  }
  get provider() {
@@ -195,10 +186,10 @@ var FriendliAIChatLanguageModel = class {
  });
  return {
  args: {
- // model id:
+ // >>> hard-coded default options >>>
+ parse_reasoning: true,
+ // <<< hard-coded default options <<<
  model: this.modelId,
- // model specific settings:
- // user: compatibleOptions.user,
  // standardized settings:
  stream,
  max_tokens: maxOutputTokens,
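
The hunk above carries the substance of this release: `parse_reasoning: true` is now injected as a hard-coded default ahead of the model id, so every chat-completions request built by the provider includes it. A minimal sketch of the resulting request body, assuming the OpenAI-compatible field names visible elsewhere in this diff (`model`, `stream`, `max_tokens`, `messages`); the concrete values are placeholders, not package code:

```ts
// Hypothetical illustration of the JSON body posted to /chat/completions
// after this change; all values except parse_reasoning are placeholders.
const exampleRequestBody = {
  parse_reasoning: true, // hard-coded default as of 0.3.1
  model: 'meta-llama-3.1-8b-instruct',
  stream: false,
  max_tokens: 512,
  messages: [{ role: 'user', content: 'Hello' }],
};
```
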
@@ -248,9 +239,7 @@ var FriendliAIChatLanguageModel = class {
  headers: combineHeaders(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: this.failedResponseHandler,
- successfulResponseHandler: createJsonResponseHandler(
- OpenAICompatibleChatResponseSchema
- ),
+ successfulResponseHandler: createJsonResponseHandler(OpenAICompatibleChatResponseSchema),
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
@@ -315,9 +304,7 @@ var FriendliAIChatLanguageModel = class {
  headers: combineHeaders(this.config.headers(), options.headers),
  body,
  failedResponseHandler: this.failedResponseHandler,
- successfulResponseHandler: createEventSourceResponseHandler(
- this.chunkSchema
- ),
+ successfulResponseHandler: createEventSourceResponseHandler(this.chunkSchema),
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
@@ -392,9 +379,7 @@ var FriendliAIChatLanguageModel = class {
  }
  const choice = value.choices[0];
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
- finishReason = mapOpenAICompatibleFinishReason(
- choice.finish_reason
- );
+ finishReason = mapOpenAICompatibleFinishReason(choice.finish_reason);
  }
  if ((choice == null ? void 0 : choice.delta) == null) {
  return;
@@ -887,9 +872,7 @@ function createFriendli(options = {}) {
  type: "serverless-tools"
  };
  default:
- if (FriendliAIServerlessModelIds.includes(
- modelId
- )) {
+ if (FriendliAIServerlessModelIds.includes(modelId)) {
  return {
  baseURL: FriendliBaseURL.serverless,
  type: "serverless"
@@ -1 +1 @@
- {"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/get-available-models.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { getAvailableModelsImpl } from './get-available-models'\nimport type { FriendliAvailableModelsResponse } from './get-available-models'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): LanguageModelV2\n textEmbeddingModel(modelId: string & {}): LanguageModelV2\n /**\n * Returns the available models and their metadata.\n */\n getAvailableModels(options?: {\n graphqlURL?: string\n }): Promise<FriendliAvailableModelsResponse>\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 
'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n ;(provider as unknown as FriendliAIProvider).getAvailableModels =\n async (opts?: { graphqlURL?: string }) => {\n const defaultURL = 'https://api-internal.friendli.ai/api/graphql'\n const graphqlURL = opts?.graphqlURL ?? 
defaultURL\n const apiKey = options.apiKey\n const teamId = options.teamId\n const headers = options.headers\n return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL })\n }\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n // 'getAvailableModels' is declared here.\n return provider as unknown as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n 
// readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? 
{ chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: 
z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { loadApiKey } from '@ai-sdk/provider-utils'\n\ntype Pricing = {\n inputToken?: number\n outputToken?: number\n responseTime?: number\n unitType?: 'TOKEN' | 'SECOND'\n currency?: string\n unit?: string\n}\n\nexport type FriendliAvailableModel = {\n id: string\n name?: string | null\n description?: string | null\n pricing?: Pricing\n warm?: boolean\n cold?: boolean\n contextLength?: number | null\n}\n\nexport type FriendliAvailableModelsResponse = {\n models: FriendliAvailableModel[]\n}\n\ntype GraphQLResponse<T> = {\n data?: T\n errors?: Array<{ message: string }>\n}\n\nconst DEFAULT_GRAPHQL_URL = 'https://api-internal.friendli.ai/api/graphql'\n\nasync function postGraphQL<T>(\n url: string,\n body: {\n query: string\n variables?: Record<string, unknown>\n operationName?: string\n },\n headers: Record<string, string>,\n): Promise<GraphQLResponse<T>> {\n const res = await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: JSON.stringify(body),\n })\n\n let json: GraphQLResponse<T>\n try {\n json = await res.json()\n } catch (err) {\n console.error(\n 'Failed to parse JSON response from Friendli API:',\n err,\n 'Status:',\n res.status,\n res.statusText,\n )\n throw new Error(\n `Failed to parse JSON response from Friendli API: ${err instanceof Error ? err.message : String(err)}`,\n )\n }\n return json\n}\n\ntype ServerlessEndpointEdge = {\n id: string\n name?: string | null\n status?: 'WARM' | 'COLD' | string | null\n price?: {\n inputPrice?: number | null\n outputPrice?: number | null\n unit?: string | null\n responseTimePrice?: number | null\n priceUnitType?: 'TOKEN' | 'SECOND' | null\n } | null\n contextLength?: number | null\n}\n\ntype ServerlessEndpointsQuery = {\n serverlessEndpoints?: {\n edges?: ServerlessEndpointEdge[]\n } | null\n}\n\nfunction normalizePriceUnit(unit?: string | null): string | undefined {\n if (!unit) return undefined\n return unit\n}\n\nexport async function getAvailableModelsImpl(options: {\n apiKey?: string\n teamId?: string\n headers?: Record<string, string>\n graphqlURL?: string\n}): Promise<FriendliAvailableModelsResponse> {\n let token: string | undefined\n try {\n token =\n options.apiKey ??\n loadApiKey({\n apiKey: undefined,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })\n } catch {\n token = undefined\n }\n\n const headers: Record<string, string> = {\n ...(token ? { Authorization: `Bearer ${token}` } : {}),\n ...(options.teamId ? { 'X-Friendli-Team': options.teamId } : {}),\n ...(options.headers ?? {}),\n }\n\n const url = options.graphqlURL ?? DEFAULT_GRAPHQL_URL\n\n const query = `\n query Edges {\n serverlessEndpoints {\n edges {\n ... 
on ServerlessChatEndpointCatalog {\n id\n name\n status\n price {\n inputPrice\n outputPrice\n unit\n responseTimePrice\n priceUnitType\n }\n contextLength\n }\n }\n }\n }\n `\n\n const resp = await postGraphQL<ServerlessEndpointsQuery>(\n url,\n { query, variables: {}, operationName: 'Edges' },\n headers,\n )\n\n if (resp.errors && resp.errors.length > 0) {\n throw new Error(\n `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join('; ')}`,\n )\n }\n\n const edges = resp.data?.serverlessEndpoints?.edges ?? []\n\n const models: FriendliAvailableModel[] = edges.map((e) => {\n const warm = e.status === 'WARM'\n const pricing: Pricing | undefined = e.price\n ? {\n inputToken: e.price.inputPrice ?? undefined,\n outputToken: e.price.outputPrice ?? undefined,\n responseTime: e.price.responseTimePrice ?? undefined,\n unitType: (e.price.priceUnitType ?? undefined) as\n | 'TOKEN'\n | 'SECOND'\n | undefined,\n unit: normalizePriceUnit(e.price.unit),\n currency: 'USD',\n }\n : undefined\n\n return {\n id: e.id,\n name: e.name ?? undefined,\n description: undefined,\n pricing,\n warm,\n cold: warm === false,\n contextLength: e.contextLength ?? undefined,\n }\n })\n\n return { models }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE,cAAAA;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AACF;;;AC5BA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC/BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,MAAM,qBAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,
WACJ,SAC6D;AAtOjE;AAuOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM
,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAA
Q;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsBA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AG32BD,SAAS,kBAAkB;AA8B3B,IAAM,sBAAsB;AAE5B,eAAe,YACb,KACA,MAKA,SAC6B;AAC7B,QAAM,MAAM,MAAM,MAAM,KAAK;AAAA,IAC3B,QAAQ;AAAA,I
ACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL;AAAA,IACA,MAAM,KAAK,UAAU,IAAI;AAAA,EAC3B,CAAC;AAED,MAAI;AACJ,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,SAAS,KAAK;AACZ,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,IAAI;AAAA,MACJ,IAAI;AAAA,IACN;AACA,UAAM,IAAI;AAAA,MACR,oDAAoD,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IACtG;AAAA,EACF;AACA,SAAO;AACT;AAsBA,SAAS,mBAAmB,MAA0C;AACpE,MAAI,CAAC,KAAM,QAAO;AAClB,SAAO;AACT;AAEA,eAAsB,uBAAuB,SAKA;AAlG7C;AAmGE,MAAI;AACJ,MAAI;AACF,aACE,aAAQ,WAAR,YACA,WAAW;AAAA,MACT,QAAQ;AAAA,MACR,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,EACL,SAAQ;AACN,YAAQ;AAAA,EACV;AAEA,QAAM,UAAkC;AAAA,IACtC,GAAI,QAAQ,EAAE,eAAe,UAAU,KAAK,GAAG,IAAI,CAAC;AAAA,IACpD,GAAI,QAAQ,SAAS,EAAE,mBAAmB,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC9D,IAAI,aAAQ,YAAR,YAAmB,CAAC;AAAA,EAC1B;AAEA,QAAM,OAAM,aAAQ,eAAR,YAAsB;AAElC,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBd,QAAM,OAAO,MAAM;AAAA,IACjB;AAAA,IACA,EAAE,OAAO,WAAW,CAAC,GAAG,eAAe,QAAQ;AAAA,IAC/C;AAAA,EACF;AAEA,MAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IACpF;AAAA,EACF;AAEA,QAAM,SAAQ,sBAAK,SAAL,mBAAW,wBAAX,mBAAgC,UAAhC,YAAyC,CAAC;AAExD,QAAM,SAAmC,MAAM,IAAI,CAAC,MAAM;AA5J5D,QAAAC,KAAAC,KAAAC,KAAAC,KAAAC,KAAAC;AA6JI,UAAM,OAAO,EAAE,WAAW;AAC1B,UAAM,UAA+B,EAAE,QACnC;AAAA,MACE,aAAYL,MAAA,EAAE,MAAM,eAAR,OAAAA,MAAsB;AAAA,MAClC,cAAaC,MAAA,EAAE,MAAM,gBAAR,OAAAA,MAAuB;AAAA,MACpC,eAAcC,MAAA,EAAE,MAAM,sBAAR,OAAAA,MAA6B;AAAA,MAC3C,WAAWC,MAAA,EAAE,MAAM,kBAAR,OAAAA,MAAyB;AAAA,MAIpC,MAAM,mBAAmB,EAAE,MAAM,IAAI;AAAA,MACrC,UAAU;AAAA,IACZ,IACA;AAEJ,WAAO;AAAA,MACL,IAAI,EAAE;AAAA,MACN,OAAMC,MAAA,EAAE,SAAF,OAAAA,MAAU;AAAA,MAChB,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,MAAM,SAAS;AAAA,MACf,gBAAeC,MAAA,EAAE,kBAAF,OAAAA,MAAmB;AAAA,IACpC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,OAAO;AAClB;;;ACtLA,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AN8CO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAUC,YAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,
IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC7B,EAAC,SAA2C,qBAC3C,OAAO,SAAmC;AAzO9C;AA0OM,UAAM,aAAa;AACnB,UAAM,cAAa,kCAAM,eAAN,YAAoB;AACvC,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ;AACvB,UAAM,UAAU,QAAQ;AACxB,WAAO,uBAAuB,EAAE,QAAQ,QAAQ,SAAS,WAAW,CAAC;AAAA,EACvE;AACF,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAGjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["loadApiKey","createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z","_a","_b","_c","_d","_e","_f","loadApiKey"]}
1
+ {"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/get-available-models.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible';\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings';\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model';\nimport { friendliaiErrorStructure } from './friendli-error';\nimport { getAvailableModelsImpl } from './get-available-models';\nimport type { FriendliAvailableModelsResponse } from './get-available-models';\nimport { friendliTools } from './friendli-tools';\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string;\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools';\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>;\n /**\n * FriendliAI Team ID.\n */\n teamId?: string;\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2;\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): LanguageModelV2;\n textEmbeddingModel(modelId: string & {}): LanguageModelV2;\n /**\n * Returns the available models and their metadata.\n */\n getAvailableModels(options?: { graphqlURL?: string }): Promise<FriendliAvailableModelsResponse>;\n embedding(modelId: string & {}): EmbeddingModelV2<string>;\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>;\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2;\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2;\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2;\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools;\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(options: FriendliAIProviderSettings = {}): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 
'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n });\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL: string | 'dedicated' | 'serverless' | 'serverless-tools' | undefined\n ): {\n baseURL: string;\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom';\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n };\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL);\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' };\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n };\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n };\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n };\n default:\n if (FriendliAIServerlessModelIds.includes(modelId as FriendliAIServerlessModelId)) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n };\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n };\n }\n }\n };\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n };\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL);\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n });\n };\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });\n };\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' });\n };\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' });\n };\n\n const provider = (modelId: FriendliAILanguageModelId) => createLanguageModel(modelId);\n\n provider.languageModel = createLanguageModel;\n provider.chat = createLanguageModel;\n provider.completion = createCompletionModel;\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n (provider as unknown as FriendliAIProvider).getAvailableModels = async (opts?: {\n graphqlURL?: string;\n }) => {\n const defaultURL = 'https://api-internal.friendli.ai/api/graphql';\n const graphqlURL = opts?.graphqlURL ?? 
defaultURL;\n const apiKey = options.apiKey;\n const teamId = options.teamId;\n const headers = options.headers;\n return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL });\n };\n provider.imageModel = createImageModel;\n provider.transcription = createTranscriptionModel;\n provider.speech = createSpeechModel;\n\n provider.tools = friendliTools;\n\n // 'getAvailableModels' is declared here.\n return provider as unknown as FriendliAIProvider;\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli();\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'meta-llama-3.3-70b-instruct',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n\n 'LGAI-EXAONE/EXAONE-4.0.1-32B',\n 'skt/A.X-3.1',\n 'skt/A.X-4.0',\n 'naver-hyperclovax/HyperCLOVAX-SEED-Think-14B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n\n 'mistralai/Magistral-Small-2506',\n 'mistralai/Devstral-Small-2505',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n\n 'Qwen/Qwen3-32B',\n 'Qwen/Qwen3-30B-A3B',\n 'Qwen/Qwen3-235B-A22B-Thinking-2507',\n 'Qwen/Qwen3-235B-A22B-Instruct-2507',\n\n 'deepseek-ai/DeepSeek-R1-0528',\n 'google/gemma-3-27b-it',\n] as const;\n\nexport type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];\n\nexport type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal';\n\nimport { z } from 'zod';\n\nimport { FriendliAILanguageModelId } from './friendli-settings';\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error';\nimport { prepareTools } from './friendli-prepare-tools';\nimport { MetadataExtractor, ProviderErrorStructure } from '@ai-sdk/openai-compatible';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: 
FriendliAILanguageModelId;\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(modelId: FriendliAILanguageModelId, config: OpenAICompatibleChatConfig) {\n this.modelId = modelId;\n // this.settings = settings\n this.config = config;\n\n const errorStructure = friendliaiErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);\n\n this.failedResponseHandler = createJsonErrorResponseHandler(friendliaiErrorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean;\n }) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n });\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // >>> hard-coded default options >>>\n parse_reasoning: true,\n // <<< hard-coded default options <<<\n\n model: this.modelId,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true && responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? 
{ chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(OpenAICompatibleChatResponseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ?? undefined,\n cachedInputTokens: responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage ? { include_usage: true } : undefined,\n };\n\n const metadataExtractor = this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(this.chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n totalTokens: number | undefined;\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n };\n let isFirstChunk = true;\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai';\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n usage.totalTokens = total_tokens ?? undefined;\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (completion_tokens_details?.accepted_prediction_tokens != null) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (completion_tokens_details?.rejected_prediction_tokens != null) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens = prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (toolCall.function?.name != null && toolCall.function?.arguments != null) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments += toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.acceptedPredictionTokens != null) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (usage.completionTokensDetails.rejectedPredictionTokens != null) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens: usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens: usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n });\n },\n })\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n })\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n })\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n })\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n]);\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: 
z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n })\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n })\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n })\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n});\n\nexport type FriendliProviderOptions = z.infer<typeof friendliProviderOptionsSchema>;\n","import { z } from 'zod';\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n});\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> = {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n};\n\nexport const friendliaiFailedResponseHandler =\n createJsonErrorResponseHandler(friendliaiErrorStructure);\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: string;\n files?: string[];\n }>\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }\n | {\n type: string;\n }\n > = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 'unknown',\n });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { loadApiKey } from '@ai-sdk/provider-utils';\n\ntype Pricing = {\n inputToken?: number;\n outputToken?: number;\n responseTime?: number;\n unitType?: 'TOKEN' | 'SECOND';\n currency?: string;\n unit?: string;\n};\n\nexport type FriendliAvailableModel = {\n id: string;\n name?: string | null;\n description?: string | null;\n pricing?: Pricing;\n warm?: boolean;\n cold?: boolean;\n contextLength?: number | null;\n};\n\nexport type FriendliAvailableModelsResponse = {\n models: FriendliAvailableModel[];\n};\n\ntype GraphQLResponse<T> = {\n data?: T;\n errors?: Array<{ message: string }>;\n};\n\nconst DEFAULT_GRAPHQL_URL = 'https://api-internal.friendli.ai/api/graphql';\n\nasync function postGraphQL<T>(\n url: string,\n body: {\n query: string;\n variables?: Record<string, unknown>;\n operationName?: string;\n },\n headers: Record<string, string>\n): Promise<GraphQLResponse<T>> {\n const res = await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers,\n },\n body: JSON.stringify(body),\n });\n\n let json: GraphQLResponse<T>;\n try {\n json = await res.json();\n } catch (err) {\n console.error(\n 'Failed to parse JSON response from Friendli API:',\n err,\n 'Status:',\n res.status,\n res.statusText\n );\n throw new Error(\n `Failed to parse JSON response from Friendli API: ${err instanceof Error ? err.message : String(err)}`\n );\n }\n return json;\n}\n\ntype ServerlessEndpointEdge = {\n id: string;\n name?: string | null;\n status?: 'WARM' | 'COLD' | string | null;\n price?: {\n inputPrice?: number | null;\n outputPrice?: number | null;\n unit?: string | null;\n responseTimePrice?: number | null;\n priceUnitType?: 'TOKEN' | 'SECOND' | null;\n } | null;\n contextLength?: number | null;\n};\n\ntype ServerlessEndpointsQuery = {\n serverlessEndpoints?: {\n edges?: ServerlessEndpointEdge[];\n } | null;\n};\n\nfunction normalizePriceUnit(unit?: string | null): string | undefined {\n if (!unit) return undefined;\n return unit;\n}\n\nexport async function getAvailableModelsImpl(options: {\n apiKey?: string;\n teamId?: string;\n headers?: Record<string, string>;\n graphqlURL?: string;\n}): Promise<FriendliAvailableModelsResponse> {\n let token: string | undefined;\n try {\n token =\n options.apiKey ??\n loadApiKey({\n apiKey: undefined,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n });\n } catch {\n token = undefined;\n }\n\n const headers: Record<string, string> = {\n ...(token ? { Authorization: `Bearer ${token}` } : {}),\n ...(options.teamId ? { 'X-Friendli-Team': options.teamId } : {}),\n ...(options.headers ?? {}),\n };\n\n const url = options.graphqlURL ?? DEFAULT_GRAPHQL_URL;\n\n const query = `\n query Edges {\n serverlessEndpoints {\n edges {\n ... 
on ServerlessChatEndpointCatalog {\n id\n name\n status\n price {\n inputPrice\n outputPrice\n unit\n responseTimePrice\n priceUnitType\n }\n contextLength\n }\n }\n }\n }\n `;\n\n const resp = await postGraphQL<ServerlessEndpointsQuery>(\n url,\n { query, variables: {}, operationName: 'Edges' },\n headers\n );\n\n if (resp.errors && resp.errors.length > 0) {\n throw new Error(\n `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join('; ')}`\n );\n }\n\n const edges = resp.data?.serverlessEndpoints?.edges ?? [];\n\n const models: FriendliAvailableModel[] = edges.map((e) => {\n const warm = e.status === 'WARM';\n const pricing: Pricing | undefined = e.price\n ? {\n inputToken: e.price.inputPrice ?? undefined,\n outputToken: e.price.outputPrice ?? undefined,\n responseTime: e.price.responseTimePrice ?? undefined,\n unitType: (e.price.priceUnitType ?? undefined) as 'TOKEN' | 'SECOND' | undefined,\n unit: normalizePriceUnit(e.price.unit),\n currency: 'USD',\n }\n : undefined;\n\n return {\n id: e.id,\n name: e.name ?? undefined,\n description: undefined,\n pricing,\n warm,\n cold: warm === false,\n contextLength: e.contextLength ?? undefined,\n };\n });\n\n return { models };\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider';\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n };\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n };\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n };\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n };\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n };\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n };\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
[generated source map for the bundled dist output: machine-generated "mappings"/"names" data omitted]
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@friendliai/ai-provider",
- "version": "0.3.0-beta.7",
+ "version": "0.3.1",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -25,17 +25,17 @@
  }
  },
  "dependencies": {
- "@ai-sdk/openai-compatible": "1.0.10",
+ "@ai-sdk/openai-compatible": "1.0.22",
  "@ai-sdk/provider": "2.0.0",
- "@ai-sdk/provider-utils": "3.0.4"
+ "@ai-sdk/provider-utils": "3.0.12"
  },
  "devDependencies": {
  "@edge-runtime/vm": "^5.0.0",
  "@types/node": "^24.3.0",
  "globals": "^16.3.0",
  "tsup": "^8.5.0",
- "typescript": "5.9.2",
- "zod": "4.0.17"
+ "typescript": "5.9.3",
+ "zod": "4.1.12"
  },
  "peerDependencies": {
  "zod": "^3.0.0"
@@ -44,7 +44,8 @@
  "node": ">=18"
  },
  "publishConfig": {
- "access": "public"
+ "access": "public",
+ "registry": "https://registry.npmjs.org"
  },
  "homepage": "https://friendli.ai/docs/sdk/integrations/vercel-ai-sdk",
  "repository": {
@@ -64,10 +65,10 @@
  "clean": "rm -rf dist && rm -rf internal/dist",
  "dev": "tsup --watch",
  "lint": "eslint \"./**/*.ts*\"",
- "type-check": "tsc --noEmit",
- "prettier-check": "prettier --check \"./**/*.ts*\"",
+ "typecheck": "tsc --noEmit",
  "test": "pnpm test:node && pnpm test:edge",
  "test:edge": "vitest --config vitest.edge.config.ts --run",
- "test:node": "vitest --config vitest.node.config.ts --run"
+ "test:node": "vitest --config vitest.node.config.ts --run",
+ "test:coverage": "pnpm test:node --coverage && pnpm test:edge --coverage"
  }
  }
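
The updated `scripts` section chains the existing node and edge test targets into a single coverage run. A minimal usage sketch, assuming a local checkout of the repository with dev dependencies installed via pnpm (not something the published package itself exposes):

```bash
# Runs both vitest configs with coverage enabled,
# which is exactly what the new "test:coverage" script chains together.
pnpm test:coverage

# Equivalent to invoking the two underlying targets directly:
pnpm test:node --coverage && pnpm test:edge --coverage
```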