@friendliai/ai-provider 0.3.0-alpha.3 → 0.3.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -2
- package/dist/index.d.mts +11 -17
- package/dist/index.d.ts +11 -17
- package/dist/index.js +37 -24
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +37 -24
- package/dist/index.mjs.map +1 -1
- package/package.json +8 -8
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,18 @@
 # @friendliai/ai-provider
 
+## 0.3.0-beta.5
+
+### Patch Changes
+
+- 40e93c0: bump v5 package latest
+
+## 0.3.0-alpha.4
+
+### Patch Changes
+
+- be3a6bf: add chat_template_kwargs option
+- 74cc1e2: dump deps (alpha.6)
+
 ## 0.2.7-alpha.2
 
 ### Patch Changes
@@ -60,7 +73,6 @@
 ### Minor Changes
 
 - f262011: convert-openai-compatible-components
-
 - dedicated, serverless automatic switching
 - /v1/completion available
 - Improved error parsing logic
@@ -107,7 +119,6 @@
 ### Patch Changes
 
 - 7caa63e: ## Updated code to match ai sdk 4.0.10
-
 - createFriendliAI -> createFriendli
 - Dependency Update
 - Updated tool call parsing logic
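For reference, the `chat_template_kwargs` option added in 0.3.0-alpha.4 is passed through the AI SDK's `providerOptions` under the `friendli` key (see the provider-options schema change in `dist/index.js` below). A minimal sketch of how it could be used; `enable_thinking` is a hypothetical template kwarg, since the accepted keys depend on the target model's chat template rather than on this package:

```ts
import { generateText } from 'ai';
import { friendli } from '@friendliai/ai-provider';

// chat_template_kwargs is forwarded as-is in the request body,
// so any keys must match what the model's chat template expects.
const { text } = await generateText({
  model: friendli('Qwen/Qwen3-32B'),
  prompt: 'Summarize the release notes in one sentence.',
  providerOptions: {
    friendli: {
      chat_template_kwargs: { enable_thinking: false }, // hypothetical kwarg
    },
  },
});
```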
package/dist/index.d.mts
CHANGED
@@ -1,17 +1,17 @@
-import { …
+import { LanguageModelV2ProviderDefinedTool, ProviderV2, LanguageModelV2, EmbeddingModelV2, ImageModelV2, TranscriptionModelV2, SpeechModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod';
 
-declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1"];
+declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1", "LGAI-EXAONE/EXAONE-4.0-32B", "K-intelligence/Midm-2.0-Base-Instruct", "K-intelligence/Midm-2.0-Mini-Instruct", "deepseek-ai/DeepSeek-R1-0528", "meta-llama/Llama-3.1-8B-Instruct", "mistralai/Magistral-Small-2506", "deepseek-ai/DeepSeek-R1", "meta-llama/Llama-3.3-70B-Instruct", "mistralai/Devstral-Small-2505", "google/gemma-3-27b-it", "Qwen/Qwen3-32B", "meta-llama/Llama-4-Scout-17B-16E-Instruct", "Qwen/Qwen3-235B-A22B", "Qwen/Qwen3-30B-A3B", "meta-llama/Llama-4-Maverick-17B-128E-Instruct", "mistralai/Mistral-Small-3.1-24B-Instruct-2503"];
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
-declare function webUrlBetaTool(): …
-declare function webSearchBetaTool(): …
-declare function mathCalendarBetaTool(): …
-declare function mathStatisticsBetaTool(): …
-declare function mathCalculatorBetaTool(): …
-declare function codePythonInterpreterBetaTool(): …
+declare function webUrlBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function webSearchBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare const friendliTools: {
     webSearchBetaTool: typeof webSearchBetaTool;
     webUrlBetaTool: typeof webUrlBetaTool;
@@ -76,12 +76,12 @@ interface FriendliAIProvider extends ProviderV2 {
      * Creates a model for transcription.
      * TODO: Implement for Dedicated users
      */
-    transcription(modelId: string & {}): …
+    transcription(modelId: string & {}): TranscriptionModelV2;
     /**
      * Creates a model for speech generation.
      * TODO: Implement for Dedicated users
      */
-    speech(modelId: string & {}): …
+    speech(modelId: string & {}): SpeechModelV2;
     /**
      * Friendli-specific tools.
      */
@@ -99,13 +99,7 @@ declare const friendli: FriendliAIProvider;
 declare const friendliaiErrorSchema: z.ZodObject<{
     message: z.ZodString;
     error: z.ZodRecord<z.ZodString, z.ZodAny>;
-}, …
-    message: string;
-    error: Record<string, any>;
-}, {
-    message: string;
-    error: Record<string, any>;
-}>;
+}, z.core.$strip>;
 type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
 export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, createFriendli, friendli };
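The widened `FriendliAIServerlessModelIds` tuple and the explicit `LanguageModelV2ProviderDefinedTool` return types above are what TypeScript consumers now see. A small sketch of what the declarations imply; the dedicated endpoint ID and the provider settings value are placeholders:

```ts
import { createFriendli, friendli } from '@friendliai/ai-provider';

// Serverless aliases and the new org-scoped IDs both autocomplete;
// arbitrary strings are still accepted through the `(string & {})` escape hatch.
const qwen = friendli('Qwen/Qwen3-32B');
const dedicated = friendli('my-dedicated-endpoint-id'); // placeholder ID

// The beta tool factories are now typed as LanguageModelV2ProviderDefinedTool.
const webSearch = friendli.tools.webSearchBetaTool();

// Provider settings themselves are unchanged in this release.
const provider = createFriendli({ baseURL: 'serverless-tools' });
```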
package/dist/index.d.ts
CHANGED
@@ -1,17 +1,17 @@
-import { …
+import { LanguageModelV2ProviderDefinedTool, ProviderV2, LanguageModelV2, EmbeddingModelV2, ImageModelV2, TranscriptionModelV2, SpeechModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod';
 
-declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1"];
+declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1", "LGAI-EXAONE/EXAONE-4.0-32B", "K-intelligence/Midm-2.0-Base-Instruct", "K-intelligence/Midm-2.0-Mini-Instruct", "deepseek-ai/DeepSeek-R1-0528", "meta-llama/Llama-3.1-8B-Instruct", "mistralai/Magistral-Small-2506", "deepseek-ai/DeepSeek-R1", "meta-llama/Llama-3.3-70B-Instruct", "mistralai/Devstral-Small-2505", "google/gemma-3-27b-it", "Qwen/Qwen3-32B", "meta-llama/Llama-4-Scout-17B-16E-Instruct", "Qwen/Qwen3-235B-A22B", "Qwen/Qwen3-30B-A3B", "meta-llama/Llama-4-Maverick-17B-128E-Instruct", "mistralai/Mistral-Small-3.1-24B-Instruct-2503"];
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
-declare function webUrlBetaTool(): …
-declare function webSearchBetaTool(): …
-declare function mathCalendarBetaTool(): …
-declare function mathStatisticsBetaTool(): …
-declare function mathCalculatorBetaTool(): …
-declare function codePythonInterpreterBetaTool(): …
+declare function webUrlBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function webSearchBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool;
+declare function codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare const friendliTools: {
     webSearchBetaTool: typeof webSearchBetaTool;
     webUrlBetaTool: typeof webUrlBetaTool;
@@ -76,12 +76,12 @@ interface FriendliAIProvider extends ProviderV2 {
      * Creates a model for transcription.
      * TODO: Implement for Dedicated users
      */
-    transcription(modelId: string & {}): …
+    transcription(modelId: string & {}): TranscriptionModelV2;
     /**
      * Creates a model for speech generation.
      * TODO: Implement for Dedicated users
      */
-    speech(modelId: string & {}): …
+    speech(modelId: string & {}): SpeechModelV2;
     /**
      * Friendli-specific tools.
      */
@@ -99,13 +99,7 @@ declare const friendli: FriendliAIProvider;
 declare const friendliaiErrorSchema: z.ZodObject<{
     message: z.ZodString;
     error: z.ZodRecord<z.ZodString, z.ZodAny>;
-}, …
-    message: string;
-    error: Record<string, any>;
-}, {
-    message: string;
-    error: Record<string, any>;
-}>;
+}, z.core.$strip>;
 type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
 export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, createFriendli, friendli };
package/dist/index.js
CHANGED
@@ -34,7 +34,23 @@ var import_openai_compatible = require("@ai-sdk/openai-compatible");
 var FriendliAIServerlessModelIds = [
   "meta-llama-3.1-8b-instruct",
   "meta-llama-3.3-70b-instruct",
-  "deepseek-r1"
+  "deepseek-r1",
+  "LGAI-EXAONE/EXAONE-4.0-32B",
+  "K-intelligence/Midm-2.0-Base-Instruct",
+  "K-intelligence/Midm-2.0-Mini-Instruct",
+  "deepseek-ai/DeepSeek-R1-0528",
+  "meta-llama/Llama-3.1-8B-Instruct",
+  "mistralai/Magistral-Small-2506",
+  "deepseek-ai/DeepSeek-R1",
+  "meta-llama/Llama-3.3-70B-Instruct",
+  "mistralai/Devstral-Small-2505",
+  "google/gemma-3-27b-it",
+  "Qwen/Qwen3-32B",
+  "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+  "Qwen/Qwen3-235B-A22B",
+  "Qwen/Qwen3-30B-A3B",
+  "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
+  "mistralai/Mistral-Small-3.1-24B-Instruct-2503"
 ];
 
 // src/friendli-chat-language-model.ts
@@ -48,7 +64,7 @@ var import_zod = require("zod");
 var import_provider_utils = require("@ai-sdk/provider-utils");
 var friendliaiErrorSchema = import_zod.z.object({
   message: import_zod.z.string(),
-  error: import_zod.z.record(import_zod.z.any())
+  error: import_zod.z.record(import_zod.z.string(), import_zod.z.any())
 });
 var friendliaiErrorStructure = {
   errorSchema: friendliaiErrorSchema,
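This tracks the move to zod v4, where `z.record` takes an explicit key schema (the single-argument form from zod 3 is gone); the `z.core.$strip` in the rebuilt type declarations above points the same way. A standalone sketch of the equivalent source-level schema:

```ts
import { z } from 'zod';

// zod v4 signature: z.record(keySchema, valueSchema).
const friendliaiErrorSchema = z.object({
  message: z.string(),
  error: z.record(z.string(), z.any()),
});

type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
// -> { message: string; error: Record<string, any> }
```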
@@ -83,7 +99,7 @@ function prepareTools({
       function: {
         name: tool.name,
         description: tool.description,
-        parameters: tool.…
+        parameters: tool.inputSchema
       }
     });
   }
@@ -206,6 +222,7 @@ var FriendliAIChatLanguageModel = class {
         } : void 0,
         stop: stopSequences,
         seed,
+        ...(friendliOptions == null ? void 0 : friendliOptions.chat_template_kwargs) ? { chat_template_kwargs: friendliOptions.chat_template_kwargs } : {},
         // ...providerOptions?.[this.providerOptionsName],
         // reasoning_effort: compatibleOptions.reasoningEffort,
         // messages:
@@ -257,10 +274,9 @@ var FriendliAIChatLanguageModel = class {
       for (const toolCall of choice.message.tool_calls) {
         content.push({
           type: "tool-call",
-          toolCallType: "function",
           toolCallId: (_a = toolCall.id) != null ? _a : (0, import_provider_utils2.generateId)(),
           toolName: toolCall.function.name,
-          …
+          input: toolCall.function.arguments
         });
       }
     }
@@ -389,14 +405,16 @@ var FriendliAIChatLanguageModel = class {
             const delta = choice.delta;
             if (delta.reasoning_content != null) {
               controller.enqueue({
-                type: "reasoning",
-                …
+                type: "reasoning-delta",
+                id: (0, import_provider_utils2.generateId)(),
+                delta: delta.reasoning_content
               });
             }
             if (delta.content != null) {
               controller.enqueue({
-                type: "text",
-                …
+                type: "text-delta",
+                id: (0, import_provider_utils2.generateId)(),
+                delta: delta.content
               });
             }
             if (delta.tool_calls != null) {
@@ -434,20 +452,17 @@ var FriendliAIChatLanguageModel = class {
                 if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
                   if (toolCall2.function.arguments.length > 0) {
                     controller.enqueue({
-                      type: "tool-…
-                      …
-                      …
-                      toolName: toolCall2.function.name,
-                      argsTextDelta: toolCall2.function.arguments
+                      type: "tool-input-delta",
+                      id: toolCall2.id,
+                      delta: toolCall2.function.arguments
                     });
                   }
                   if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
                     controller.enqueue({
                       type: "tool-call",
-                      toolCallType: "function",
                       toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils2.generateId)(),
                       toolName: toolCall2.function.name,
-                      …
+                      input: toolCall2.function.arguments
                     });
                     toolCall2.hasFinished = true;
                   }
@@ -462,19 +477,16 @@ var FriendliAIChatLanguageModel = class {
                 toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
               }
               controller.enqueue({
-                type: "tool-…
-                …
-                …
-                toolName: toolCall.function.name,
-                argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+                type: "tool-input-delta",
+                id: toolCall.id,
+                delta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
               });
               if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
-                  toolCallType: "function",
                   toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils2.generateId)(),
                   toolName: toolCall.function.name,
-                  …
+                  input: toolCall.function.arguments
                 });
                 toolCall.hasFinished = true;
               }
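These streaming hunks rename the emitted parts to their AI SDK v5 `LanguageModelV2StreamPart` equivalents: `text` and `reasoning` become `text-delta` and `reasoning-delta` carrying an `id` plus a `delta` payload, `tool-call-delta` becomes `tool-input-delta`, and a completed `tool-call` carries `input` instead of `args`. A rough sketch of a consumer switching over the renamed parts, using only the fields shown in the diff:

```ts
import type { LanguageModelV2StreamPart } from '@ai-sdk/provider';

// Sketch: handle the part types emitted by doStream after this change.
function handlePart(part: LanguageModelV2StreamPart, out: { text: string }) {
  switch (part.type) {
    case 'text-delta':
      out.text += part.delta; // previously type "text" with a different payload field
      break;
    case 'reasoning-delta':
      break; // reasoning tokens now stream separately from visible text
    case 'tool-input-delta':
      break; // incremental tool-argument JSON, keyed by part.id
    case 'tool-call':
      console.log(part.toolName, part.input); // `input` replaces the old `args`
      break;
  }
}
```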
@@ -671,7 +683,8 @@ var friendliProviderOptionsSchema = import_zod2.z.object({
    * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.
    */
   // regex: z.instanceof(RegExp).nullish(),
-  regex: import_zod2.z.string().nullish()
+  regex: import_zod2.z.string().nullish(),
+  chat_template_kwargs: import_zod2.z.record(import_zod2.z.string(), import_zod2.z.any()).nullish()
 });
 
 // src/friendli-tools.ts
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
(Single-line source map regenerated for the rebuilt bundle; the old and new minified JSON, including the embedded sourcesContent, are not reproduced here.)
AAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAO,cAAE,OAAO,EAAE,QAAQ;AAC5B,CAAC;;;AGt2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAA
I;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
+
{"version":3,"sources":["../src/index.ts","../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/friendli-tools.ts"],"sourcesContent":["export { createFriendli, friendli } from './friendli-provider'\nexport type { FriendliAIErrorData } from './friendli-error'\nexport type {\n FriendliAIProvider,\n FriendliAIProviderSettings,\n} from './friendli-provider'\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | 
undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n 'LGAI-EXAONE/EXAONE-4.0-32B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 
'K-intelligence/Midm-2.0-Mini-Instruct',\n 'deepseek-ai/DeepSeek-R1-0528',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'mistralai/Magistral-Small-2506',\n 'deepseek-ai/DeepSeek-R1',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 'mistralai/Devstral-Small-2505',\n 'google/gemma-3-27b-it',\n 'Qwen/Qwen3-32B',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'Qwen/Qwen3-235B-A22B',\n 'Qwen/Qwen3-30B-A3B',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? 
{ chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: 
z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AACP,IAAAC,yBAIO;AACP,+BAAwD;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACtBA,IAAAC,mBAUO;AACP,IAAAC,yBAaO;AACP,sBAIO;AAEP,IAAAC,cAAkB;;;AC/BlB,iBAAkB;AAElB,4BAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,SAAS,aAAE,OAAO;AAAA,EAClB,OAAO,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,sCAAkC;AAAA,EAC7C;AACF;;;ACnBA,sBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,4BAAwB;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,UAAM,6CAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,cAAU,uDAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtOjE;AAuOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO
;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,kBAAc,iDAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,OAAG,qCAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAG,qCAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,iCAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,
QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+B,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4B,cAAE,MAAM;AAAA,EACxC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,
WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,OAAO;AAAA,IACf,QAAQ,cAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAAS,cAAE,KAAK;AAAA,IAChB,YAAY,cAAE;AAAA,MACZ,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAO,cACJ,OAAO;AAAA,MACN,MAAM,cAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAK,cAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAW,cAAE,OAAO;AAAA,IACpB,OAAO,cAAE,KAAK;AAAA,IACd,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsB,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AGz2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IA
AI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,oBAAgB,6CAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,iEAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod","_a","toolCall"]}
package/dist/index.mjs
CHANGED
@@ -12,7 +12,23 @@ import { OpenAICompatibleCompletionLanguageModel } from "@ai-sdk/openai-compatible";
 var FriendliAIServerlessModelIds = [
   "meta-llama-3.1-8b-instruct",
   "meta-llama-3.3-70b-instruct",
-  "deepseek-r1"
+  "deepseek-r1",
+  "LGAI-EXAONE/EXAONE-4.0-32B",
+  "K-intelligence/Midm-2.0-Base-Instruct",
+  "K-intelligence/Midm-2.0-Mini-Instruct",
+  "deepseek-ai/DeepSeek-R1-0528",
+  "meta-llama/Llama-3.1-8B-Instruct",
+  "mistralai/Magistral-Small-2506",
+  "deepseek-ai/DeepSeek-R1",
+  "meta-llama/Llama-3.3-70B-Instruct",
+  "mistralai/Devstral-Small-2505",
+  "google/gemma-3-27b-it",
+  "Qwen/Qwen3-32B",
+  "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+  "Qwen/Qwen3-235B-A22B",
+  "Qwen/Qwen3-30B-A3B",
+  "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
+  "mistralai/Mistral-Small-3.1-24B-Instruct-2503"
 ];
 
 // src/friendli-chat-language-model.ts
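The expanded list above means Hugging Face-style IDs such as "Qwen/Qwen3-32B" are now recognized as serverless models and auto-route to the serverless endpoint. A minimal sketch of calling one of them, assuming the AI SDK ("ai" package) and a FRIENDLI_TOKEN environment variable:

    // Sketch only: calling one of the newly listed serverless model IDs.
    import { generateText } from "ai";
    import { friendli } from "@friendliai/ai-provider";

    const { text } = await generateText({
      model: friendli("Qwen/Qwen3-32B"), // any ID from the list above works the same way
      prompt: "Reply with one short sentence.",
    });
    console.log(text);
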
@@ -41,7 +57,7 @@ import { z } from "zod";
 import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
 var friendliaiErrorSchema = z.object({
   message: z.string(),
-  error: z.record(z.any())
+  error: z.record(z.string(), z.any())
 });
 var friendliaiErrorStructure = {
   errorSchema: friendliaiErrorSchema,
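The error schema now passes an explicit key schema to z.record, the two-argument form that zod v4 expects. A standalone illustration (not the package's code) of the same call:

    // Standalone sketch: zod record with an explicit key schema, as in the hunk above.
    import { z } from "zod";

    const errorShape = z.object({
      message: z.string(),
      error: z.record(z.string(), z.any()), // key schema is now passed explicitly
    });

    console.log(errorShape.parse({ message: "bad request", error: { code: 400 } }));
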
@@ -78,7 +94,7 @@ function prepareTools({
       function: {
         name: tool.name,
         description: tool.description,
-        parameters: tool.parameters
+        parameters: tool.inputSchema
       }
     });
   }
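The tool mapping now reads the JSON schema from the AI SDK v5 tool field inputSchema instead of parameters. A self-contained, hypothetical sketch of the same mapping:

    // Hypothetical sketch of the field rename this hunk makes: the tool's
    // inputSchema is forwarded as the OpenAI-compatible `parameters` field.
    type FunctionTool = { name: string; description?: string; inputSchema: unknown };

    function toOpenAICompatTool(tool: FunctionTool) {
      return {
        type: "function" as const,
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.inputSchema, // previously read from tool.parameters
        },
      };
    }

    console.log(toOpenAICompatTool({ name: "weather", inputSchema: { type: "object" } }));
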
@@ -201,6 +217,7 @@ var FriendliAIChatLanguageModel = class {
   } : void 0,
   stop: stopSequences,
   seed,
+  ...(friendliOptions == null ? void 0 : friendliOptions.chat_template_kwargs) ? { chat_template_kwargs: friendliOptions.chat_template_kwargs } : {},
   // ...providerOptions?.[this.providerOptionsName],
   // reasoning_effort: compatibleOptions.reasoningEffort,
   // messages:
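The conditional spread above only serializes chat_template_kwargs into the request body when the caller actually supplied it. A standalone sketch of that pattern (the enable_thinking kwarg is illustrative only, not part of the package):

    // Standalone sketch of the conditional-spread pattern used above.
    type FriendliOptions = { chat_template_kwargs?: Record<string, unknown> };

    function buildBody(model: string, opts?: FriendliOptions) {
      return {
        model,
        ...(opts?.chat_template_kwargs
          ? { chat_template_kwargs: opts.chat_template_kwargs }
          : {}),
      };
    }

    console.log(buildBody("deepseek-r1")); // { model: "deepseek-r1" }
    console.log(buildBody("deepseek-r1", { chat_template_kwargs: { enable_thinking: false } }));
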
@@ -252,10 +269,9 @@ var FriendliAIChatLanguageModel = class {
   for (const toolCall of choice.message.tool_calls) {
     content.push({
       type: "tool-call",
-      toolCallType: "function",
       toolCallId: (_a = toolCall.id) != null ? _a : generateId(),
       toolName: toolCall.function.name,
-      args: toolCall.function.arguments
+      input: toolCall.function.arguments
     });
   }
 }
@@ -384,14 +400,16 @@ var FriendliAIChatLanguageModel = class {
 const delta = choice.delta;
 if (delta.reasoning_content != null) {
   controller.enqueue({
-    type: "reasoning",
-    text: delta.reasoning_content
+    type: "reasoning-delta",
+    id: generateId(),
+    delta: delta.reasoning_content
   });
 }
 if (delta.content != null) {
   controller.enqueue({
-    type: "text",
-    text: delta.content
+    type: "text-delta",
+    id: generateId(),
+    delta: delta.content
   });
 }
 if (delta.tool_calls != null) {
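Reasoning and text chunks are now emitted as v2-style delta parts that carry an id. A type-only sketch of the shapes produced by this hunk:

    // Type-only sketch of the delta parts emitted above; each part carries
    // an id that the model generates with generateId().
    type ReasoningDeltaPart = { type: "reasoning-delta"; id: string; delta: string };
    type TextDeltaPart = { type: "text-delta"; id: string; delta: string };

    const part: TextDeltaPart = { type: "text-delta", id: "txt_1", delta: "Hello" };
    console.log(part.delta);
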
@@ -429,20 +447,17 @@ var FriendliAIChatLanguageModel = class {
   if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
     if (toolCall2.function.arguments.length > 0) {
       controller.enqueue({
-        type: "tool-call-delta",
-        toolCallType: "function",
-        toolCallId: toolCall2.id,
-        toolName: toolCall2.function.name,
-        argsTextDelta: toolCall2.function.arguments
+        type: "tool-input-delta",
+        id: toolCall2.id,
+        delta: toolCall2.function.arguments
       });
     }
     if (isParsableJson(toolCall2.function.arguments)) {
       controller.enqueue({
         type: "tool-call",
-        toolCallType: "function",
         toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
         toolName: toolCall2.function.name,
-        args: toolCall2.function.arguments
+        input: toolCall2.function.arguments
       });
       toolCall2.hasFinished = true;
     }
@@ -457,19 +472,16 @@ var FriendliAIChatLanguageModel = class {
     toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
   }
   controller.enqueue({
-    type: "tool-call-delta",
-    toolCallType: "function",
-    toolCallId: toolCall.id,
-    toolName: toolCall.function.name,
-    argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+    type: "tool-input-delta",
+    id: toolCall.id,
+    delta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
   });
   if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
     controller.enqueue({
       type: "tool-call",
-      toolCallType: "function",
       toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
       toolName: toolCall.function.name,
-      args: toolCall.function.arguments
+      input: toolCall.function.arguments
     });
     toolCall.hasFinished = true;
   }
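Together with the previous hunk, streamed tool arguments now arrive as tool-input-delta parts keyed by the tool call id, followed by a final tool-call part whose payload field is input rather than args. A hedged sketch (not the package's code) of accumulating those parts on the consumer side:

    // Sketch: accumulate tool-input-delta chunks per id, then parse the final input.
    type ToolStreamPart =
      | { type: "tool-input-delta"; id: string; delta: string }
      | { type: "tool-call"; toolCallId: string; toolName: string; input: string };

    function collectToolInputs(parts: ToolStreamPart[]) {
      const partial: Record<string, string> = {};
      const finished: Record<string, unknown> = {};
      for (const part of parts) {
        if (part.type === "tool-input-delta") {
          partial[part.id] = (partial[part.id] ?? "") + part.delta;
        } else {
          finished[part.toolName] = JSON.parse(part.input);
        }
      }
      return { partial, finished };
    }

    console.log(
      collectToolInputs([
        { type: "tool-input-delta", id: "call_1", delta: '{"city":' },
        { type: "tool-input-delta", id: "call_1", delta: '"Seoul"}' },
        { type: "tool-call", toolCallId: "call_1", toolName: "weather", input: '{"city":"Seoul"}' },
      ]),
    );
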
@@ -666,7 +678,8 @@ var friendliProviderOptionsSchema = z2.object({
    * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.
    */
   // regex: z.instanceof(RegExp).nullish(),
-  regex: z2.string().nullish()
+  regex: z2.string().nullish(),
+  chat_template_kwargs: z2.record(z2.string(), z2.any()).nullish()
 });
 
 // src/friendli-tools.ts
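With the schema entry above in place, chat_template_kwargs can be passed from application code through providerOptions under the "friendli" key (the provider name used by parseProviderOptions). A usage sketch, assuming the AI SDK's generateText; the enable_thinking kwarg is illustrative and depends on the model's chat template:

    // Usage sketch only: forwarding chat_template_kwargs via providerOptions.
    import { generateText } from "ai";
    import { friendli } from "@friendliai/ai-provider";

    await generateText({
      model: friendli("Qwen/Qwen3-235B-A22B"),
      prompt: "Answer briefly: what is a chat template?",
      providerOptions: {
        friendli: { chat_template_kwargs: { enable_thinking: false } },
      },
    });
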
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV1,\n TranscriptionModelV1,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV1\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV1\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 
'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n 
LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\nimport { regex } from 'zod/v4'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? 
{},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? 
undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n text: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text',\n text: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? 
'',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: 
z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. 
Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 
'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { LanguageModelV1ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV1ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AACF;;;ACNA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC/BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,IAAI,CAAC;AACzB,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFtGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAnElC;AAkFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAtGtB;AAuGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AA1HL;AA2HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,MAAM,qBAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA;AAAA;AAAA;AAAA,QAMA,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAlOjE;AAmOI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;
AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AApU/D;AAqUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAtZvC,gBAAAC,KAAA;AAwZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY
;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAznB5B,gBAAAD,KAAA;AA0nBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAGA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CA
AC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAC5B,CAAC;;;AGt2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAA
M;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z"]}
   1 | +
{"version":3,"sources":["../src/friendli-provider.ts","../src/friendli-settings.ts","../src/friendli-chat-language-model.ts","../src/friendli-error.ts","../src/friendli-prepare-tools.ts","../src/friendli-tools.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n SpeechModelV2,\n TranscriptionModelV2,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils'\nimport { OpenAICompatibleCompletionLanguageModel } from '@ai-sdk/openai-compatible'\n\nimport {\n FriendliAILanguageModelId,\n FriendliAIServerlessModelIds,\n FriendliAIServerlessModelId,\n} from './friendli-settings'\nimport { FriendliAIChatLanguageModel } from './friendli-chat-language-model'\nimport { friendliaiErrorStructure } from './friendli-error'\nimport { friendliTools } from './friendli-tools'\n\nexport interface FriendliAIProviderSettings {\n /**\n * FriendliAI API key. (FRIENDLI_TOKEN)\n */\n apiKey?: string\n /**\n * Base URL for the API calls.\n */\n baseURL?: string | 'auto' | 'dedicated' | 'serverless' | 'serverless-tools'\n /**\n * Custom headers to include in the requests.\n */\n headers?: Record<string, string>\n /**\n * FriendliAI Team ID.\n */\n teamId?: string\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction\n}\n\nexport interface FriendliAIProvider extends ProviderV2 {\n /**\n * Creates a model for text generation.\n */\n (modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n languageModel(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a chat model for text generation.\n */\n chat(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a completion model for text generation.\n */\n completion(modelId: FriendliAILanguageModelId): LanguageModelV2\n /**\n * Creates a text embedding model for text generation.\n * TODO: Implement for Dedicated users\n */\n embedding(modelId: string & {}): EmbeddingModelV2<string>\n textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>\n /**\n * Creates a model for image generation.\n * TODO: Implement for Dedicated users\n */\n imageModel(modelId: string & {}): ImageModelV2\n\n /**\n * Creates a model for transcription.\n * TODO: Implement for Dedicated users\n */\n transcription(modelId: string & {}): TranscriptionModelV2\n\n /**\n * Creates a model for speech generation.\n * TODO: Implement for Dedicated users\n */\n speech(modelId: string & {}): SpeechModelV2\n\n /**\n * Friendli-specific tools.\n */\n tools: typeof friendliTools\n}\n\n/**\nCreate an FriendliAI provider instance.\n */\nexport function createFriendli(\n options: FriendliAIProviderSettings = {},\n): FriendliAIProvider {\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'FRIENDLI_TOKEN',\n description: 'FRIENDLI_TOKEN',\n })}`,\n 'X-Friendli-Team': options.teamId,\n ...options.headers,\n })\n\n const baseURLAutoSelect = (\n modelId: string,\n baseURL:\n | string\n | 'dedicated'\n | 'serverless'\n | 'serverless-tools'\n | undefined,\n ): {\n baseURL: string\n type: 'dedicated' | 'serverless' | 'serverless-tools' | 'custom'\n } => {\n const FriendliBaseURL = {\n serverless: 'https://api.friendli.ai/serverless/v1',\n serverless_tools: 
'https://api.friendli.ai/serverless/tools/v1',\n dedicated: 'https://api.friendli.ai/dedicated/v1',\n }\n\n // Ignore options if baseURL is specified\n const customBaseURL = withoutTrailingSlash(baseURL)\n if (\n typeof customBaseURL === 'string' &&\n customBaseURL !== 'dedicated' &&\n customBaseURL !== 'serverless' &&\n customBaseURL !== 'serverless-tools'\n ) {\n return { baseURL: customBaseURL, type: 'custom' }\n }\n\n switch (baseURL) {\n case 'dedicated':\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n case 'serverless':\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n case 'serverless-tools':\n return {\n baseURL: FriendliBaseURL.serverless_tools,\n type: 'serverless-tools',\n }\n default:\n if (\n FriendliAIServerlessModelIds.includes(\n modelId as FriendliAIServerlessModelId,\n )\n ) {\n return {\n baseURL: FriendliBaseURL.serverless,\n type: 'serverless',\n }\n } else {\n return {\n baseURL: FriendliBaseURL.dedicated,\n type: 'dedicated',\n }\n }\n }\n }\n\n const createLanguageModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new FriendliAIChatLanguageModel(modelId, {\n provider: `friendliai.${type}.chat`,\n url: ({ path }: { path: string }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n })\n }\n\n const createCompletionModel = (modelId: FriendliAILanguageModelId) => {\n const { baseURL, type } = baseURLAutoSelect(modelId, options.baseURL)\n\n return new OpenAICompatibleCompletionLanguageModel(modelId, {\n provider: `friendliai.${type}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: friendliaiErrorStructure,\n })\n }\n\n const createTextEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' })\n }\n const createImageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' })\n }\n const createTranscriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n const createSpeechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'languageModel' })\n }\n\n const provider = (modelId: FriendliAILanguageModelId) =>\n createLanguageModel(modelId)\n\n provider.languageModel = createLanguageModel\n provider.chat = createLanguageModel\n provider.completion = createCompletionModel\n\n // TODO: Implement for Dedicated users\n provider.embedding = createTextEmbeddingModel\n provider.textEmbeddingModel = createTextEmbeddingModel\n provider.imageModel = createImageModel\n provider.transcription = createTranscriptionModel\n provider.speech = createSpeechModel\n\n provider.tools = friendliTools\n\n return provider as FriendliAIProvider\n}\n\n/**\n * Default FriendliAI provider instance.\n */\nexport const friendli = createFriendli()\n","// https://friendli.ai/products/serverless-endpoints\n// Below is just a subset of the available models.\nexport const FriendliAIServerlessModelIds = [\n 'meta-llama-3.1-8b-instruct',\n 'meta-llama-3.3-70b-instruct',\n 'deepseek-r1',\n 'LGAI-EXAONE/EXAONE-4.0-32B',\n 'K-intelligence/Midm-2.0-Base-Instruct',\n 'K-intelligence/Midm-2.0-Mini-Instruct',\n 'deepseek-ai/DeepSeek-R1-0528',\n 'meta-llama/Llama-3.1-8B-Instruct',\n 'mistralai/Magistral-Small-2506',\n 'deepseek-ai/DeepSeek-R1',\n 'meta-llama/Llama-3.3-70B-Instruct',\n 
'mistralai/Devstral-Small-2505',\n 'google/gemma-3-27b-it',\n 'Qwen/Qwen3-32B',\n 'meta-llama/Llama-4-Scout-17B-16E-Instruct',\n 'Qwen/Qwen3-235B-A22B',\n 'Qwen/Qwen3-30B-A3B',\n 'meta-llama/Llama-4-Maverick-17B-128E-Instruct',\n 'mistralai/Mistral-Small-3.1-24B-Instruct-2503',\n] as const\n\nexport type FriendliAIServerlessModelId =\n (typeof FriendliAIServerlessModelIds)[number]\n\nexport type FriendliAILanguageModelId =\n | FriendliAIServerlessModelId\n | (string & {})\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n // LanguageModelV2ResponseMetadata,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider'\nimport {\n FetchFunction,\n ParseResult,\n ResponseHandler,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n // parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils'\nimport {\n convertToOpenAICompatibleChatMessages,\n getResponseMetadata,\n mapOpenAICompatibleFinishReason,\n} from '@ai-sdk/openai-compatible/internal'\n\nimport { z } from 'zod'\n\nimport { FriendliAILanguageModelId } from './friendli-settings'\nimport {\n friendliaiErrorSchema,\n friendliaiErrorStructure,\n // friendliaiFailedResponseHandler,\n} from './friendli-error'\nimport { prepareTools } from './friendli-prepare-tools'\nimport {\n MetadataExtractor,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible'\n\nexport type OpenAICompatibleChatConfig = {\n provider: string\n headers: () => Record<string, string | undefined>\n url: (options: { modelId: string; path: string }) => string\n fetch?: FetchFunction\n includeUsage?: boolean\n errorStructure?: ProviderErrorStructure<z.infer<typeof friendliaiErrorSchema>>\n metadataExtractor?: MetadataExtractor\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls']\n}\n\nexport class FriendliAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2'\n\n readonly supportsStructuredOutputs: boolean\n\n readonly modelId: FriendliAILanguageModelId\n // readonly settings: FriendliAIChatSettings\n\n private readonly config: OpenAICompatibleChatConfig\n private readonly failedResponseHandler: ResponseHandler<APICallError>\n private readonly chunkSchema // type inferred via constructor\n\n constructor(\n modelId: FriendliAILanguageModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId\n // this.settings = settings\n this.config = config\n\n const errorStructure = friendliaiErrorStructure\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n )\n\n this.failedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n )\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? true\n }\n\n get provider(): string {\n return this.config.provider\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{}\n }\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n stream,\n }: Parameters<LanguageModelV2['doGenerate']>[0] & {\n stream: boolean\n }) {\n const warnings: LanguageModelV2CallWarning[] = []\n\n // Parse provider options\n // const compatibleOptions = Object.assign(\n // (await parseProviderOptions({\n // provider: 'openai-compatible',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // (await parseProviderOptions({\n // provider: 'friendliai',\n // providerOptions,\n // schema: openaiCompatibleProviderOptions,\n // })) ?? {},\n // )\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' })\n }\n\n const friendliOptions = await parseProviderOptions({\n provider: 'friendli',\n providerOptions,\n schema: friendliProviderOptionsSchema,\n })\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n })\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n })\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // user: compatibleOptions.user,\n\n // standardized settings:\n stream: stream,\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : friendliOptions?.regex != null\n ? {\n type: 'regex',\n schema: friendliOptions.regex,\n }\n : undefined,\n\n stop: stopSequences,\n seed,\n\n ...(friendliOptions?.chat_template_kwargs\n ? 
{ chat_template_kwargs: friendliOptions.chat_template_kwargs }\n : {}),\n\n // ...providerOptions?.[this.providerOptionsName],\n\n // reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n parallel_tool_calls: friendliOptions?.parallelToolCalls,\n },\n warnings: [...warnings, ...toolWarnings],\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: false })\n\n const body = JSON.stringify(args)\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const choice = responseBody.choices[0]\n const content: Array<LanguageModelV2Content> = []\n\n // text content:\n const text = choice.message.content\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text })\n }\n\n // reasoning content:\n const reasoning = choice.message.reasoning_content\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n })\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n })\n }\n }\n\n // provider metadata:\n // const providerMetadata: SharedV2ProviderMetadata = {\n // [this.providerOptionsName]: {},\n // ...(await this.config.metadataExtractor?.extractMetadata?.({\n // parsedBody: rawResponse,\n // })),\n // }\n // const completionTokenDetails = responseBody.usage?.completion_tokens_details\n // if (completionTokenDetails?.accepted_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n // completionTokenDetails?.accepted_prediction_tokens\n // }\n // if (completionTokenDetails?.rejected_prediction_tokens != null) {\n // providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n // completionTokenDetails?.rejected_prediction_tokens\n // }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n // providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n }\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options, stream: true })\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n }\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor()\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n })\n\n const toolCalls: Array<{\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n hasFinished: boolean\n }> = []\n\n let finishReason: LanguageModelV2FinishReason = 'unknown'\n const usage: {\n completionTokens: number | undefined\n completionTokensDetails: {\n reasoningTokens: number | undefined\n acceptedPredictionTokens: number | undefined\n rejectedPredictionTokens: number | undefined\n }\n promptTokens: number | undefined\n promptTokensDetails: {\n cachedTokens: number | undefined\n }\n totalTokens: number | undefined\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n }\n let isFirstChunk = true\n // const providerOptionsName = this.providerOptionsName\n const providerOptionsName = 'friendliai'\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings })\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: chunk.error })\n return\n }\n const value = chunk.value\n\n metadataExtractor?.processChunk(chunk.rawValue)\n\n // // hosted tool execution case\n // if ('status' in value) {\n // switch (value.status) {\n // case 'STARTED':\n // break\n\n // case 'UPDATING':\n // break\n\n // case 'ENDED':\n // break\n\n // case 'ERRORED':\n // finishReason = 'error'\n // break\n\n // default:\n // finishReason = 'error'\n // controller.enqueue({\n // type: 'error',\n // error: new Error(\n // `Unsupported tool call status: ${value.status}`,\n // ),\n // })\n // }\n // return\n // }\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error'\n controller.enqueue({ type: 'error', error: value.error.message })\n return\n }\n\n if (isFirstChunk) {\n isFirstChunk = false\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n })\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage\n\n usage.promptTokens = prompt_tokens ?? undefined\n usage.completionTokens = completion_tokens ?? undefined\n usage.totalTokens = total_tokens ?? undefined\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens\n }\n }\n\n const choice = value.choices[0]\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n )\n }\n\n if (choice?.delta == null) {\n return\n }\n\n const delta = choice.delta\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning-delta',\n id: generateId(),\n delta: delta.reasoning_content,\n })\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n id: generateId(),\n delta: delta.content,\n })\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index\n // Tool call start. FriendliAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n })\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n })\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n })\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n }\n\n const toolCall = toolCalls[index]\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n })\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n\n continue\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index]\n\n if (toolCall.hasFinished) {\n continue\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? ''\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n })\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n })\n toolCall.hasFinished = true\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n })\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n }\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.union([z.string(), z.any()]).nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst friendliaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n })\n .nullish(),\n }),\n z.object({\n name: z.string(),\n status: z.enum(['ENDED', 'STARTED', 'ERRORED', 'UPDATING']),\n message: z.null(),\n parameters: z.array(\n z.object({\n name: z.string(),\n value: z.string(),\n }),\n ),\n result: z.string().nullable(),\n error: z\n .object({\n type: z.enum(['INVALID_PARAMETER', 'UNKNOWN']),\n msg: z.string(),\n })\n .nullable(),\n timestamp: z.number(),\n usage: z.null(),\n tool_call_id: z.string().nullable(), // temporary fix for \"file:text\" tool calls\n }),\n friendliaiErrorSchema,\n])\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish()\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: 
z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n})\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ])\n\nconst friendliProviderOptionsSchema = z.object({\n /**\n * Whether to enable parallel function calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().nullish(),\n\n /**\n * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.\n */\n // regex: z.instanceof(RegExp).nullish(),\n regex: z.string().nullish(),\n\n chat_template_kwargs: z.record(z.string(), z.any()).nullish(),\n})\n\nexport type FriendliProviderOptions = z.infer<\n typeof friendliProviderOptionsSchema\n>\n","import { z } from 'zod'\nimport { ProviderErrorStructure } from '@ai-sdk/openai-compatible'\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils'\n\nexport const friendliaiErrorSchema = z.object({\n message: z.string(),\n error: z.record(z.string(), z.any()),\n})\n\nexport type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>\n\nexport const friendliaiErrorStructure: ProviderErrorStructure<FriendliAIErrorData> =\n {\n errorSchema: friendliaiErrorSchema,\n errorToMessage: (data) => data.message,\n }\n\nexport const friendliaiFailedResponseHandler = createJsonErrorResponseHandler(\n friendliaiErrorStructure,\n)\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider'\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools']\n toolChoice?: LanguageModelV2CallOptions['toolChoice']\n}): {\n tools:\n | undefined\n | Array<{\n type: string\n files?: string[]\n }>\n | Array<{\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }>\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined\n toolWarnings: LanguageModelV2CallWarning[]\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined\n\n const toolWarnings: LanguageModelV2CallWarning[] = []\n\n if (tools == null) {\n // if (tools == null && hostedTools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings }\n }\n\n // const toolChoice = mode.toolChoice\n\n // const mappedTools: Array<{\n // type: 'function'\n // function: {\n // name: string\n // description: string | undefined\n // parameters: unknown\n // }\n // }> = []\n\n // if (tools) {\n // for (const tool of tools) {\n // if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n // } else {\n // mappedTools.push({\n // type: 'function',\n // function: {\n // name: tool.name,\n // description: tool.description,\n // parameters: tool.parameters,\n // },\n // })\n // }\n // }\n // }\n\n // const mappedHostedTools = hostedTools?.map((tool) => {\n // return {\n // type: tool.type,\n // }\n // })\n\n // if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n // }\n\n const openaiCompatTools: Array<\n | {\n type: 'function'\n function: {\n name: string\n description: string | undefined\n parameters: unknown\n }\n }\n | {\n type: string\n }\n > = []\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n // toolWarnings.push({ type: 'unsupported-tool', tool })\n\n openaiCompatTools.push({\n // NOTE: It would be better to use tool.name, but since \":\" is replaced with \"_\", the following code is used instead\n type: tool.id.split('.')[1] ?? 'unknown',\n })\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n })\n }\n }\n\n if (toolChoice == null) {\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: undefined,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings }\n }\n\n const type = toolChoice.type\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? [])],\n // tool_choice: type,\n // toolWarnings,\n // }\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings }\n case 'tool':\n // return {\n // tools: [...(mappedTools ?? []), ...(mappedHostedTools ?? 
[])],\n // tool_choice: {\n // type: 'function',\n // function: {\n // name: toolChoice.toolName,\n // },\n // },\n // toolWarnings,\n // }\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n }\n default: {\n const _exhaustiveCheck: never = type\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n })\n }\n }\n}\n","import { LanguageModelV2ProviderDefinedTool } from '@ai-sdk/provider'\n\nfunction webUrlBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:url',\n name: 'web:url',\n args: {},\n }\n}\n\nfunction webSearchBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.web:search',\n name: 'web:search',\n args: {},\n }\n}\n\nfunction mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calendar',\n name: 'math:calendar',\n args: {},\n }\n}\n\nfunction mathStatisticsBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:statistics',\n name: 'math:statistics',\n args: {},\n }\n}\n\nfunction mathCalculatorBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.math:calculator',\n name: 'math:calculator',\n args: {},\n }\n}\n\nfunction codePythonInterpreterBetaTool(): LanguageModelV2ProviderDefinedTool {\n return {\n type: 'provider-defined',\n id: 'friendli.code:python-interpreter',\n name: 'code:python-interpreter',\n args: {},\n }\n}\n\nexport const friendliTools = {\n webSearchBetaTool: webSearchBetaTool,\n webUrlBetaTool: webUrlBetaTool,\n mathCalendarBetaTool: mathCalendarBetaTool,\n mathStatisticsBetaTool: mathStatisticsBetaTool,\n mathCalculatorBetaTool: mathCalculatorBetaTool,\n codePythonInterpreterBetaTool: 
codePythonInterpreterBetaTool,\n}\n"],"mappings":";AAAA;AAAA,EAIE;AAAA,OAIK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,+CAA+C;;;ACZjD,IAAM,+BAA+B;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACtBA;AAAA,EAEE;AAAA,OAQK;AACP;AAAA,EAIE;AAAA,EACA;AAAA,EACA,kCAAAA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,KAAAC,UAAS;;;AC/BlB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAExC,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,IAAI,CAAC;AACrC,CAAC;AAIM,IAAM,2BACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC;AAEK,IAAM,kCAAkC;AAAA,EAC7C;AACF;;;ACnBA;AAAA,EAGE;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAyBE;AAlCF;AAoCE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AAEjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AA4CA,QAAM,oBAYF,CAAC;AAEL,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AAGpC,wBAAkB,KAAK;AAAA;AAAA,QAErB,OAAM,UAAK,GAAG,MAAM,GAAG,EAAE,CAAC,MAApB,YAAyB;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AAMtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAMH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AAWH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,8BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AFvGO,IAAM,8BAAN,MAA6D;AAAA;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAlElC;AAiFI,SAAK,UAAU;AAEf,SAAK,SAAS;AAEd,UAAM,iBAAiB;AACvB,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AAEA,SAAK,wBAAwBC;AAAA,MAC3B;AAAA,IACF;AAEA,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AArGtB;AAsGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EACA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEG;AAzHL;AA0HI,UAAM,WAAyC,CAAC;AAgBhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,kBAAkB,MAAM,qBAAqB;AAAA,MACjD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAMZ;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,KACxB,mDAAiB,UAAS,OACxB;AAAA,UACE,MAAM;AAAA,UACN,QAAQ,gBAAgB;AAAA,QAC1B,IACA;AAAA,QAER,MAAM;AAAA,QACN;AAAA,QAEA,IAAI,mDAAiB,wBACjB,EAAE,sBAAsB,gBAAgB,qBAAqB,IAC7D,CAAC;AAAA;AAAA;AAAA;AAAA,QAOL,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB,mDAAiB;AAAA,MACxC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtOjE;AAuOI,UA
AM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,MAAM,CAAC;AAE3E,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAmBA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA;AAAA,MAEA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAvU/D;AAwUI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAE1E,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,UAAM,sBAAsB;AAE5B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAzZvC,gBAAAC,KAAA;AA2ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AA8BtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,Q
AAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,WAAW;AAAA,gBACf,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAxnB5B,gBAAAD,KAAA;AAynBY,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAaA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,+BAA+BE,GAAE,OAAO;AAAA,EAC5C,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,MAAM,CAACA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAAE,QAAQ;AAAA,YACpD,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACxC,CAAC,EACA,QAAQ;AACb,CAAC;AAKD,IAAM,4BAA4BA,GAAE,MAAM;AAAA,EACxCA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OA
AO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACxC,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,OAAO;AAAA,IACf,QAAQA,GAAE,KAAK,CAAC,SAAS,WAAW,WAAW,UAAU,CAAC;AAAA,IAC1D,SAASA,GAAE,KAAK;AAAA,IAChB,YAAYA,GAAE;AAAA,MACZA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,IAC5B,OAAOA,GACJ,OAAO;AAAA,MACN,MAAMA,GAAE,KAAK,CAAC,qBAAqB,SAAS,CAAC;AAAA,MAC7C,KAAKA,GAAE,OAAO;AAAA,IAChB,CAAC,EACA,SAAS;AAAA,IACZ,WAAWA,GAAE,OAAO;AAAA,IACpB,OAAOA,GAAE,KAAK;AAAA,IACd,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EACpC,CAAC;AAAA,EACD;AACF,CAAC;AAED,IAAM,mCAAmCA,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAEH,IAAM,gCAAgCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI7C,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvC,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAE1B,sBAAsBA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,EAAE,QAAQ;AAC9D,CAAC;;;AGz2BD,SAAS,iBAAqD;AAC5D,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,oBAAwD;AAC/D,SAAO;AAAA,IACL,M
AAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,uBAA2D;AAClE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,yBAA6D;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEA,SAAS,gCAAoE;AAC3E,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ALoCO,SAAS,eACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,mBAAmB,QAAQ;AAAA,IAC3B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,oBAAoB,CACxB,SACA,YASG;AACH,UAAM,kBAAkB;AAAA,MACtB,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,WAAW;AAAA,IACb;AAGA,UAAM,gBAAgB,qBAAqB,OAAO;AAClD,QACE,OAAO,kBAAkB,YACzB,kBAAkB,eAClB,kBAAkB,gBAClB,kBAAkB,oBAClB;AACA,aAAO,EAAE,SAAS,eAAe,MAAM,SAAS;AAAA,IAClD;AAEA,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,SAAS,gBAAgB;AAAA,UACzB,MAAM;AAAA,QACR;AAAA,MACF;AACE,YACE,6BAA6B;AAAA,UAC3B;AAAA,QACF,GACA;AACA,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS,gBAAgB;AAAA,YACzB,MAAM;AAAA,UACR;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,sBAAsB,CAAC,YAAuC;AAClE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,4BAA4B,SAAS;AAAA,MAC9C,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAwB,GAAG,OAAO,GAAG,IAAI;AAAA,MACtD,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAAC,YAAuC;AACpE,UAAM,EAAE,SAAS,KAAK,IAAI,kBAAkB,SAAS,QAAQ,OAAO;AAEpE,WAAO,IAAI,wCAAwC,SAAS;AAAA,MAC1D,UAAU,cAAc,IAAI;AAAA,MAC5B,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,MACf,gBAAgB;AAAA,IAClB,CAAC;AAAA,EACH;AAEA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AACA,QAAM,mBAAmB,CAAC,YAAoB;AAC5C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AACA,QAAM,2BAA2B,CAAC,YAAoB;AACpD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AACA,QAAM,oBAAoB,CAAC,YAAoB;AAC7C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,gBAAgB,CAAC;AAAA,EACpE;AAEA,QAAM,WAAW,CAAC,YAChB,oBAAoB,OAAO;AAE7B,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AAGtB,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AACtB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAElB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,WAAW,eAAe;","names":["createJsonErrorResponseHandler","z","createJsonErrorResponseHandler","_a","toolCall","z"]}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 1  1 | {
 2  2 | "name": "@friendliai/ai-provider",
 3    | - "version": "0.3.0-
    3 | + "version": "0.3.0-beta.5",
 4  4 | "license": "Apache-2.0",
 5  5 | "sideEffects": false,
 6  6 | "main": "./dist/index.js",
@@ -25,17 +25,17 @@
25 25 | }
26 26 | },
27 27 | "dependencies": {
28    | - "@ai-sdk/openai-compatible": "1.0.
29    | - "@ai-sdk/provider": "2.0.0
30    | - "@ai-sdk/provider-utils": "3.0.
   28 | + "@ai-sdk/openai-compatible": "1.0.8",
   29 | + "@ai-sdk/provider": "2.0.0",
   30 | + "@ai-sdk/provider-utils": "3.0.3"
31 31 | },
32 32 | "devDependencies": {
33 33 | "@edge-runtime/vm": "^5.0.0",
34    | - "@types/node": "^
35    | - "globals": "^16.
   34 | + "@types/node": "^24.3.0",
   35 | + "globals": "^16.3.0",
36 36 | "tsup": "^8.5.0",
37    | - "typescript": "5.
38    | - "zod": "
   37 | + "typescript": "5.9.2",
   38 | + "zod": "4.0.17"
39 39 | },
40 40 | "peerDependencies": {
41 41 | "zod": "^3.0.0"