@friendliai/ai-provider 1.0.0-beta.0 → 1.0.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +37 -0
- package/README.md +20 -0
- package/dist/index.d.mts +69 -31
- package/dist/index.d.ts +69 -31
- package/dist/index.js +347 -197
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +333 -184
- package/dist/index.mjs.map +1 -1
- package/package.json +9 -8
package/dist/index.mjs
CHANGED
@@ -17,7 +17,6 @@ import {
 import {
   combineHeaders,
   createEventSourceResponseHandler,
-  createJsonErrorResponseHandler as createJsonErrorResponseHandler2,
   createJsonResponseHandler,
   generateId,
   isParsableJson,
@@ -27,17 +26,88 @@ import {
 import { z as z2 } from "zod/v4";
 
 // src/friendli-error.ts
-import {
+import { APICallError } from "@ai-sdk/provider";
+import { safeParseJSON } from "@ai-sdk/provider-utils";
 import { z } from "zod/v4";
-var
+var friendliErrorResponseSchema = z.object({
   message: z.string(),
-  error: z.record(z.string(), z.any())
+  error: z.record(z.string(), z.any()).optional()
 });
+var openAIStyleErrorResponseSchema = z.object({
+  error: z.object({
+    message: z.string()
+  }).loose()
+}).loose();
+var friendliaiErrorSchema = z.union([
+  // OpenAI/OpenRouter style error: { "error": { "message": "..." } }
+  openAIStyleErrorResponseSchema,
+  // Friendli style error: { "message": "...", "error": { ... } }
+  friendliErrorResponseSchema
+]);
 var friendliaiErrorStructure = {
   errorSchema: friendliaiErrorSchema,
-  errorToMessage: (data) =>
+  errorToMessage: (data) => {
+    if (typeof data === "object" && data != null && "error" in data && typeof data.error === "object" && data.error != null && "message" in data.error && typeof data.error.message === "string") {
+      return data.error.message;
+    }
+    if (typeof data === "object" && data != null && "message" in data && typeof data.message === "string") {
+      return data.message;
+    }
+    return "Unknown error";
+  }
+};
+var friendliaiFailedResponseHandler = async ({
+  response,
+  url,
+  requestBodyValues
+}) => {
+  const responseBody = await response.text();
+  const responseHeaders = {};
+  response.headers.forEach((value, key) => {
+    responseHeaders[key] = value;
+  });
+  const baseErrorOptions = {
+    url,
+    requestBodyValues,
+    statusCode: response.status,
+    responseHeaders,
+    responseBody
+  };
+  const trimmedBody = responseBody.trim();
+  if (trimmedBody === "") {
+    const fallback2 = response.statusText || `Request failed with status ${response.status}`;
+    return {
+      responseHeaders,
+      value: new APICallError({
+        message: fallback2,
+        ...baseErrorOptions
+      })
+    };
+  }
+  const parsedError = await safeParseJSON({
+    text: responseBody,
+    schema: friendliaiErrorSchema
+  });
+  if (parsedError.success) {
+    return {
+      responseHeaders,
+      value: new APICallError({
+        message: friendliaiErrorStructure.errorToMessage(parsedError.value),
+        data: parsedError.value,
+        ...baseErrorOptions
+      })
+    };
+  }
+  const fallback = trimmedBody || response.statusText || `Request failed with status ${response.status}`;
+  return {
+    responseHeaders,
+    value: new APICallError({
+      message: fallback,
+      cause: parsedError.error,
+      ...baseErrorOptions
+    })
+  };
 };
-var friendliaiFailedResponseHandler = createJsonErrorResponseHandler(friendliaiErrorStructure);
 
 // src/friendli-prepare-tools.ts
 import {
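The replacement error handler reads the raw response body itself so that it can accept two different error payload shapes. The following sketch restates the message precedence implemented by `errorToMessage`; it is not part of the package's exported API, and the sample payloads are invented:

```js
// Restatement of the precedence in friendliaiErrorStructure.errorToMessage (illustrative only).
function errorBodyToMessage(data) {
  // OpenAI/OpenRouter style: { "error": { "message": "..." } } wins first.
  if (data != null && typeof data.error === "object" && data.error != null && typeof data.error.message === "string") {
    return data.error.message;
  }
  // Friendli style: { "message": "...", "error": { ... } }.
  if (data != null && typeof data.message === "string") {
    return data.message;
  }
  return "Unknown error";
}

console.log(errorBodyToMessage({ error: { message: "Invalid API key" } })); // "Invalid API key"
console.log(errorBodyToMessage({ message: "Model not found" }));            // "Model not found"
```

Empty bodies and bodies that fail to parse fall back to the HTTP status text or a `Request failed with status <code>` message.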
@@ -55,9 +125,11 @@ function prepareTools({
   }
   const openaiCompatTools = [];
   for (const tool of tools) {
-    if (tool.type === "provider
+    if (tool.type === "provider") {
       openaiCompatTools.push({
-        // NOTE:
+        // NOTE: Friendli tool-assisted API expects provider tool types like "web:search".
+        // We derive it from the provider tool id (e.g. "friendli.web:search" -> "web:search")
+        // instead of tool.name (often "web_search").
        type: (_a = tool.id.split(".")[1]) != null ? _a : "unknown"
      });
    } else {
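In other words, the upstream tool type is whatever follows the first dot in the provider tool id:

```js
// "friendli.web:search" -> "web:search"; a missing segment falls back to "unknown".
const type = "friendli.web:search".split(".")[1] ?? "unknown";
console.log(type); // "web:search"
```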
@@ -99,16 +171,38 @@ function prepareTools({
 }
 
 // src/friendli-chat-language-model.ts
+function isRecord(value) {
+  return typeof value === "object" && value != null;
+}
+function isHostedToolExecutionChunk(value) {
+  if (!isRecord(value)) return false;
+  return typeof value.status === "string" && typeof value.name === "string" && Array.isArray(value.parameters);
+}
+function getChunkErrorMessage(value) {
+  if (!isRecord(value)) return void 0;
+  if (typeof value.message === "string") {
+    return value.message;
+  }
+  const nestedError = value.error;
+  if (isRecord(nestedError) && typeof nestedError.message === "string") {
+    return nestedError.message;
+  }
+  return void 0;
+}
+function isOpenAIChatChunk(value) {
+  if (!isRecord(value)) return false;
+  return Array.isArray(value.choices);
+}
 var FriendliAIChatLanguageModel = class {
   // type inferred via constructor
   constructor(modelId, config) {
-    this.specificationVersion = "
+    this.specificationVersion = "v3";
     var _a;
     this.modelId = modelId;
     this.config = config;
     const errorStructure = friendliaiErrorStructure;
     this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);
-    this.failedResponseHandler =
+    this.failedResponseHandler = friendliaiFailedResponseHandler;
     this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : true;
   }
   get provider() {
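These guards let the streaming transform dispatch on the three chunk shapes the Friendli API can emit. A rough illustration (payload values are invented):

```js
// Which guard matches which chunk shape:
const openAIDelta = { choices: [{ delta: { content: "Hello" } }] };                // isOpenAIChatChunk -> true
const hostedToolEvent = { status: "STARTED", name: "web:search", parameters: [] }; // isHostedToolExecutionChunk -> true
const errorEvent = { error: { message: "rate limited" } };                         // getChunkErrorMessage -> "rate limited"
```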
@@ -136,18 +230,24 @@ var FriendliAIChatLanguageModel = class {
   }) {
     var _a;
     const warnings = [];
-    if (topK != null) {
-      warnings.push({ type: "unsupported-setting", setting: "topK" });
-    }
     const friendliOptions = await parseProviderOptions({
+      provider: "friendliai",
+      providerOptions,
+      schema: friendliProviderOptionsSchema
+    });
+    const legacyFriendliOptions = await parseProviderOptions({
       provider: "friendli",
       providerOptions,
       schema: friendliProviderOptionsSchema
     });
+    const options = {
+      ...legacyFriendliOptions,
+      ...friendliOptions
+    };
     if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
       warnings.push({
-        type: "unsupported
-
+        type: "unsupported",
+        feature: "responseFormat",
         details: "JSON response format schema is only supported with structuredOutputs"
       });
     }
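Provider options are now read under the new "friendliai" key as well as the legacy "friendli" key, with the new key taking precedence when both are supplied:

```js
// Sketch of the merge above (values are invented):
const legacyFriendliOptions = { minP: 0.05, repetitionPenalty: 1.2 }; // providerOptions.friendli
const friendliOptions = { minP: 0.1 };                                // providerOptions.friendliai
const options = { ...legacyFriendliOptions, ...friendliOptions };
console.log(options); // { minP: 0.1, repetitionPenalty: 1.2 }
```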
@@ -159,6 +259,14 @@ var FriendliAIChatLanguageModel = class {
       tools,
       toolChoice
     });
+    const isToolsPresent = openaiTools != null && openaiTools.length > 0;
+    if (isToolsPresent && (responseFormat != null || (options == null ? void 0 : options.regex) != null)) {
+      warnings.push({
+        type: "unsupported",
+        feature: "responseFormat",
+        details: "response_format is not supported when tools are present."
+      });
+    }
     return {
       args: {
         // >>> hard-coded default options >>>
@@ -170,36 +278,39 @@ var FriendliAIChatLanguageModel = class {
         max_tokens: maxOutputTokens,
         temperature,
         top_p: topP,
+        top_k: topK,
         frequency_penalty: frequencyPenalty,
         presence_penalty: presencePenalty,
-        response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
+        response_format: isToolsPresent === false ? (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
           type: "json_schema",
           json_schema: {
             schema: responseFormat.schema,
             name: (_a = responseFormat.name) != null ? _a : "response",
             description: responseFormat.description
           }
-        } : { type: "json_object" } : (
+        } : { type: "json_object" } : (options == null ? void 0 : options.regex) != null ? {
           type: "regex",
-          schema:
-        } : void 0,
+          schema: options.regex
+        } : void 0 : void 0,
         stop: stopSequences,
         seed,
-
-
-
+        min_p: options == null ? void 0 : options.minP,
+        repetition_penalty: options == null ? void 0 : options.repetitionPenalty,
+        xtc_threshold: options == null ? void 0 : options.xtcThreshold,
+        xtc_probability: options == null ? void 0 : options.xtcProbability,
+        ...(options == null ? void 0 : options.chat_template_kwargs) ? { chat_template_kwargs: options.chat_template_kwargs } : {},
         // messages:
         messages: convertToOpenAICompatibleChatMessages(prompt),
         // tools:
         tools: openaiTools,
         tool_choice: openaiToolChoice,
-        parallel_tool_calls:
+        parallel_tool_calls: options == null ? void 0 : options.parallelToolCalls
       },
       warnings: [...warnings, ...toolWarnings]
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
     const { args, warnings } = await this.getArgs({ ...options, stream: false });
     const body = JSON.stringify(args);
     const {
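With these mappings, the new provider options translate directly into Friendli request-body fields, and `response_format` (including the `regex` variant) is only emitted when no tools are present. An illustrative request body built under those rules (field names come from the diff; values are invented):

```js
// Hypothetical args produced for a regex-constrained, tool-free call:
const exampleArgs = {
  model: "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  response_format: { type: "regex", schema: "[0-9]{3}-[0-9]{4}" }, // from providerOptions regex
  top_k: 40,                // topK is now forwarded instead of producing an unsupported-setting warning
  min_p: 0.05,              // providerOptions minP
  repetition_penalty: 1.1,  // providerOptions repetitionPenalty
  xtc_threshold: 0.1,       // providerOptions xtcThreshold
  xtc_probability: 0.5      // providerOptions xtcProbability
};
```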
@@ -245,11 +356,17 @@ var FriendliAIChatLanguageModel = class {
       content,
       finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),
       usage: {
-        inputTokens:
-
-
-
-
+        inputTokens: {
+          total: (_c = (_b = responseBody.usage) == null ? void 0 : _b.prompt_tokens) != null ? _c : void 0,
+          noCache: void 0,
+          cacheRead: (_f = (_e = (_d = responseBody.usage) == null ? void 0 : _d.prompt_tokens_details) == null ? void 0 : _e.cached_tokens) != null ? _f : void 0,
+          cacheWrite: void 0
+        },
+        outputTokens: {
+          total: (_h = (_g = responseBody.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : void 0,
+          text: void 0,
+          reasoning: (_k = (_j = (_i = responseBody.usage) == null ? void 0 : _i.completion_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0
+        }
       },
       // providerMetadata,
       request: { body },
@@ -306,9 +423,10 @@ var FriendliAIChatLanguageModel = class {
       start(controller) {
         controller.enqueue({ type: "stream-start", warnings });
       },
-      //
+      // NOTE: Chunk values can contain OpenAI-compatible deltas, hosted tool events, and error events.
+      // We narrow with type guards for safe handling.
       transform(chunk, controller) {
-        var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+        var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
         if (!chunk.success) {
           finishReason = "error";
           controller.enqueue({ type: "error", error: chunk.error });
@@ -316,26 +434,79 @@ var FriendliAIChatLanguageModel = class {
         }
         const value = chunk.value;
         metadataExtractor == null ? void 0 : metadataExtractor.processChunk(chunk.rawValue);
-        if (
+        if (isHostedToolExecutionChunk(value)) {
+          const toolCallId = (_a2 = value.tool_call_id) != null ? _a2 : generateId();
+          switch (value.status) {
+            case "STARTED":
+              controller.enqueue({
+                type: "tool-call",
+                toolCallId,
+                toolName: value.name,
+                input: JSON.stringify(
+                  Object.fromEntries(value.parameters.map((p) => [p.name, p.value]))
+                ),
+                providerExecuted: true
+              });
+              break;
+            case "UPDATING":
+              break;
+            case "ENDED":
+              controller.enqueue({
+                type: "tool-result",
+                toolCallId,
+                toolName: value.name,
+                result: (_b = value.result) != null ? _b : ""
+              });
+              break;
+            case "ERRORED":
+              finishReason = "error";
+              controller.enqueue({
+                type: "tool-result",
+                toolCallId,
+                toolName: value.name,
+                result: (_d = (_c = value.error) == null ? void 0 : _c.msg) != null ? _d : "Unknown error",
+                isError: true
+              });
+              break;
+            default:
+              finishReason = "error";
+              controller.enqueue({
+                type: "error",
+                error: new Error(`Unsupported tool call status: ${value.status}`)
+              });
+          }
+          return;
+        }
+        const chunkErrorMessage = getChunkErrorMessage(value);
+        if (chunkErrorMessage != null) {
           finishReason = "error";
-          controller.enqueue({ type: "error", error:
+          controller.enqueue({ type: "error", error: chunkErrorMessage });
+          return;
+        }
+        if (!isOpenAIChatChunk(value)) {
+          finishReason = "error";
+          controller.enqueue({
+            type: "error",
+            error: new Error("Unsupported chunk shape")
+          });
           return;
         }
+        const chunkValue = value;
         if (isFirstChunk) {
          isFirstChunk = false;
           controller.enqueue({
             type: "response-metadata",
-            ...getResponseMetadata(
+            ...getResponseMetadata(chunkValue)
           });
         }
-        if (
+        if (chunkValue.usage != null) {
           const {
             prompt_tokens,
             completion_tokens,
             total_tokens,
             prompt_tokens_details,
             completion_tokens_details
-          } =
+          } = chunkValue.usage;
           usage.promptTokens = prompt_tokens != null ? prompt_tokens : void 0;
           usage.completionTokens = completion_tokens != null ? completion_tokens : void 0;
           usage.totalTokens = total_tokens != null ? total_tokens : void 0;
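The hosted (provider-executed) tool events are translated into AI SDK stream parts by status. A minimal restatement of that mapping, outside the actual transform (illustrative only; `toolCallId` bookkeeping is omitted):

```js
function hostedToolEventToStreamPart(event) {
  switch (event.status) {
    case "STARTED": // emit the provider-executed tool call with its parameters as JSON input
      return {
        type: "tool-call",
        toolName: event.name,
        input: JSON.stringify(Object.fromEntries(event.parameters.map((p) => [p.name, p.value]))),
        providerExecuted: true
      };
    case "UPDATING": // intermediate progress events are dropped
      return void 0;
    case "ENDED": // the tool finished; forward its result
      return { type: "tool-result", toolName: event.name, result: event.result ?? "" };
    case "ERRORED": // surface the error message as an errored tool result
      return { type: "tool-result", toolName: event.name, result: event.error?.msg ?? "Unknown error", isError: true };
    default:
      return { type: "error", error: new Error(`Unsupported tool call status: ${event.status}`) };
  }
}
```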
@@ -352,7 +523,7 @@ var FriendliAIChatLanguageModel = class {
             usage.promptTokensDetails.cachedTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
           }
         }
-        const choice =
+        const choice = chunkValue.choices[0];
         if ((choice == null ? void 0 : choice.finish_reason) != null) {
           finishReason = mapOpenAICompatibleFinishReason(choice.finish_reason);
         }
@@ -390,7 +561,7 @@ var FriendliAIChatLanguageModel = class {
               message: `Expected 'id' to be a string.`
             });
           }
-          if (((
+          if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
             throw new InvalidResponseDataError({
               data: toolCallDelta,
               message: `Expected 'function.name' to be a string.`
@@ -401,12 +572,12 @@ var FriendliAIChatLanguageModel = class {
               type: "function",
               function: {
                 name: toolCallDelta.function.name,
-                arguments: (
+                arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
               },
               hasFinished: false
             };
             const toolCall2 = toolCalls[index];
-            if (((
+            if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
               if (toolCall2.function.arguments.length > 0) {
                 controller.enqueue({
                   type: "tool-input-delta",
@@ -417,7 +588,7 @@ var FriendliAIChatLanguageModel = class {
               if (isParsableJson(toolCall2.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
-                  toolCallId: (
+                  toolCallId: (_i = toolCall2.id) != null ? _i : generateId(),
                   toolName: toolCall2.function.name,
                   input: toolCall2.function.arguments
                 });
@@ -430,18 +601,18 @@ var FriendliAIChatLanguageModel = class {
             if (toolCall.hasFinished) {
               continue;
             }
-            if (((
-              toolCall.function.arguments += (
+            if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
+              toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
             }
             controller.enqueue({
               type: "tool-input-delta",
               id: toolCall.id,
-              delta: (
+              delta: (_n = (_m = toolCallDelta.function) == null ? void 0 : _m.arguments) != null ? _n : ""
             });
-            if (((
+            if (((_o = toolCall.function) == null ? void 0 : _o.name) != null && ((_p = toolCall.function) == null ? void 0 : _p.arguments) != null && isParsableJson(toolCall.function.arguments)) {
               controller.enqueue({
                 type: "tool-call",
-                toolCallId: (
+                toolCallId: (_q = toolCall.id) != null ? _q : generateId(),
                 toolName: toolCall.function.name,
                 input: toolCall.function.arguments
               });
@@ -451,7 +622,7 @@ var FriendliAIChatLanguageModel = class {
           }
         },
         flush(controller) {
-          var _a2, _b, _c, _d
+          var _a2, _b, _c, _d;
          const providerMetadata = {
             [providerOptionsName]: {},
             ...metadataExtractor == null ? void 0 : metadataExtractor.buildMetadata()
@@ -466,11 +637,17 @@ var FriendliAIChatLanguageModel = class {
             type: "finish",
             finishReason,
             usage: {
-              inputTokens:
-
-
-
-
+              inputTokens: {
+                total: (_a2 = usage.promptTokens) != null ? _a2 : void 0,
+                noCache: void 0,
+                cacheRead: (_b = usage.promptTokensDetails.cachedTokens) != null ? _b : void 0,
+                cacheWrite: void 0
+              },
+              outputTokens: {
+                total: (_c = usage.completionTokens) != null ? _c : void 0,
+                text: void 0,
+                reasoning: (_d = usage.completionTokensDetails.reasoningTokens) != null ? _d : void 0
+              }
             },
             providerMetadata
           });
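Both `doGenerate` and the stream `flush` now report usage in the structured shape shown above, breaking out cached and reasoning token counts when the API provides them. An example of the emitted object (numbers are invented):

```js
// Example "finish" usage payload (values are illustrative):
const usageExample = {
  inputTokens: {
    total: 128,       // prompt_tokens
    noCache: void 0,
    cacheRead: 64,    // prompt_tokens_details.cached_tokens
    cacheWrite: void 0
  },
  outputTokens: {
    total: 256,       // completion_tokens
    text: void 0,
    reasoning: 32     // completion_tokens_details.reasoning_tokens
  }
};
```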
@@ -482,86 +659,6 @@ var FriendliAIChatLanguageModel = class {
     };
   }
 };
-var friendliAIChatResponseSchema = z2.object({
-  id: z2.string().nullish(),
-  created: z2.number().nullish(),
-  model: z2.string().nullish(),
-  choices: z2.array(
-    z2.object({
-      message: z2.object({
-        role: z2.literal("assistant").nullish(),
-        content: z2.string().nullish(),
-        tool_calls: z2.array(
-          z2.object({
-            id: z2.string().nullish(),
-            type: z2.literal("function"),
-            function: z2.object({
-              name: z2.string(),
-              arguments: z2.union([z2.string(), z2.any()]).nullish()
-            })
-          })
-        ).nullish()
-      }),
-      finish_reason: z2.string().nullish()
-    })
-  ),
-  usage: z2.object({
-    prompt_tokens: z2.number().nullish(),
-    completion_tokens: z2.number().nullish()
-  }).nullish()
-});
-var friendliaiChatChunkSchema = z2.union([
-  z2.object({
-    id: z2.string().nullish(),
-    created: z2.number().nullish(),
-    model: z2.string().nullish(),
-    choices: z2.array(
-      z2.object({
-        delta: z2.object({
-          role: z2.enum(["assistant"]).nullish(),
-          content: z2.string().nullish(),
-          tool_calls: z2.array(
-            z2.object({
-              index: z2.number(),
-              id: z2.string().nullish(),
-              type: z2.literal("function").optional(),
-              function: z2.object({
-                name: z2.string().nullish(),
-                arguments: z2.string().nullish()
-              })
-            })
-          ).nullish()
-        }).nullish(),
-        finish_reason: z2.string().nullish()
-      })
-    ),
-    usage: z2.object({
-      prompt_tokens: z2.number().nullish(),
-      completion_tokens: z2.number().nullish()
-    }).nullish()
-  }),
-  z2.object({
-    name: z2.string(),
-    status: z2.enum(["ENDED", "STARTED", "ERRORED", "UPDATING"]),
-    message: z2.null(),
-    parameters: z2.array(
-      z2.object({
-        name: z2.string(),
-        value: z2.string()
-      })
-    ),
-    result: z2.string().nullable(),
-    error: z2.object({
-      type: z2.enum(["INVALID_PARAMETER", "UNKNOWN"]),
-      msg: z2.string()
-    }).nullable(),
-    timestamp: z2.number(),
-    usage: z2.null(),
-    tool_call_id: z2.string().nullable()
-    // temporary fix for "file:text" tool calls
-  }),
-  friendliaiErrorSchema
-]);
 var openaiCompatibleTokenUsageSchema = z2.object({
   prompt_tokens: z2.number().nullish(),
   completion_tokens: z2.number().nullish(),
@@ -629,6 +726,25 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z2.union([
     ),
     usage: openaiCompatibleTokenUsageSchema
   }),
+  z2.object({
+    name: z2.string(),
+    status: z2.enum(["ENDED", "STARTED", "ERRORED", "UPDATING"]),
+    message: z2.null(),
+    parameters: z2.array(
+      z2.object({
+        name: z2.string(),
+        value: z2.string()
+      })
+    ),
+    result: z2.string().nullable(),
+    error: z2.object({
+      type: z2.enum(["INVALID_PARAMETER", "UNKNOWN"]),
+      msg: z2.string()
+    }).nullable(),
+    timestamp: z2.number(),
+    usage: z2.null(),
+    tool_call_id: z2.string().nullable()
+  }),
   errorSchema
 ]);
 var friendliProviderOptionsSchema = z2.object({
@@ -641,15 +757,34 @@ var friendliProviderOptionsSchema = z2.object({
   */
   // regex: z.instanceof(RegExp).nullish(),
   regex: z2.string().nullish(),
-  chat_template_kwargs: z2.record(z2.string(), z2.any()).nullish()
+  chat_template_kwargs: z2.record(z2.string(), z2.any()).nullish(),
+  /**
+   * A scaling factor used to determine the minimum token probability threshold.
+   */
+  minP: z2.number().nullish(),
+  /**
+   * Penalizes tokens that have already appeared in the generated result.
+   */
+  repetitionPenalty: z2.number().nullish(),
+  /**
+   * A probability threshold used to identify “top choice” tokens for exclusion in XTC sampling.
+   */
+  xtcThreshold: z2.number().nullish(),
+  /**
+   * The probability that XTC (Exclude Top Choices) filtering will be applied for each sampling decision.
+   */
+  xtcProbability: z2.number().nullish()
 });
 
 // src/friendli-settings.ts
 var FriendliAIServerlessModelIds = [
+  "MiniMaxAI/MiniMax-M2",
+  "zai-org/GLM-4.6",
   "LGAI-EXAONE/EXAONE-4.0.1-32B",
   "skt/A.X-4.0",
   "skt/A.X-3.1",
   "naver-hyperclovax/HyperCLOVAX-SEED-Think-14B",
+  "deepseek-ai/DeepSeek-V3.1",
   "deepseek-ai/DeepSeek-R1-0528",
   "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
   "meta-llama/Llama-4-Scout-17B-16E-Instruct",
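The new sampling options are validated by `friendliProviderOptionsSchema` and can be passed from application code. A usage sketch, assuming the package exports `createFriendli` and is used with the AI SDK's `generateText`; the model id is one of the serverless ids listed above, and credential setup is omitted:

```js
import { generateText } from "ai";
import { createFriendli } from "@friendliai/ai-provider";

const friendli = createFriendli(); // auth/baseURL options omitted in this sketch

const { text } = await generateText({
  model: friendli.chat("zai-org/GLM-4.6"),
  prompt: "Summarize XTC sampling in one sentence.",
  providerOptions: {
    // The new "friendliai" key is preferred; the legacy "friendli" key is still read.
    friendliai: {
      minP: 0.05,
      repetitionPenalty: 1.1,
      xtcThreshold: 0.1,
      xtcProbability: 0.5
    }
  }
});
console.log(text);
```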
@@ -668,61 +803,74 @@ var FriendliAIServerlessModelIds = [
 ];
 
 // src/friendli-tools.ts
-
-
-
-
-
-
-
+import { createProviderToolFactoryWithOutputSchema } from "@ai-sdk/provider-utils";
+import { z as z3 } from "zod/v4";
+var inputSchema = z3.object({}).loose();
+var outputSchema = z3.unknown();
+var webSearchTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.web:search",
+  inputSchema,
+  outputSchema
+});
+var webUrlTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.web:url",
+  inputSchema,
+  outputSchema
+});
+var mathCalendarTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.math:calendar",
+  inputSchema,
+  outputSchema
+});
+var mathStatisticsTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.math:statistics",
+  inputSchema,
+  outputSchema
+});
+var mathCalculatorTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.math:calculator",
+  inputSchema,
+  outputSchema
+});
+var codePythonInterpreterTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.code:python-interpreter",
+  inputSchema,
+  outputSchema
+});
+var linkupSearchTool = createProviderToolFactoryWithOutputSchema({
+  id: "friendli.linkup:search",
+  inputSchema,
+  outputSchema
+});
+function webSearch() {
+  return webSearchTool({});
 }
-function
-  return {
-    type: "provider-defined",
-    id: "friendli.web:search",
-    name: "web:search",
-    args: {}
-  };
+function webUrl() {
+  return webUrlTool({});
 }
-function
-  return {
-    type: "provider-defined",
-    id: "friendli.math:calendar",
-    name: "math:calendar",
-    args: {}
-  };
+function mathCalendar() {
+  return mathCalendarTool({});
 }
-function
-  return {
-    type: "provider-defined",
-    id: "friendli.math:statistics",
-    name: "math:statistics",
-    args: {}
-  };
+function mathStatistics() {
+  return mathStatisticsTool({});
 }
-function
-  return {
-    type: "provider-defined",
-    id: "friendli.math:calculator",
-    name: "math:calculator",
-    args: {}
-  };
+function mathCalculator() {
+  return mathCalculatorTool({});
 }
-function
-  return {
-
-
-
-    args: {}
-  };
+function codePythonInterpreter() {
+  return codePythonInterpreterTool({});
+}
+function linkupSearch() {
+  return linkupSearchTool({});
 }
 var friendliTools = {
-
-
-
-
-
-
+  webSearch,
+  webUrl,
+  mathCalendar,
+  mathStatistics,
+  mathCalculator,
+  codePythonInterpreter,
+  linkupSearch
 };
 
 // src/get-available-models.ts
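The hosted tools are now built with `createProviderToolFactoryWithOutputSchema`, so each helper returns an AI SDK provider tool keyed by ids such as `friendli.web:search`. A usage sketch, assuming `friendliTools` is exported from the package and used with the AI SDK's `streamText`:

```js
import { streamText } from "ai";
import { createFriendli, friendliTools } from "@friendliai/ai-provider";

const friendli = createFriendli(); // auth options omitted in this sketch

const result = streamText({
  model: friendli.chat("deepseek-ai/DeepSeek-V3.1"),
  prompt: "What is 123456789 * 987654321?",
  tools: {
    // Provider-executed tool: prepareTools sends its derived type ("math:calculator") upstream.
    calculator: friendliTools.mathCalculator()
  }
});

for await (const part of result.fullStream) {
  if (part.type === "tool-result") console.log(part);
}
```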
@@ -889,7 +1037,8 @@ function createFriendli(options = {}) {
       provider: `friendliai.${type}.chat`,
       url: ({ path }) => `${baseURL}${path}`,
       headers: getHeaders,
-      fetch: options.fetch
+      fetch: options.fetch,
+      includeUsage: options.includeUsage
     });
   };
   const createCompletionModel = (modelId) => {
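The factory now forwards an `includeUsage` flag into every chat model's config. A sketch of enabling it (the exact effect on upstream requests is an assumption based on the option name):

```js
import { createFriendli } from "@friendliai/ai-provider";

// Ask the provider to include token usage information with responses.
const friendli = createFriendli({ includeUsage: true });
```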
@@ -903,7 +1052,7 @@ function createFriendli(options = {}) {
     });
   };
   const createTextEmbeddingModel = (modelId) => {
-    throw new NoSuchModelError({ modelId, modelType: "
+    throw new NoSuchModelError({ modelId, modelType: "embeddingModel" });
   };
   const createImageModel = (modelId) => {
     throw new NoSuchModelError({ modelId, modelType: "imageModel" });
@@ -919,7 +1068,7 @@ function createFriendli(options = {}) {
   provider.chat = createLanguageModel;
   provider.completion = createCompletionModel;
   provider.embedding = createTextEmbeddingModel;
-  provider.
+  provider.embeddingModel = createTextEmbeddingModel;
   provider.getAvailableModels = async (opts) => {
     var _a;
     const defaultURL = "https://api-internal.friendli.ai/api/graphql";