@friendliai/ai-provider 1.0.0-beta.0 → 1.0.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +37 -0
- package/README.md +20 -0
- package/dist/index.d.mts +69 -31
- package/dist/index.d.ts +69 -31
- package/dist/index.js +347 -197
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +333 -184
- package/dist/index.mjs.map +1 -1
- package/package.json +9 -8
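
For context on how the beta.2 changes below surface to an application (a new `friendliai` provider-options namespace with `minP`, `repetitionPenalty`, `xtcThreshold`, and `xtcProbability`; an `includeUsage` provider setting; hosted tools built via `createProviderToolFactoryWithOutputSchema`; new serverless model ids), here is a minimal, hypothetical usage sketch. It assumes the package root exports `createFriendli` and `friendliTools` and that the host application calls the AI SDK's `generateText`; it is not taken from the package README.

// Hypothetical usage sketch; exported names and tool-key naming below are assumptions.
import { generateText } from "ai";                                        // assumed AI SDK host app
import { createFriendli, friendliTools } from "@friendliai/ai-provider";  // assumed root exports

const friendli = createFriendli({
  apiKey: process.env.FRIENDLI_TOKEN, // the provider itself also falls back to FRIENDLI_TOKEN
  includeUsage: true,                 // new in beta.2: forwarded to the chat config
});

const { text, usage } = await generateText({
  model: friendli("zai-org/GLM-4.6"), // model id newly added to FriendliAIServerlessModelIds
  prompt: "Summarize this week's release notes.",
  tools: {
    // provider-executed hosted tool; beta.2 builds these with
    // createProviderToolFactoryWithOutputSchema (tool id "friendli.web:search")
    web_search: friendliTools.webSearch(),
  },
  providerOptions: {
    // beta.2 reads the "friendliai" namespace; the legacy "friendli" key is still merged in
    friendliai: {
      minP: 0.05,
      repetitionPenalty: 1.1,
      xtcThreshold: 0.1,
      xtcProbability: 0.5,
    },
  },
});

console.log(text, usage);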
package/dist/index.js
CHANGED
@@ -27,30 +27,101 @@ module.exports = __toCommonJS(index_exports);
27 27 |
28 28 | // src/friendli-provider.ts
29 29 | var import_openai_compatible = require("@ai-sdk/openai-compatible");
30 | - var
31 | - var
30 | + var import_provider4 = require("@ai-sdk/provider");
31 | + var import_provider_utils5 = require("@ai-sdk/provider-utils");
32 32 |
33 33 | // src/friendli-chat-language-model.ts
34 34 | var import_internal = require("@ai-sdk/openai-compatible/internal");
35 | - var
35 | + var import_provider3 = require("@ai-sdk/provider");
36 36 | var import_provider_utils2 = require("@ai-sdk/provider-utils");
37 37 | var import_v42 = require("zod/v4");
38 38 |
39 39 | // src/friendli-error.ts
40 | + var import_provider = require("@ai-sdk/provider");
40 41 | var import_provider_utils = require("@ai-sdk/provider-utils");
41 42 | var import_v4 = require("zod/v4");
42 | - var
43 | + var friendliErrorResponseSchema = import_v4.z.object({
43 44 | message: import_v4.z.string(),
44 | - error: import_v4.z.record(import_v4.z.string(), import_v4.z.any())
45 | + error: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
45 46 | });
47 | + var openAIStyleErrorResponseSchema = import_v4.z.object({
48 | + error: import_v4.z.object({
49 | + message: import_v4.z.string()
50 | + }).loose()
51 | + }).loose();
52 | + var friendliaiErrorSchema = import_v4.z.union([
53 | + // OpenAI/OpenRouter style error: { "error": { "message": "..." } }
54 | + openAIStyleErrorResponseSchema,
55 | + // Friendli style error: { "message": "...", "error": { ... } }
56 | + friendliErrorResponseSchema
57 | + ]);
46 58 | var friendliaiErrorStructure = {
47 59 | errorSchema: friendliaiErrorSchema,
48 | - errorToMessage: (data) =>
60 | + errorToMessage: (data) => {
61 | + if (typeof data === "object" && data != null && "error" in data && typeof data.error === "object" && data.error != null && "message" in data.error && typeof data.error.message === "string") {
62 | + return data.error.message;
63 | + }
64 | + if (typeof data === "object" && data != null && "message" in data && typeof data.message === "string") {
65 | + return data.message;
66 | + }
67 | + return "Unknown error";
68 | + }
69 | + };
70 | + var friendliaiFailedResponseHandler = async ({
71 | + response,
72 | + url,
73 | + requestBodyValues
74 | + }) => {
75 | + const responseBody = await response.text();
76 | + const responseHeaders = {};
77 | + response.headers.forEach((value, key) => {
78 | + responseHeaders[key] = value;
79 | + });
80 | + const baseErrorOptions = {
81 | + url,
82 | + requestBodyValues,
83 | + statusCode: response.status,
84 | + responseHeaders,
85 | + responseBody
86 | + };
87 | + const trimmedBody = responseBody.trim();
88 | + if (trimmedBody === "") {
89 | + const fallback2 = response.statusText || `Request failed with status ${response.status}`;
90 | + return {
91 | + responseHeaders,
92 | + value: new import_provider.APICallError({
93 | + message: fallback2,
94 | + ...baseErrorOptions
95 | + })
96 | + };
97 | + }
98 | + const parsedError = await (0, import_provider_utils.safeParseJSON)({
99 | + text: responseBody,
100 | + schema: friendliaiErrorSchema
101 | + });
102 | + if (parsedError.success) {
103 | + return {
104 | + responseHeaders,
105 | + value: new import_provider.APICallError({
106 | + message: friendliaiErrorStructure.errorToMessage(parsedError.value),
107 | + data: parsedError.value,
108 | + ...baseErrorOptions
109 | + })
110 | + };
111 | + }
112 | + const fallback = trimmedBody || response.statusText || `Request failed with status ${response.status}`;
113 | + return {
114 | + responseHeaders,
115 | + value: new import_provider.APICallError({
116 | + message: fallback,
117 | + cause: parsedError.error,
118 | + ...baseErrorOptions
119 | + })
120 | + };
49 121 | };
50 | - var friendliaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(friendliaiErrorStructure);
51 122 |
52 123 | // src/friendli-prepare-tools.ts
53 | - var
124 | + var import_provider2 = require("@ai-sdk/provider");
54 125 | function prepareTools({
55 126 | tools,
56 127 | toolChoice
@@ -63,9 +134,11 @@ function prepareTools({
63 134 | }
64 135 | const openaiCompatTools = [];
65 136 | for (const tool of tools) {
66 | - if (tool.type === "provider
137 | + if (tool.type === "provider") {
67 138 | openaiCompatTools.push({
68 | - // NOTE:
139 | + // NOTE: Friendli tool-assisted API expects provider tool types like "web:search".
140 | + // We derive it from the provider tool id (e.g. "friendli.web:search" -> "web:search")
141 | + // instead of tool.name (often "web_search").
69 142 | type: (_a = tool.id.split(".")[1]) != null ? _a : "unknown"
70 143 | });
71 144 | } else {
@@ -99,7 +172,7 @@ function prepareTools({
99 172 | };
100 173 | default: {
101 174 | const _exhaustiveCheck = type;
102 | - throw new
175 | + throw new import_provider2.UnsupportedFunctionalityError({
103 176 | functionality: `tool choice type: ${_exhaustiveCheck}`
104 177 | });
105 178 | }
@@ -107,16 +180,38 @@ function prepareTools({
107 180 | }
108 181 |
109 182 | // src/friendli-chat-language-model.ts
183 | + function isRecord(value) {
184 | + return typeof value === "object" && value != null;
185 | + }
186 | + function isHostedToolExecutionChunk(value) {
187 | + if (!isRecord(value)) return false;
188 | + return typeof value.status === "string" && typeof value.name === "string" && Array.isArray(value.parameters);
189 | + }
190 | + function getChunkErrorMessage(value) {
191 | + if (!isRecord(value)) return void 0;
192 | + if (typeof value.message === "string") {
193 | + return value.message;
194 | + }
195 | + const nestedError = value.error;
196 | + if (isRecord(nestedError) && typeof nestedError.message === "string") {
197 | + return nestedError.message;
198 | + }
199 | + return void 0;
200 | + }
201 | + function isOpenAIChatChunk(value) {
202 | + if (!isRecord(value)) return false;
203 | + return Array.isArray(value.choices);
204 | + }
110 205 | var FriendliAIChatLanguageModel = class {
111 206 | // type inferred via constructor
112 207 | constructor(modelId, config) {
113 | - this.specificationVersion = "
208 | + this.specificationVersion = "v3";
114 209 | var _a;
115 210 | this.modelId = modelId;
116 211 | this.config = config;
117 212 | const errorStructure = friendliaiErrorStructure;
118 213 | this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);
119 | - this.failedResponseHandler =
214 | + this.failedResponseHandler = friendliaiFailedResponseHandler;
120 215 | this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : true;
121 216 | }
122 217 | get provider() {
@@ -144,18 +239,24 @@ var FriendliAIChatLanguageModel = class {
144 239 | }) {
145 240 | var _a;
146 241 | const warnings = [];
147 | - if (topK != null) {
148 | - warnings.push({ type: "unsupported-setting", setting: "topK" });
149 | - }
150 242 | const friendliOptions = await (0, import_provider_utils2.parseProviderOptions)({
243 | + provider: "friendliai",
244 | + providerOptions,
245 | + schema: friendliProviderOptionsSchema
246 | + });
247 | + const legacyFriendliOptions = await (0, import_provider_utils2.parseProviderOptions)({
151 248 | provider: "friendli",
152 249 | providerOptions,
153 250 | schema: friendliProviderOptionsSchema
154 251 | });
252 | + const options = {
253 | + ...legacyFriendliOptions,
254 | + ...friendliOptions
255 | + };
155 256 | if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
156 257 | warnings.push({
157 | - type: "unsupported
158 | -
258 | + type: "unsupported",
259 | + feature: "responseFormat",
159 260 | details: "JSON response format schema is only supported with structuredOutputs"
160 261 | });
161 262 | }
@@ -167,6 +268,14 @@ var FriendliAIChatLanguageModel = class {
167 268 | tools,
168 269 | toolChoice
169 270 | });
271 | + const isToolsPresent = openaiTools != null && openaiTools.length > 0;
272 | + if (isToolsPresent && (responseFormat != null || (options == null ? void 0 : options.regex) != null)) {
273 | + warnings.push({
274 | + type: "unsupported",
275 | + feature: "responseFormat",
276 | + details: "response_format is not supported when tools are present."
277 | + });
278 | + }
170 279 | return {
171 280 | args: {
172 281 | // >>> hard-coded default options >>>
@@ -178,36 +287,39 @@ var FriendliAIChatLanguageModel = class {
178 287 | max_tokens: maxOutputTokens,
179 288 | temperature,
180 289 | top_p: topP,
290 | + top_k: topK,
181 291 | frequency_penalty: frequencyPenalty,
182 292 | presence_penalty: presencePenalty,
183 | - response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
293 | + response_format: isToolsPresent === false ? (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
184 294 | type: "json_schema",
185 295 | json_schema: {
186 296 | schema: responseFormat.schema,
187 297 | name: (_a = responseFormat.name) != null ? _a : "response",
188 298 | description: responseFormat.description
189 299 | }
190 | - } : { type: "json_object" } : (
300 | + } : { type: "json_object" } : (options == null ? void 0 : options.regex) != null ? {
191 301 | type: "regex",
192 | - schema:
193 | - } : void 0,
302 | + schema: options.regex
303 | + } : void 0 : void 0,
194 304 | stop: stopSequences,
195 305 | seed,
196 | -
197 | -
198 | -
306 | + min_p: options == null ? void 0 : options.minP,
307 | + repetition_penalty: options == null ? void 0 : options.repetitionPenalty,
308 | + xtc_threshold: options == null ? void 0 : options.xtcThreshold,
309 | + xtc_probability: options == null ? void 0 : options.xtcProbability,
310 | + ...(options == null ? void 0 : options.chat_template_kwargs) ? { chat_template_kwargs: options.chat_template_kwargs } : {},
199 311 | // messages:
200 312 | messages: (0, import_internal.convertToOpenAICompatibleChatMessages)(prompt),
201 313 | // tools:
202 314 | tools: openaiTools,
203 315 | tool_choice: openaiToolChoice,
204 | - parallel_tool_calls:
316 | + parallel_tool_calls: options == null ? void 0 : options.parallelToolCalls
205 317 | },
206 318 | warnings: [...warnings, ...toolWarnings]
207 319 | };
208 320 | }
209 321 | async doGenerate(options) {
210 | - var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k
322 | + var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
211 323 | const { args, warnings } = await this.getArgs({ ...options, stream: false });
212 324 | const body = JSON.stringify(args);
213 325 | const {
@@ -253,11 +365,17 @@ var FriendliAIChatLanguageModel = class {
253 365 | content,
254 366 | finishReason: (0, import_internal.mapOpenAICompatibleFinishReason)(choice.finish_reason),
255 367 | usage: {
256 | - inputTokens:
257 | -
258 | -
259 | -
260 | -
368 | + inputTokens: {
369 | + total: (_c = (_b = responseBody.usage) == null ? void 0 : _b.prompt_tokens) != null ? _c : void 0,
370 | + noCache: void 0,
371 | + cacheRead: (_f = (_e = (_d = responseBody.usage) == null ? void 0 : _d.prompt_tokens_details) == null ? void 0 : _e.cached_tokens) != null ? _f : void 0,
372 | + cacheWrite: void 0
373 | + },
374 | + outputTokens: {
375 | + total: (_h = (_g = responseBody.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : void 0,
376 | + text: void 0,
377 | + reasoning: (_k = (_j = (_i = responseBody.usage) == null ? void 0 : _i.completion_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0
378 | + }
261 379 | },
262 380 | // providerMetadata,
263 381 | request: { body },
@@ -314,9 +432,10 @@ var FriendliAIChatLanguageModel = class {
314 432 | start(controller) {
315 433 | controller.enqueue({ type: "stream-start", warnings });
316 434 | },
317 | - //
435 | + // NOTE: Chunk values can contain OpenAI-compatible deltas, hosted tool events, and error events.
436 | + // We narrow with type guards for safe handling.
318 437 | transform(chunk, controller) {
319 | - var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
438 | + var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
320 439 | if (!chunk.success) {
321 440 | finishReason = "error";
322 441 | controller.enqueue({ type: "error", error: chunk.error });
@@ -324,26 +443,79 @@ var FriendliAIChatLanguageModel = class {
324 443 | }
325 444 | const value = chunk.value;
326 445 | metadataExtractor == null ? void 0 : metadataExtractor.processChunk(chunk.rawValue);
327 | - if (
446 | + if (isHostedToolExecutionChunk(value)) {
447 | + const toolCallId = (_a2 = value.tool_call_id) != null ? _a2 : (0, import_provider_utils2.generateId)();
448 | + switch (value.status) {
449 | + case "STARTED":
450 | + controller.enqueue({
451 | + type: "tool-call",
452 | + toolCallId,
453 | + toolName: value.name,
454 | + input: JSON.stringify(
455 | + Object.fromEntries(value.parameters.map((p) => [p.name, p.value]))
456 | + ),
457 | + providerExecuted: true
458 | + });
459 | + break;
460 | + case "UPDATING":
461 | + break;
462 | + case "ENDED":
463 | + controller.enqueue({
464 | + type: "tool-result",
465 | + toolCallId,
466 | + toolName: value.name,
467 | + result: (_b = value.result) != null ? _b : ""
468 | + });
469 | + break;
470 | + case "ERRORED":
471 | + finishReason = "error";
472 | + controller.enqueue({
473 | + type: "tool-result",
474 | + toolCallId,
475 | + toolName: value.name,
476 | + result: (_d = (_c = value.error) == null ? void 0 : _c.msg) != null ? _d : "Unknown error",
477 | + isError: true
478 | + });
479 | + break;
480 | + default:
481 | + finishReason = "error";
482 | + controller.enqueue({
483 | + type: "error",
484 | + error: new Error(`Unsupported tool call status: ${value.status}`)
485 | + });
486 | + }
487 | + return;
488 | + }
489 | + const chunkErrorMessage = getChunkErrorMessage(value);
490 | + if (chunkErrorMessage != null) {
328 491 | finishReason = "error";
329 | - controller.enqueue({ type: "error", error:
492 | + controller.enqueue({ type: "error", error: chunkErrorMessage });
330 493 | return;
331 494 | }
495 | + if (!isOpenAIChatChunk(value)) {
496 | + finishReason = "error";
497 | + controller.enqueue({
498 | + type: "error",
499 | + error: new Error("Unsupported chunk shape")
500 | + });
501 | + return;
502 | + }
503 | + const chunkValue = value;
332 504 | if (isFirstChunk) {
333 505 | isFirstChunk = false;
334 506 | controller.enqueue({
335 507 | type: "response-metadata",
336 | - ...(0, import_internal.getResponseMetadata)(
508 | + ...(0, import_internal.getResponseMetadata)(chunkValue)
337 509 | });
338 510 | }
339 | - if (
511 | + if (chunkValue.usage != null) {
340 512 | const {
341 513 | prompt_tokens,
342 514 | completion_tokens,
343 515 | total_tokens,
344 516 | prompt_tokens_details,
345 517 | completion_tokens_details
346 | - } =
518 | + } = chunkValue.usage;
347 519 | usage.promptTokens = prompt_tokens != null ? prompt_tokens : void 0;
348 520 | usage.completionTokens = completion_tokens != null ? completion_tokens : void 0;
349 521 | usage.totalTokens = total_tokens != null ? total_tokens : void 0;
@@ -360,7 +532,7 @@ var FriendliAIChatLanguageModel = class {
360 532 | usage.promptTokensDetails.cachedTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
361 533 | }
362 534 | }
363 | - const choice =
535 | + const choice = chunkValue.choices[0];
364 536 | if ((choice == null ? void 0 : choice.finish_reason) != null) {
365 537 | finishReason = (0, import_internal.mapOpenAICompatibleFinishReason)(choice.finish_reason);
366 538 | }
@@ -387,19 +559,19 @@ var FriendliAIChatLanguageModel = class {
387 559 | const index = toolCallDelta.index;
388 560 | if (toolCalls[index] == null) {
389 561 | if (toolCallDelta.type !== "function") {
390 | - throw new
562 | + throw new import_provider3.InvalidResponseDataError({
391 563 | data: toolCallDelta,
392 564 | message: `Expected 'function' type.`
393 565 | });
394 566 | }
395 567 | if (toolCallDelta.id == null) {
396 | - throw new
568 | + throw new import_provider3.InvalidResponseDataError({
397 569 | data: toolCallDelta,
398 570 | message: `Expected 'id' to be a string.`
399 571 | });
400 572 | }
401 | - if (((
402 | - throw new
573 | + if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
574 | + throw new import_provider3.InvalidResponseDataError({
403 575 | data: toolCallDelta,
404 576 | message: `Expected 'function.name' to be a string.`
405 577 | });
@@ -409,12 +581,12 @@ var FriendliAIChatLanguageModel = class {
409 581 | type: "function",
410 582 | function: {
411 583 | name: toolCallDelta.function.name,
412 | - arguments: (
584 | + arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
413 585 | },
414 586 | hasFinished: false
415 587 | };
416 588 | const toolCall2 = toolCalls[index];
417 | - if (((
589 | + if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
418 590 | if (toolCall2.function.arguments.length > 0) {
419 591 | controller.enqueue({
420 592 | type: "tool-input-delta",
@@ -425,7 +597,7 @@ var FriendliAIChatLanguageModel = class {
425 597 | if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
426 598 | controller.enqueue({
427 599 | type: "tool-call",
428 | - toolCallId: (
600 | + toolCallId: (_i = toolCall2.id) != null ? _i : (0, import_provider_utils2.generateId)(),
429 601 | toolName: toolCall2.function.name,
430 602 | input: toolCall2.function.arguments
431 603 | });
@@ -438,18 +610,18 @@ var FriendliAIChatLanguageModel = class {
438 610 | if (toolCall.hasFinished) {
439 611 | continue;
440 612 | }
441 | - if (((
442 | - toolCall.function.arguments += (
613 | + if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
614 | + toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
443 615 | }
444 616 | controller.enqueue({
445 617 | type: "tool-input-delta",
446 618 | id: toolCall.id,
447 | - delta: (
619 | + delta: (_n = (_m = toolCallDelta.function) == null ? void 0 : _m.arguments) != null ? _n : ""
448 620 | });
449 | - if (((
621 | + if (((_o = toolCall.function) == null ? void 0 : _o.name) != null && ((_p = toolCall.function) == null ? void 0 : _p.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
450 622 | controller.enqueue({
451 623 | type: "tool-call",
452 | - toolCallId: (
624 | + toolCallId: (_q = toolCall.id) != null ? _q : (0, import_provider_utils2.generateId)(),
453 625 | toolName: toolCall.function.name,
454 626 | input: toolCall.function.arguments
455 627 | });
@@ -459,7 +631,7 @@ var FriendliAIChatLanguageModel = class {
459 631 | }
460 632 | },
461 633 | flush(controller) {
462 | - var _a2, _b, _c, _d
634 | + var _a2, _b, _c, _d;
463 635 | const providerMetadata = {
464 636 | [providerOptionsName]: {},
465 637 | ...metadataExtractor == null ? void 0 : metadataExtractor.buildMetadata()
@@ -474,11 +646,17 @@ var FriendliAIChatLanguageModel = class {
474 646 | type: "finish",
475 647 | finishReason,
476 648 | usage: {
477 | - inputTokens:
478 | -
479 | -
480 | -
481 | -
649 | + inputTokens: {
650 | + total: (_a2 = usage.promptTokens) != null ? _a2 : void 0,
651 | + noCache: void 0,
652 | + cacheRead: (_b = usage.promptTokensDetails.cachedTokens) != null ? _b : void 0,
653 | + cacheWrite: void 0
654 | + },
655 | + outputTokens: {
656 | + total: (_c = usage.completionTokens) != null ? _c : void 0,
657 | + text: void 0,
658 | + reasoning: (_d = usage.completionTokensDetails.reasoningTokens) != null ? _d : void 0
659 | + }
482 660 | },
483 661 | providerMetadata
484 662 | });
@@ -490,86 +668,6 @@ var FriendliAIChatLanguageModel = class {
490 668 | };
491 669 | }
492 670 | };
493 | - var friendliAIChatResponseSchema = import_v42.z.object({
494 | - id: import_v42.z.string().nullish(),
495 | - created: import_v42.z.number().nullish(),
496 | - model: import_v42.z.string().nullish(),
497 | - choices: import_v42.z.array(
498 | - import_v42.z.object({
499 | - message: import_v42.z.object({
500 | - role: import_v42.z.literal("assistant").nullish(),
501 | - content: import_v42.z.string().nullish(),
502 | - tool_calls: import_v42.z.array(
503 | - import_v42.z.object({
504 | - id: import_v42.z.string().nullish(),
505 | - type: import_v42.z.literal("function"),
506 | - function: import_v42.z.object({
507 | - name: import_v42.z.string(),
508 | - arguments: import_v42.z.union([import_v42.z.string(), import_v42.z.any()]).nullish()
509 | - })
510 | - })
511 | - ).nullish()
512 | - }),
513 | - finish_reason: import_v42.z.string().nullish()
514 | - })
515 | - ),
516 | - usage: import_v42.z.object({
517 | - prompt_tokens: import_v42.z.number().nullish(),
518 | - completion_tokens: import_v42.z.number().nullish()
519 | - }).nullish()
520 | - });
521 | - var friendliaiChatChunkSchema = import_v42.z.union([
522 | - import_v42.z.object({
523 | - id: import_v42.z.string().nullish(),
524 | - created: import_v42.z.number().nullish(),
525 | - model: import_v42.z.string().nullish(),
526 | - choices: import_v42.z.array(
527 | - import_v42.z.object({
528 | - delta: import_v42.z.object({
529 | - role: import_v42.z.enum(["assistant"]).nullish(),
530 | - content: import_v42.z.string().nullish(),
531 | - tool_calls: import_v42.z.array(
532 | - import_v42.z.object({
533 | - index: import_v42.z.number(),
534 | - id: import_v42.z.string().nullish(),
535 | - type: import_v42.z.literal("function").optional(),
536 | - function: import_v42.z.object({
537 | - name: import_v42.z.string().nullish(),
538 | - arguments: import_v42.z.string().nullish()
539 | - })
540 | - })
541 | - ).nullish()
542 | - }).nullish(),
543 | - finish_reason: import_v42.z.string().nullish()
544 | - })
545 | - ),
546 | - usage: import_v42.z.object({
547 | - prompt_tokens: import_v42.z.number().nullish(),
548 | - completion_tokens: import_v42.z.number().nullish()
549 | - }).nullish()
550 | - }),
551 | - import_v42.z.object({
552 | - name: import_v42.z.string(),
553 | - status: import_v42.z.enum(["ENDED", "STARTED", "ERRORED", "UPDATING"]),
554 | - message: import_v42.z.null(),
555 | - parameters: import_v42.z.array(
556 | - import_v42.z.object({
557 | - name: import_v42.z.string(),
558 | - value: import_v42.z.string()
559 | - })
560 | - ),
561 | - result: import_v42.z.string().nullable(),
562 | - error: import_v42.z.object({
563 | - type: import_v42.z.enum(["INVALID_PARAMETER", "UNKNOWN"]),
564 | - msg: import_v42.z.string()
565 | - }).nullable(),
566 | - timestamp: import_v42.z.number(),
567 | - usage: import_v42.z.null(),
568 | - tool_call_id: import_v42.z.string().nullable()
569 | - // temporary fix for "file:text" tool calls
570 | - }),
571 | - friendliaiErrorSchema
572 | - ]);
573 671 | var openaiCompatibleTokenUsageSchema = import_v42.z.object({
574 672 | prompt_tokens: import_v42.z.number().nullish(),
575 673 | completion_tokens: import_v42.z.number().nullish(),
@@ -637,6 +735,25 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_v42.z.union(
637 735 | ),
638 736 | usage: openaiCompatibleTokenUsageSchema
639 737 | }),
738 | + import_v42.z.object({
739 | + name: import_v42.z.string(),
740 | + status: import_v42.z.enum(["ENDED", "STARTED", "ERRORED", "UPDATING"]),
741 | + message: import_v42.z.null(),
742 | + parameters: import_v42.z.array(
743 | + import_v42.z.object({
744 | + name: import_v42.z.string(),
745 | + value: import_v42.z.string()
746 | + })
747 | + ),
748 | + result: import_v42.z.string().nullable(),
749 | + error: import_v42.z.object({
750 | + type: import_v42.z.enum(["INVALID_PARAMETER", "UNKNOWN"]),
751 | + msg: import_v42.z.string()
752 | + }).nullable(),
753 | + timestamp: import_v42.z.number(),
754 | + usage: import_v42.z.null(),
755 | + tool_call_id: import_v42.z.string().nullable()
756 | + }),
640 757 | errorSchema
641 758 | ]);
642 759 | var friendliProviderOptionsSchema = import_v42.z.object({
@@ -649,15 +766,34 @@ var friendliProviderOptionsSchema = import_v42.z.object({
649 766 | */
650 767 | // regex: z.instanceof(RegExp).nullish(),
651 768 | regex: import_v42.z.string().nullish(),
652 | - chat_template_kwargs: import_v42.z.record(import_v42.z.string(), import_v42.z.any()).nullish()
769 | + chat_template_kwargs: import_v42.z.record(import_v42.z.string(), import_v42.z.any()).nullish(),
770 | + /**
771 | + * A scaling factor used to determine the minimum token probability threshold.
772 | + */
773 | + minP: import_v42.z.number().nullish(),
774 | + /**
775 | + * Penalizes tokens that have already appeared in the generated result.
776 | + */
777 | + repetitionPenalty: import_v42.z.number().nullish(),
778 | + /**
779 | + * A probability threshold used to identify “top choice” tokens for exclusion in XTC sampling.
780 | + */
781 | + xtcThreshold: import_v42.z.number().nullish(),
782 | + /**
783 | + * The probability that XTC (Exclude Top Choices) filtering will be applied for each sampling decision.
784 | + */
785 | + xtcProbability: import_v42.z.number().nullish()
653 786 | });
654 787 |
655 788 | // src/friendli-settings.ts
656 789 | var FriendliAIServerlessModelIds = [
790 | + "MiniMaxAI/MiniMax-M2",
791 | + "zai-org/GLM-4.6",
657 792 | "LGAI-EXAONE/EXAONE-4.0.1-32B",
658 793 | "skt/A.X-4.0",
659 794 | "skt/A.X-3.1",
660 795 | "naver-hyperclovax/HyperCLOVAX-SEED-Think-14B",
796 | + "deepseek-ai/DeepSeek-V3.1",
661 797 | "deepseek-ai/DeepSeek-R1-0528",
662 798 | "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
663 799 | "meta-llama/Llama-4-Scout-17B-16E-Instruct",
@@ -676,65 +812,78 @@ var FriendliAIServerlessModelIds = [
676 812 | ];
677 813 |
678 814 | // src/friendli-tools.ts
679 | -
680 | -
681 | -
682 | -
683 | -
684 | -
685 | -
815 | + var import_provider_utils3 = require("@ai-sdk/provider-utils");
816 | + var import_v43 = require("zod/v4");
817 | + var inputSchema = import_v43.z.object({}).loose();
818 | + var outputSchema = import_v43.z.unknown();
819 | + var webSearchTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
820 | + id: "friendli.web:search",
821 | + inputSchema,
822 | + outputSchema
823 | + });
824 | + var webUrlTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
825 | + id: "friendli.web:url",
826 | + inputSchema,
827 | + outputSchema
828 | + });
829 | + var mathCalendarTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
830 | + id: "friendli.math:calendar",
831 | + inputSchema,
832 | + outputSchema
833 | + });
834 | + var mathStatisticsTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
835 | + id: "friendli.math:statistics",
836 | + inputSchema,
837 | + outputSchema
838 | + });
839 | + var mathCalculatorTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
840 | + id: "friendli.math:calculator",
841 | + inputSchema,
842 | + outputSchema
843 | + });
844 | + var codePythonInterpreterTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
845 | + id: "friendli.code:python-interpreter",
846 | + inputSchema,
847 | + outputSchema
848 | + });
849 | + var linkupSearchTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
850 | + id: "friendli.linkup:search",
851 | + inputSchema,
852 | + outputSchema
853 | + });
854 | + function webSearch() {
855 | + return webSearchTool({});
686 856 | }
687 | - function
688 | - return {
689 | - type: "provider-defined",
690 | - id: "friendli.web:search",
691 | - name: "web:search",
692 | - args: {}
693 | - };
857 | + function webUrl() {
858 | + return webUrlTool({});
694 859 | }
695 | - function
696 | - return {
697 | - type: "provider-defined",
698 | - id: "friendli.math:calendar",
699 | - name: "math:calendar",
700 | - args: {}
701 | - };
860 | + function mathCalendar() {
861 | + return mathCalendarTool({});
702 862 | }
703 | - function
704 | - return {
705 | - type: "provider-defined",
706 | - id: "friendli.math:statistics",
707 | - name: "math:statistics",
708 | - args: {}
709 | - };
863 | + function mathStatistics() {
864 | + return mathStatisticsTool({});
710 865 | }
711 | - function
712 | - return {
713 | - type: "provider-defined",
714 | - id: "friendli.math:calculator",
715 | - name: "math:calculator",
716 | - args: {}
717 | - };
866 | + function mathCalculator() {
867 | + return mathCalculatorTool({});
718 868 | }
719 | - function
720 | - return {
721 | -
722 | -
723 | -
724 | - args: {}
725 | - };
869 | + function codePythonInterpreter() {
870 | + return codePythonInterpreterTool({});
871 | + }
872 | + function linkupSearch() {
873 | + return linkupSearchTool({});
726 874 | }
727 875 | var friendliTools = {
728 | -
729 | -
730 | -
731 | -
732 | -
733 | -
876 | + webSearch,
877 | + webUrl,
878 | + mathCalendar,
879 | + mathStatistics,
880 | + mathCalculator,
881 | + codePythonInterpreter,
882 | + linkupSearch
734 883 | };
735 884 |
736 885 | // src/get-available-models.ts
737 | - var
886 | + var import_provider_utils4 = require("@ai-sdk/provider-utils");
738 887 | var DEFAULT_GRAPHQL_URL = "https://api-internal.friendli.ai/api/graphql";
739 888 | async function postGraphQL(url, body, headers) {
740 889 | const res = await fetch(url, {
@@ -770,7 +919,7 @@ async function getAvailableModelsImpl(options) {
770 919 | var _a, _b, _c, _d, _e, _f;
771 920 | let token;
772 921 | try {
773 | - token = (_a = options.apiKey) != null ? _a : (0,
922 | + token = (_a = options.apiKey) != null ? _a : (0, import_provider_utils4.loadApiKey)({
774 923 | apiKey: void 0,
775 924 | environmentVariableName: "FRIENDLI_TOKEN",
776 925 | description: "FRIENDLI_TOKEN"
@@ -843,7 +992,7 @@ async function getAvailableModelsImpl(options) {
843 992 | // src/friendli-provider.ts
844 993 | function createFriendli(options = {}) {
845 994 | const getHeaders = () => ({
846 | - Authorization: `Bearer ${(0,
995 | + Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
847 996 | apiKey: options.apiKey,
848 997 | environmentVariableName: "FRIENDLI_TOKEN",
849 998 | description: "FRIENDLI_TOKEN"
@@ -857,7 +1006,7 @@ function createFriendli(options = {}) {
857 1006 | serverless_tools: "https://api.friendli.ai/serverless/tools/v1",
858 1007 | dedicated: "https://api.friendli.ai/dedicated/v1"
859 1008 | };
860 | - const customBaseURL = (0,
1009 | + const customBaseURL = (0, import_provider_utils5.withoutTrailingSlash)(baseURL);
861 1010 | if (typeof customBaseURL === "string" && customBaseURL !== "dedicated" && customBaseURL !== "serverless" && customBaseURL !== "serverless-tools") {
862 1011 | return { baseURL: customBaseURL, type: "custom" };
863 1012 | }
@@ -897,7 +1046,8 @@ function createFriendli(options = {}) {
897 1046 | provider: `friendliai.${type}.chat`,
898 1047 | url: ({ path }) => `${baseURL}${path}`,
899 1048 | headers: getHeaders,
900 | - fetch: options.fetch
1049 | + fetch: options.fetch,
1050 | + includeUsage: options.includeUsage
901 1051 | });
902 1052 | };
903 1053 | const createCompletionModel = (modelId) => {
@@ -911,23 +1061,23 @@ function createFriendli(options = {}) {
911 1061 | });
912 1062 | };
913 1063 | const createTextEmbeddingModel = (modelId) => {
914 | - throw new
1064 | + throw new import_provider4.NoSuchModelError({ modelId, modelType: "embeddingModel" });
915 1065 | };
916 1066 | const createImageModel = (modelId) => {
917 | - throw new
1067 | + throw new import_provider4.NoSuchModelError({ modelId, modelType: "imageModel" });
918 1068 | };
919 1069 | const createTranscriptionModel = (modelId) => {
920 | - throw new
1070 | + throw new import_provider4.NoSuchModelError({ modelId, modelType: "languageModel" });
921 1071 | };
922 1072 | const createSpeechModel = (modelId) => {
923 | - throw new
1073 | + throw new import_provider4.NoSuchModelError({ modelId, modelType: "languageModel" });
924 1074 | };
925 1075 | const provider = (modelId) => createLanguageModel(modelId);
926 1076 | provider.languageModel = createLanguageModel;
927 1077 | provider.chat = createLanguageModel;
928 1078 | provider.completion = createCompletionModel;
929 1079 | provider.embedding = createTextEmbeddingModel;
930 | - provider.
1080 | + provider.embeddingModel = createTextEmbeddingModel;
931 1081 | provider.getAvailableModels = async (opts) => {
932 1082 | var _a;
933 1083 | const defaultURL = "https://api-internal.friendli.ai/api/graphql";