@friendliai/ai-provider 1.0.0-beta.1 → 1.0.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/README.md +20 -0
- package/dist/index.d.mts +17 -12
- package/dist/index.d.ts +17 -12
- package/dist/index.js +231 -159
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +222 -151
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -1
package/dist/index.js
CHANGED
@@ -27,30 +27,101 @@ module.exports = __toCommonJS(index_exports);
 
 // src/friendli-provider.ts
 var import_openai_compatible = require("@ai-sdk/openai-compatible");
-var
+var import_provider4 = require("@ai-sdk/provider");
 var import_provider_utils5 = require("@ai-sdk/provider-utils");
 
 // src/friendli-chat-language-model.ts
 var import_internal = require("@ai-sdk/openai-compatible/internal");
-var
+var import_provider3 = require("@ai-sdk/provider");
 var import_provider_utils2 = require("@ai-sdk/provider-utils");
 var import_v42 = require("zod/v4");
 
 // src/friendli-error.ts
+var import_provider = require("@ai-sdk/provider");
 var import_provider_utils = require("@ai-sdk/provider-utils");
 var import_v4 = require("zod/v4");
-var
+var friendliErrorResponseSchema = import_v4.z.object({
   message: import_v4.z.string(),
-  error: import_v4.z.record(import_v4.z.string(), import_v4.z.any())
+  error: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
 });
+var openAIStyleErrorResponseSchema = import_v4.z.object({
+  error: import_v4.z.object({
+    message: import_v4.z.string()
+  }).loose()
+}).loose();
+var friendliaiErrorSchema = import_v4.z.union([
+  // OpenAI/OpenRouter style error: { "error": { "message": "..." } }
+  openAIStyleErrorResponseSchema,
+  // Friendli style error: { "message": "...", "error": { ... } }
+  friendliErrorResponseSchema
+]);
 var friendliaiErrorStructure = {
   errorSchema: friendliaiErrorSchema,
-  errorToMessage: (data) =>
+  errorToMessage: (data) => {
+    if (typeof data === "object" && data != null && "error" in data && typeof data.error === "object" && data.error != null && "message" in data.error && typeof data.error.message === "string") {
+      return data.error.message;
+    }
+    if (typeof data === "object" && data != null && "message" in data && typeof data.message === "string") {
+      return data.message;
+    }
+    return "Unknown error";
+  }
+};
+var friendliaiFailedResponseHandler = async ({
+  response,
+  url,
+  requestBodyValues
+}) => {
+  const responseBody = await response.text();
+  const responseHeaders = {};
+  response.headers.forEach((value, key) => {
+    responseHeaders[key] = value;
+  });
+  const baseErrorOptions = {
+    url,
+    requestBodyValues,
+    statusCode: response.status,
+    responseHeaders,
+    responseBody
+  };
+  const trimmedBody = responseBody.trim();
+  if (trimmedBody === "") {
+    const fallback2 = response.statusText || `Request failed with status ${response.status}`;
+    return {
+      responseHeaders,
+      value: new import_provider.APICallError({
+        message: fallback2,
+        ...baseErrorOptions
+      })
+    };
+  }
+  const parsedError = await (0, import_provider_utils.safeParseJSON)({
+    text: responseBody,
+    schema: friendliaiErrorSchema
+  });
+  if (parsedError.success) {
+    return {
+      responseHeaders,
+      value: new import_provider.APICallError({
+        message: friendliaiErrorStructure.errorToMessage(parsedError.value),
+        data: parsedError.value,
+        ...baseErrorOptions
+      })
+    };
+  }
+  const fallback = trimmedBody || response.statusText || `Request failed with status ${response.status}`;
+  return {
+    responseHeaders,
+    value: new import_provider.APICallError({
+      message: fallback,
+      cause: parsedError.error,
+      ...baseErrorOptions
+    })
+  };
 };
-var friendliaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(friendliaiErrorStructure);
 
 // src/friendli-prepare-tools.ts
-var
+var import_provider2 = require("@ai-sdk/provider");
 function prepareTools({
   tools,
   toolChoice
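
The hunk above drops `createJsonErrorResponseHandler` in favor of a hand-rolled failed-response handler that accepts two error body shapes and falls back to the raw body or status text. A minimal sketch of the payloads involved; the field values are hypothetical, only the shapes come from the schemas in this hunk:

// Hypothetical payloads, illustrating what the new handler surfaces as APICallError.message.

// OpenAI/OpenRouter style -> message becomes "Invalid API key".
const openAIStyleBody = { error: { message: "Invalid API key", code: "invalid_api_key" } };

// Friendli style -> message becomes "Model not found".
const friendliStyleBody = { message: "Model not found", error: { type: "NOT_FOUND" } };

// Empty bodies fall back to response.statusText or `Request failed with status ${status}`;
// unparsable bodies fall back to the trimmed body text, with the parse error attached as `cause`.
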
@@ -65,7 +136,9 @@ function prepareTools({
   for (const tool of tools) {
     if (tool.type === "provider") {
       openaiCompatTools.push({
-        // NOTE:
+        // NOTE: Friendli tool-assisted API expects provider tool types like "web:search".
+        // We derive it from the provider tool id (e.g. "friendli.web:search" -> "web:search")
+        // instead of tool.name (often "web_search").
         type: (_a = tool.id.split(".")[1]) != null ? _a : "unknown"
       });
     } else {
@@ -99,7 +172,7 @@ function prepareTools({
       };
     default: {
       const _exhaustiveCheck = type;
-      throw new
+      throw new import_provider2.UnsupportedFunctionalityError({
        functionality: `tool choice type: ${_exhaustiveCheck}`
      });
    }
@@ -107,6 +180,28 @@ function prepareTools({
 }
 
 // src/friendli-chat-language-model.ts
+function isRecord(value) {
+  return typeof value === "object" && value != null;
+}
+function isHostedToolExecutionChunk(value) {
+  if (!isRecord(value)) return false;
+  return typeof value.status === "string" && typeof value.name === "string" && Array.isArray(value.parameters);
+}
+function getChunkErrorMessage(value) {
+  if (!isRecord(value)) return void 0;
+  if (typeof value.message === "string") {
+    return value.message;
+  }
+  const nestedError = value.error;
+  if (isRecord(nestedError) && typeof nestedError.message === "string") {
+    return nestedError.message;
+  }
+  return void 0;
+}
+function isOpenAIChatChunk(value) {
+  if (!isRecord(value)) return false;
+  return Array.isArray(value.choices);
+}
 var FriendliAIChatLanguageModel = class {
   // type inferred via constructor
   constructor(modelId, config) {
@@ -116,7 +211,7 @@ var FriendliAIChatLanguageModel = class {
     this.config = config;
     const errorStructure = friendliaiErrorStructure;
     this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);
-    this.failedResponseHandler =
+    this.failedResponseHandler = friendliaiFailedResponseHandler;
     this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : true;
   }
   get provider() {
@@ -144,14 +239,20 @@ var FriendliAIChatLanguageModel = class {
   }) {
     var _a;
     const warnings = [];
-    if (topK != null) {
-      warnings.push({ type: "unsupported", feature: "topK" });
-    }
     const friendliOptions = await (0, import_provider_utils2.parseProviderOptions)({
+      provider: "friendliai",
+      providerOptions,
+      schema: friendliProviderOptionsSchema
+    });
+    const legacyFriendliOptions = await (0, import_provider_utils2.parseProviderOptions)({
       provider: "friendli",
       providerOptions,
       schema: friendliProviderOptionsSchema
     });
+    const options = {
+      ...legacyFriendliOptions,
+      ...friendliOptions
+    };
     if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
       warnings.push({
         type: "unsupported",
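
With this hunk, provider options are parsed under both the new "friendliai" key and the legacy "friendli" key, then merged with the "friendliai" values taking precedence. A hedged usage sketch; `createFriendli` appears in this bundle and `generateText` is the standard AI SDK call, while the model id, token setup, and option values are placeholders:

import { generateText } from "ai";
import { createFriendli } from "@friendliai/ai-provider";

// Assumption: the Friendli API token is configured via environment or provider settings.
const friendli = createFriendli();

const { text } = await generateText({
  model: friendli("meta-llama-3.1-8b-instruct"), // placeholder model id
  prompt: "Summarize this changelog entry.",
  providerOptions: {
    // New key, parsed directly as of this release.
    friendliai: { regex: "[A-Za-z ]+" },
    // The legacy "friendli" key is still read, but "friendliai" wins on conflicting fields.
  },
});
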
@@ -167,6 +268,14 @@ var FriendliAIChatLanguageModel = class {
       tools,
       toolChoice
     });
+    const isToolsPresent = openaiTools != null && openaiTools.length > 0;
+    if (isToolsPresent && (responseFormat != null || (options == null ? void 0 : options.regex) != null)) {
+      warnings.push({
+        type: "unsupported",
+        feature: "responseFormat",
+        details: "response_format is not supported when tools are present."
+      });
+    }
     return {
       args: {
         // >>> hard-coded default options >>>
@@ -178,30 +287,33 @@ var FriendliAIChatLanguageModel = class {
         max_tokens: maxOutputTokens,
         temperature,
         top_p: topP,
+        top_k: topK,
         frequency_penalty: frequencyPenalty,
         presence_penalty: presencePenalty,
-        response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
+        response_format: isToolsPresent === false ? (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
           type: "json_schema",
           json_schema: {
             schema: responseFormat.schema,
             name: (_a = responseFormat.name) != null ? _a : "response",
             description: responseFormat.description
           }
-        } : { type: "json_object" } : (
+        } : { type: "json_object" } : (options == null ? void 0 : options.regex) != null ? {
           type: "regex",
-          schema:
-        } : void 0,
+          schema: options.regex
+        } : void 0 : void 0,
         stop: stopSequences,
         seed,
-
-
-
+        min_p: options == null ? void 0 : options.minP,
+        repetition_penalty: options == null ? void 0 : options.repetitionPenalty,
+        xtc_threshold: options == null ? void 0 : options.xtcThreshold,
+        xtc_probability: options == null ? void 0 : options.xtcProbability,
+        ...(options == null ? void 0 : options.chat_template_kwargs) ? { chat_template_kwargs: options.chat_template_kwargs } : {},
         // messages:
         messages: (0, import_internal.convertToOpenAICompatibleChatMessages)(prompt),
         // tools:
         tools: openaiTools,
         tool_choice: openaiToolChoice,
-        parallel_tool_calls:
+        parallel_tool_calls: options == null ? void 0 : options.parallelToolCalls
       },
       warnings: [...warnings, ...toolWarnings]
     };
@@ -320,9 +432,10 @@ var FriendliAIChatLanguageModel = class {
         start(controller) {
           controller.enqueue({ type: "stream-start", warnings });
         },
-        //
+        // NOTE: Chunk values can contain OpenAI-compatible deltas, hosted tool events, and error events.
+        // We narrow with type guards for safe handling.
         transform(chunk, controller) {
-          var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+          var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
           if (!chunk.success) {
             finishReason = "error";
             controller.enqueue({ type: "error", error: chunk.error });
@@ -330,7 +443,7 @@ var FriendliAIChatLanguageModel = class {
           }
           const value = chunk.value;
           metadataExtractor == null ? void 0 : metadataExtractor.processChunk(chunk.rawValue);
-          if (
+          if (isHostedToolExecutionChunk(value)) {
             const toolCallId = (_a2 = value.tool_call_id) != null ? _a2 : (0, import_provider_utils2.generateId)();
             switch (value.status) {
               case "STARTED":
@@ -373,26 +486,36 @@ var FriendliAIChatLanguageModel = class {
            }
            return;
          }
-
+          const chunkErrorMessage = getChunkErrorMessage(value);
+          if (chunkErrorMessage != null) {
+            finishReason = "error";
+            controller.enqueue({ type: "error", error: chunkErrorMessage });
+            return;
+          }
+          if (!isOpenAIChatChunk(value)) {
            finishReason = "error";
-            controller.enqueue({
+            controller.enqueue({
+              type: "error",
+              error: new Error("Unsupported chunk shape")
+            });
            return;
          }
+          const chunkValue = value;
          if (isFirstChunk) {
            isFirstChunk = false;
            controller.enqueue({
              type: "response-metadata",
-              ...(0, import_internal.getResponseMetadata)(
+              ...(0, import_internal.getResponseMetadata)(chunkValue)
            });
          }
-          if (
+          if (chunkValue.usage != null) {
            const {
              prompt_tokens,
              completion_tokens,
              total_tokens,
              prompt_tokens_details,
              completion_tokens_details
-            } =
+            } = chunkValue.usage;
            usage.promptTokens = prompt_tokens != null ? prompt_tokens : void 0;
            usage.completionTokens = completion_tokens != null ? completion_tokens : void 0;
            usage.totalTokens = total_tokens != null ? total_tokens : void 0;
@@ -409,7 +532,7 @@ var FriendliAIChatLanguageModel = class {
              usage.promptTokensDetails.cachedTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
            }
          }
-          const choice =
+          const choice = chunkValue.choices[0];
          if ((choice == null ? void 0 : choice.finish_reason) != null) {
            finishReason = (0, import_internal.mapOpenAICompatibleFinishReason)(choice.finish_reason);
          }
@@ -436,19 +559,19 @@ var FriendliAIChatLanguageModel = class {
              const index = toolCallDelta.index;
              if (toolCalls[index] == null) {
                if (toolCallDelta.type !== "function") {
-                  throw new
+                  throw new import_provider3.InvalidResponseDataError({
                    data: toolCallDelta,
                    message: `Expected 'function' type.`
                  });
                }
                if (toolCallDelta.id == null) {
-                  throw new
+                  throw new import_provider3.InvalidResponseDataError({
                    data: toolCallDelta,
                    message: `Expected 'id' to be a string.`
                  });
                }
                if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
-                  throw new
+                  throw new import_provider3.InvalidResponseDataError({
                    data: toolCallDelta,
                    message: `Expected 'function.name' to be a string.`
                  });
@@ -493,12 +616,12 @@ var FriendliAIChatLanguageModel = class {
              controller.enqueue({
                type: "tool-input-delta",
                id: toolCall.id,
-                delta: (_m = toolCallDelta.function.arguments) != null ?
+                delta: (_n = (_m = toolCallDelta.function) == null ? void 0 : _m.arguments) != null ? _n : ""
              });
-              if (((
+              if (((_o = toolCall.function) == null ? void 0 : _o.name) != null && ((_p = toolCall.function) == null ? void 0 : _p.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
                controller.enqueue({
                  type: "tool-call",
-                  toolCallId: (
+                  toolCallId: (_q = toolCall.id) != null ? _q : (0, import_provider_utils2.generateId)(),
                  toolName: toolCall.function.name,
                  input: toolCall.function.arguments
                });
@@ -545,86 +668,6 @@ var FriendliAIChatLanguageModel = class {
     };
   }
 };
-var friendliAIChatResponseSchema = import_v42.z.object({
-  id: import_v42.z.string().nullish(),
-  created: import_v42.z.number().nullish(),
-  model: import_v42.z.string().nullish(),
-  choices: import_v42.z.array(
-    import_v42.z.object({
-      message: import_v42.z.object({
-        role: import_v42.z.literal("assistant").nullish(),
-        content: import_v42.z.string().nullish(),
-        tool_calls: import_v42.z.array(
-          import_v42.z.object({
-            id: import_v42.z.string().nullish(),
-            type: import_v42.z.literal("function"),
-            function: import_v42.z.object({
-              name: import_v42.z.string(),
-              arguments: import_v42.z.union([import_v42.z.string(), import_v42.z.any()]).nullish()
-            })
-          })
-        ).nullish()
-      }),
-      finish_reason: import_v42.z.string().nullish()
-    })
-  ),
-  usage: import_v42.z.object({
-    prompt_tokens: import_v42.z.number().nullish(),
-    completion_tokens: import_v42.z.number().nullish()
-  }).nullish()
-});
-var friendliaiChatChunkSchema = import_v42.z.union([
-  import_v42.z.object({
-    id: import_v42.z.string().nullish(),
-    created: import_v42.z.number().nullish(),
-    model: import_v42.z.string().nullish(),
-    choices: import_v42.z.array(
-      import_v42.z.object({
-        delta: import_v42.z.object({
-          role: import_v42.z.enum(["assistant"]).nullish(),
-          content: import_v42.z.string().nullish(),
-          tool_calls: import_v42.z.array(
-            import_v42.z.object({
-              index: import_v42.z.number(),
-              id: import_v42.z.string().nullish(),
-              type: import_v42.z.literal("function").optional(),
-              function: import_v42.z.object({
-                name: import_v42.z.string().nullish(),
-                arguments: import_v42.z.string().nullish()
-              })
-            })
-          ).nullish()
-        }).nullish(),
-        finish_reason: import_v42.z.string().nullish()
-      })
-    ),
-    usage: import_v42.z.object({
-      prompt_tokens: import_v42.z.number().nullish(),
-      completion_tokens: import_v42.z.number().nullish()
-    }).nullish()
-  }),
-  import_v42.z.object({
-    name: import_v42.z.string(),
-    status: import_v42.z.enum(["ENDED", "STARTED", "ERRORED", "UPDATING"]),
-    message: import_v42.z.null(),
-    parameters: import_v42.z.array(
-      import_v42.z.object({
-        name: import_v42.z.string(),
-        value: import_v42.z.string()
-      })
-    ),
-    result: import_v42.z.string().nullable(),
-    error: import_v42.z.object({
-      type: import_v42.z.enum(["INVALID_PARAMETER", "UNKNOWN"]),
-      msg: import_v42.z.string()
-    }).nullable(),
-    timestamp: import_v42.z.number(),
-    usage: import_v42.z.null(),
-    tool_call_id: import_v42.z.string().nullable()
-    // temporary fix for "file:text" tool calls
-  }),
-  friendliaiErrorSchema
-]);
 var openaiCompatibleTokenUsageSchema = import_v42.z.object({
   prompt_tokens: import_v42.z.number().nullish(),
   completion_tokens: import_v42.z.number().nullish(),
@@ -723,7 +766,23 @@ var friendliProviderOptionsSchema = import_v42.z.object({
   */
  // regex: z.instanceof(RegExp).nullish(),
  regex: import_v42.z.string().nullish(),
-  chat_template_kwargs: import_v42.z.record(import_v42.z.string(), import_v42.z.any()).nullish()
+  chat_template_kwargs: import_v42.z.record(import_v42.z.string(), import_v42.z.any()).nullish(),
+  /**
+   * A scaling factor used to determine the minimum token probability threshold.
+   */
+  minP: import_v42.z.number().nullish(),
+  /**
+   * Penalizes tokens that have already appeared in the generated result.
+   */
+  repetitionPenalty: import_v42.z.number().nullish(),
+  /**
+   * A probability threshold used to identify “top choice” tokens for exclusion in XTC sampling.
+   */
+  xtcThreshold: import_v42.z.number().nullish(),
+  /**
+   * The probability that XTC (Exclude Top Choices) filtering will be applied for each sampling decision.
+   */
+  xtcProbability: import_v42.z.number().nullish()
 });
 
 // src/friendli-settings.ts
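
The four new provider options take camelCase keys here and are forwarded as the snake_case request fields shown in the earlier request-construction hunk (min_p, repetition_penalty, xtc_threshold, xtc_probability). A hedged usage sketch with a placeholder model id and arbitrary example values:

import { streamText } from "ai";
import { createFriendli } from "@friendliai/ai-provider";

const friendli = createFriendli(); // assumes the Friendli token is configured via environment

const result = streamText({
  model: friendli("meta-llama-3.1-8b-instruct"), // placeholder model id
  prompt: "Write a haiku about semantic versioning.",
  providerOptions: {
    friendliai: {
      minP: 0.05,              // sent as min_p
      repetitionPenalty: 1.05, // sent as repetition_penalty
      xtcThreshold: 0.1,       // sent as xtc_threshold
      xtcProbability: 0.5,     // sent as xtc_probability
    },
  },
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}
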
@@ -754,53 +813,64 @@ var FriendliAIServerlessModelIds = [
 
 // src/friendli-tools.ts
 var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_v43 = require("zod/v4");
+var inputSchema = import_v43.z.object({}).loose();
+var outputSchema = import_v43.z.unknown();
+var webSearchTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.web:search",
+  inputSchema,
+  outputSchema
+});
+var webUrlTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.web:url",
+  inputSchema,
+  outputSchema
+});
+var mathCalendarTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.math:calendar",
+  inputSchema,
+  outputSchema
+});
+var mathStatisticsTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.math:statistics",
+  inputSchema,
+  outputSchema
+});
+var mathCalculatorTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.math:calculator",
+  inputSchema,
+  outputSchema
+});
+var codePythonInterpreterTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.code:python-interpreter",
+  inputSchema,
+  outputSchema
+});
+var linkupSearchTool = (0, import_provider_utils3.createProviderToolFactoryWithOutputSchema)({
+  id: "friendli.linkup:search",
+  inputSchema,
+  outputSchema
+});
 function webSearch() {
-  return {
-    type: "provider",
-    id: "friendli.web:search",
-    args: {},
-    inputSchema: (0, import_provider_utils3.jsonSchema)({ type: "object", properties: {} })
-  };
+  return webSearchTool({});
 }
 function webUrl() {
-  return {
-    type: "provider",
-    id: "friendli.web:url",
-    args: {},
-    inputSchema: (0, import_provider_utils3.jsonSchema)({ type: "object", properties: {} })
-  };
+  return webUrlTool({});
 }
 function mathCalendar() {
-  return {
-    type: "provider",
-    id: "friendli.math:calendar",
-    args: {},
-    inputSchema: (0, import_provider_utils3.jsonSchema)({ type: "object", properties: {} })
-  };
+  return mathCalendarTool({});
 }
 function mathStatistics() {
-  return {
-    type: "provider",
-    id: "friendli.math:statistics",
-    args: {},
-    inputSchema: (0, import_provider_utils3.jsonSchema)({ type: "object", properties: {} })
-  };
+  return mathStatisticsTool({});
 }
 function mathCalculator() {
-  return {
-    type: "provider",
-    id: "friendli.math:calculator",
-    args: {},
-    inputSchema: (0, import_provider_utils3.jsonSchema)({ type: "object", properties: {} })
-  };
+  return mathCalculatorTool({});
 }
 function codePythonInterpreter() {
-  return {
-
-
-
-    inputSchema: (0, import_provider_utils3.jsonSchema)({ type: "object", properties: {} })
-  };
+  return codePythonInterpreterTool({});
+}
+function linkupSearch() {
+  return linkupSearchTool({});
 }
 var friendliTools = {
   webSearch,
@@ -808,7 +878,8 @@ var friendliTools = {
   mathCalendar,
   mathStatistics,
   mathCalculator,
-  codePythonInterpreter
+  codePythonInterpreter,
+  linkupSearch
 };
 
 // src/get-available-models.ts
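
The hosted tools are now built with `createProviderToolFactoryWithOutputSchema`, and a `linkupSearch` tool joins the existing web, math, and code tools. A hedged sketch of wiring them into a call; the `friendli.tools` access path is an assumption based on the `friendliTools` map above and the usual AI SDK provider convention, so check the package README for the exact export:

import { generateText } from "ai";
import { createFriendli } from "@friendliai/ai-provider";

const friendli = createFriendli();

const { text } = await generateText({
  model: friendli("meta-llama-3.1-8b-instruct"), // placeholder model id
  prompt: "Search the web for the latest release notes and summarize them.",
  tools: {
    // Assumption: the friendliTools map is surfaced as `friendli.tools`.
    web_search: friendli.tools.webSearch(),
    linkup_search: friendli.tools.linkupSearch(),
  },
});
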
@@ -975,7 +1046,8 @@ function createFriendli(options = {}) {
       provider: `friendliai.${type}.chat`,
       url: ({ path }) => `${baseURL}${path}`,
       headers: getHeaders,
-      fetch: options.fetch
+      fetch: options.fetch,
+      includeUsage: options.includeUsage
     });
   };
   const createCompletionModel = (modelId) => {
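
`createFriendli` now forwards an `includeUsage` setting into the OpenAI-compatible chat config; in @ai-sdk/openai-compatible this typically controls whether streaming requests ask for usage accounting (stream_options.include_usage). A hedged sketch, assuming `includeUsage` is accepted alongside the other provider settings:

import { createFriendli } from "@friendliai/ai-provider";

// Assumption: includeUsage sits next to fetch/baseURL/headers on the settings object.
const friendli = createFriendli({
  includeUsage: true, // request token usage on streamed responses
});
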
@@ -989,16 +1061,16 @@ function createFriendli(options = {}) {
     });
   };
   const createTextEmbeddingModel = (modelId) => {
-    throw new
+    throw new import_provider4.NoSuchModelError({ modelId, modelType: "embeddingModel" });
   };
   const createImageModel = (modelId) => {
-    throw new
+    throw new import_provider4.NoSuchModelError({ modelId, modelType: "imageModel" });
   };
   const createTranscriptionModel = (modelId) => {
-    throw new
+    throw new import_provider4.NoSuchModelError({ modelId, modelType: "languageModel" });
   };
   const createSpeechModel = (modelId) => {
-    throw new
+    throw new import_provider4.NoSuchModelError({ modelId, modelType: "languageModel" });
   };
   const provider = (modelId) => createLanguageModel(modelId);
   provider.languageModel = createLanguageModel;