@friendliai/ai-provider 0.3.0-beta.6 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/README.md +18 -99
- package/dist/index.d.mts +30 -1
- package/dist/index.d.ts +30 -1
- package/dist/index.js +126 -24
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +125 -26
- package/dist/index.mjs.map +1 -1
- package/package.json +6 -6
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,27 @@
 # @friendliai/ai-provider
 
+## 0.3.0
+
+### Patch Changes
+
+- 10ab7aa: add chat_template_kwargs option
+- 10ab7aa: Added supported type model to friendli-provider
+- 10ab7aa: bump v5 package latest
+- 10ab7aa: dump deps (alpha.6)
+- 10ab7aa: Initial build for ai sdk v5 support (broken Friendli compatibility)
+- 10ab7aa: As of 25.08.19, the latest serverless list and model list are synchronized.
+- 10ab7aa: enable regex on v5 (beta)
+- 10ab7aa: Add getAvailableModels() for dynamic model discovery
+  ref: https://vercel.com/docs/ai-gateway/models-and-providers#dynamic-model-discovery
+- 10ab7aa: enable support Tool Assisted API
+
+## 0.3.0-beta.7
+
+### Patch Changes
+
+- ac374ab: Add getAvailableModels() for dynamic model discovery
+  ref: https://vercel.com/docs/ai-gateway/models-and-providers#dynamic-model-discovery
+
 ## 0.3.0-beta.6
 
 ### Patch Changes
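The headline additions in 0.3.0 are the `chat_template_kwargs` provider option and `getAvailableModels()`. Below is a minimal sketch of passing `chat_template_kwargs` through the AI SDK; the `providerOptions` key name (`friendli`) and the `enable_thinking` kwarg are assumptions for illustration, only the option name itself comes from this release.

```ts
import { friendli } from '@friendliai/ai-provider';
import { generateText } from 'ai';

const { text } = await generateText({
  model: friendli('meta-llama-3.3-70b-instruct'),
  prompt: 'What is the meaning of life?',
  providerOptions: {
    // Assumed provider key; the value is validated by the bundled
    // friendliProviderOptionsSchema (an arbitrary string-keyed record).
    friendli: {
      chat_template_kwargs: { enable_thinking: false }, // hypothetical kwarg
    },
  },
});
console.log(text);
```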
package/README.md
CHANGED

@@ -1,115 +1,34 @@
+<!-- header start -->
+<p align="center">
+  <img src="https://huggingface.co/datasets/FriendliAI/documentation-images/resolve/main/model-card-assets/friendliai.png" width="100%" alt="FriendliAI Logo">
+</p>
+<!-- header end -->
+
 # @friendliai/ai-provider
 
-
+[](https://www.npmjs.com/package/@friendliai/ai-provider)
+[](https://www.npmjs.com/package/@friendliai/ai-provider)
 
-
+A provider to use FriendliAI models with the Vercel AI SDK and OpenAI-compatible APIs.
 
-
+## Installation
 
 ```bash
 npm i @friendliai/ai-provider
 ```
 
-##
-
-The tokens required for model usage can be obtained from the [Friendli suite](https://friendli.ai/suite/).
-
-To use the provider, you need to set the `FRIENDLI_TOKEN` environment variable with your personal access token.
-
-```bash
-export FRIENDLI_TOKEN="YOUR_FRIENDLI_TOKEN"
-```
-
-Check the [FriendliAI documentation](https://friendli.ai/docs/guides/personal_access_tokens) for more information.
-
-## Provider Instance
+## Example
 
 ```ts
-import { friendli } from '@friendliai/ai-provider'
-
-
-## Language Models
-
-You can create [FriendliAI models](https://friendli.ai/docs/guides/serverless_endpoints/text_generation#model-supports) using a provider instance.
-The first argument is the model id, e.g. `meta-llama-3.1-8b-instruct`.
-
-```ts
-const model = friendli('meta-llama-3.1-8b-instruct')
-```
-
-### Example: Generating text
-
-You can use FriendliAI language models to generate text with the `generateText` function:
-
-```ts
-import { friendli } from "@friendliai/ai-provider";
-import { generateText } from 'ai'
+import { friendli } from '@friendliai/ai-provider';
+import { generateText } from 'ai';
 
 const { text } = await generateText({
-  model: friendli('meta-llama-3.
+  model: friendli('meta-llama-3.3-70b-instruct'),
   prompt: 'What is the meaning of life?',
-
-
-
-### Example: Using Enforcing Patterns (Regex)
-
-Specify a specific pattern (e.g., CSV), character sets, or specific language characters (e.g., Korean Hangul characters) for your LLM's output.
-
-```ts
-import { friendli } from '@friendliai/ai-provider'
-import { generateText } from 'ai'
-
-const { text } = await generateText({
-  model: friendli('meta-llama-3.1-8b-instruct', {
-    regex: '[\n ,.?!0-9\uac00-\ud7af]*',
-  }),
-  maxTokens: 40,
-  prompt: 'who is the first king of the Joseon Dynasty?',
-})
-
-console.log(text)
+  maxTokens: 20,
+});
+console.log(text);
 ```
 
-
-
-If you use `@friendliai/ai-provider`, you can use the [built-in tools](https://friendli.ai/docs/guides/serverless_endpoints/tools/built_in_tools) via the `tools` option.
-
-Built-in tools allow models to use tools to generate better answers. For example, a `web:search` tool can provide up-to-date answers to current questions.
-
-```ts highlight="1,8,9,10,11,12,13,14,15"
-import { friendli } from '@friendliai/ai-provider'
-import { convertToCoreMessages, streamText } from 'ai'
-
-export async function POST(req: Request) {
-  const { messages } = await req.json()
-
-  const result = await streamText({
-    model: friendli('meta-llama-3.1-8b-instruct', {
-      tools: [
-        { type: 'web:search' },
-        { type: 'math:calculator' },
-        { type: 'code:python-interpreter' }, // and more tools..!!
-      ],
-    }),
-    messages: convertToCoreMessages(messages),
-  })
-
-  return result.toDataStreamResponse()
-}
-```
-
-FriendliAI language models can also be used in the `streamText`, `generateObject`, `streamObject`, and `streamUI` functions.
-(see [AI SDK Core](/docs/ai-sdk-core) and [AI SDK RSC](/docs/ai-sdk-rsc)).
-
-## OpenAI Compatibility
-
-We can also use `@ai-sdk/openai` with OpenAI compatibility.
-
-```ts
-import { createOpenAI } from '@ai-sdk/openai'
-
-const friendli = createOpenAI({
-  baseURL: 'https://api.friendli.ai/serverless/v1',
-  apiKey: process.env.FRIENDLI_TOKEN,
-})
-```
+For more details, see the [official documentation](https://friendli.ai/docs/).
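The regex-constrained generation example was dropped from the README, but the changelog entry "enable regex on v5 (beta)" suggests the setting survives. A hedged sketch, assuming the v5 model settings keep the `regex` field shown in the pre-0.3.0 README:

```ts
import { friendli } from '@friendliai/ai-provider';
import { generateText } from 'ai';

const { text } = await generateText({
  // Constrain output to digits, punctuation, whitespace, and Hangul characters.
  model: friendli('meta-llama-3.3-70b-instruct', {
    regex: '[\n ,.?!0-9\uac00-\ud7af]*',
  }),
  prompt: 'Who was the first king of the Joseon Dynasty?',
});
console.log(text);
```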
package/dist/index.d.mts
CHANGED

@@ -6,6 +6,27 @@ declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruc
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
+type Pricing = {
+    inputToken?: number;
+    outputToken?: number;
+    responseTime?: number;
+    unitType?: 'TOKEN' | 'SECOND';
+    currency?: string;
+    unit?: string;
+};
+type FriendliAvailableModel = {
+    id: string;
+    name?: string | null;
+    description?: string | null;
+    pricing?: Pricing;
+    warm?: boolean;
+    cold?: boolean;
+    contextLength?: number | null;
+};
+type FriendliAvailableModelsResponse = {
+    models: FriendliAvailableModel[];
+};
+
 declare function webUrlBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function webSearchBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool;
@@ -65,6 +86,14 @@ interface FriendliAIProvider extends ProviderV2 {
      * Creates a text embedding model for text generation.
      * TODO: Implement for Dedicated users
      */
+    embedding(modelId: string & {}): LanguageModelV2;
+    textEmbeddingModel(modelId: string & {}): LanguageModelV2;
+    /**
+     * Returns the available models and their metadata.
+     */
+    getAvailableModels(options?: {
+        graphqlURL?: string;
+    }): Promise<FriendliAvailableModelsResponse>;
     embedding(modelId: string & {}): EmbeddingModelV2<string>;
     textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>;
     /**
@@ -102,4 +131,4 @@ declare const friendliaiErrorSchema: z.ZodObject<{
 }, z.core.$strip>;
 type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
-export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, createFriendli, friendli };
+export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, type FriendliAvailableModel, type FriendliAvailableModelsResponse, createFriendli, friendli };
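The new `getAvailableModels()` declaration returns the `FriendliAvailableModelsResponse` shape above. A short usage sketch based only on these type declarations; the actual field values depend on the serverless catalog at call time, and `graphqlURL` is only needed to override the default internal endpoint:

```ts
import { friendli } from '@friendliai/ai-provider';

const { models } = await friendli.getAvailableModels();
for (const model of models) {
  console.log(
    model.id,
    model.warm ? 'warm' : 'cold',
    model.contextLength ?? 'unknown context length',
    model.pricing?.inputToken, // per-unit input price, when reported
  );
}
```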
package/dist/index.d.ts
CHANGED

@@ -6,6 +6,27 @@ declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruc
 type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
 type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
 
+type Pricing = {
+    inputToken?: number;
+    outputToken?: number;
+    responseTime?: number;
+    unitType?: 'TOKEN' | 'SECOND';
+    currency?: string;
+    unit?: string;
+};
+type FriendliAvailableModel = {
+    id: string;
+    name?: string | null;
+    description?: string | null;
+    pricing?: Pricing;
+    warm?: boolean;
+    cold?: boolean;
+    contextLength?: number | null;
+};
+type FriendliAvailableModelsResponse = {
+    models: FriendliAvailableModel[];
+};
+
 declare function webUrlBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function webSearchBetaTool(): LanguageModelV2ProviderDefinedTool;
 declare function mathCalendarBetaTool(): LanguageModelV2ProviderDefinedTool;
@@ -65,6 +86,14 @@ interface FriendliAIProvider extends ProviderV2 {
      * Creates a text embedding model for text generation.
      * TODO: Implement for Dedicated users
      */
+    embedding(modelId: string & {}): LanguageModelV2;
+    textEmbeddingModel(modelId: string & {}): LanguageModelV2;
+    /**
+     * Returns the available models and their metadata.
+     */
+    getAvailableModels(options?: {
+        graphqlURL?: string;
+    }): Promise<FriendliAvailableModelsResponse>;
     embedding(modelId: string & {}): EmbeddingModelV2<string>;
     textEmbeddingModel(modelId: string & {}): EmbeddingModelV2<string>;
     /**
@@ -102,4 +131,4 @@ declare const friendliaiErrorSchema: z.ZodObject<{
 }, z.core.$strip>;
 type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
-export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, createFriendli, friendli };
+export { type FriendliAIErrorData, type FriendliAIProvider, type FriendliAIProviderSettings, type FriendliAvailableModel, type FriendliAvailableModelsResponse, createFriendli, friendli };
package/dist/index.js
CHANGED

@@ -27,7 +27,7 @@ module.exports = __toCommonJS(index_exports);
 
 // src/friendli-provider.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_openai_compatible = require("@ai-sdk/openai-compatible");
 
 // src/friendli-settings.ts
@@ -72,9 +72,7 @@ var friendliaiErrorStructure = {
   errorSchema: friendliaiErrorSchema,
   errorToMessage: (data) => data.message
 };
-var friendliaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(
-  friendliaiErrorStructure
-);
+var friendliaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(friendliaiErrorStructure);
 
 // src/friendli-prepare-tools.ts
 var import_provider = require("@ai-sdk/provider");
@@ -142,12 +140,8 @@ var FriendliAIChatLanguageModel = class {
     this.modelId = modelId;
     this.config = config;
     const errorStructure = friendliaiErrorStructure;
-    this.chunkSchema = createOpenAICompatibleChatChunkSchema(
-
-    );
-    this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(
-      friendliaiErrorStructure
-    );
+    this.chunkSchema = createOpenAICompatibleChatChunkSchema(errorStructure.errorSchema);
+    this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(friendliaiErrorStructure);
     this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : true;
   }
   get provider() {
@@ -253,9 +247,7 @@ var FriendliAIChatLanguageModel = class {
       headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
-        OpenAICompatibleChatResponseSchema
-      ),
+      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(OpenAICompatibleChatResponseSchema),
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
@@ -320,9 +312,7 @@ var FriendliAIChatLanguageModel = class {
      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
      body,
      failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
-        this.chunkSchema
-      ),
+      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(this.chunkSchema),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
@@ -397,9 +387,7 @@ var FriendliAIChatLanguageModel = class {
         }
         const choice = value.choices[0];
         if ((choice == null ? void 0 : choice.finish_reason) != null) {
-          finishReason = (0, import_internal.mapOpenAICompatibleFinishReason)(
-            choice.finish_reason
-          );
+          finishReason = (0, import_internal.mapOpenAICompatibleFinishReason)(choice.finish_reason);
         }
         if ((choice == null ? void 0 : choice.delta) == null) {
           return;
@@ -689,6 +677,113 @@ var friendliProviderOptionsSchema = import_zod2.z.object({
   chat_template_kwargs: import_zod2.z.record(import_zod2.z.string(), import_zod2.z.any()).nullish()
 });
 
+// src/get-available-models.ts
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var DEFAULT_GRAPHQL_URL = "https://api-internal.friendli.ai/api/graphql";
+async function postGraphQL(url, body, headers) {
+  const res = await fetch(url, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    body: JSON.stringify(body)
+  });
+  let json;
+  try {
+    json = await res.json();
+  } catch (err) {
+    console.error(
+      "Failed to parse JSON response from Friendli API:",
+      err,
+      "Status:",
+      res.status,
+      res.statusText
+    );
+    throw new Error(
+      `Failed to parse JSON response from Friendli API: ${err instanceof Error ? err.message : String(err)}`
+    );
+  }
+  return json;
+}
+function normalizePriceUnit(unit) {
+  if (!unit) return void 0;
+  return unit;
+}
+async function getAvailableModelsImpl(options) {
+  var _a, _b, _c, _d, _e, _f;
+  let token;
+  try {
+    token = (_a = options.apiKey) != null ? _a : (0, import_provider_utils3.loadApiKey)({
+      apiKey: void 0,
+      environmentVariableName: "FRIENDLI_TOKEN",
+      description: "FRIENDLI_TOKEN"
+    });
+  } catch (e) {
+    token = void 0;
+  }
+  const headers = {
+    ...token ? { Authorization: `Bearer ${token}` } : {},
+    ...options.teamId ? { "X-Friendli-Team": options.teamId } : {},
+    ...(_b = options.headers) != null ? _b : {}
+  };
+  const url = (_c = options.graphqlURL) != null ? _c : DEFAULT_GRAPHQL_URL;
+  const query = `
+    query Edges {
+      serverlessEndpoints {
+        edges {
+          ... on ServerlessChatEndpointCatalog {
+            id
+            name
+            status
+            price {
+              inputPrice
+              outputPrice
+              unit
+              responseTimePrice
+              priceUnitType
+            }
+            contextLength
+          }
+        }
+      }
+    }
+  `;
+  const resp = await postGraphQL(
+    url,
+    { query, variables: {}, operationName: "Edges" },
+    headers
+  );
+  if (resp.errors && resp.errors.length > 0) {
+    throw new Error(
+      `getAvailableModels: GraphQL error: ${resp.errors.map((e) => e.message).join("; ")}`
+    );
+  }
+  const edges = (_f = (_e = (_d = resp.data) == null ? void 0 : _d.serverlessEndpoints) == null ? void 0 : _e.edges) != null ? _f : [];
+  const models = edges.map((e) => {
+    var _a2, _b2, _c2, _d2, _e2, _f2;
+    const warm = e.status === "WARM";
+    const pricing = e.price ? {
+      inputToken: (_a2 = e.price.inputPrice) != null ? _a2 : void 0,
+      outputToken: (_b2 = e.price.outputPrice) != null ? _b2 : void 0,
+      responseTime: (_c2 = e.price.responseTimePrice) != null ? _c2 : void 0,
+      unitType: (_d2 = e.price.priceUnitType) != null ? _d2 : void 0,
+      unit: normalizePriceUnit(e.price.unit),
+      currency: "USD"
+    } : void 0;
+    return {
+      id: e.id,
+      name: (_e2 = e.name) != null ? _e2 : void 0,
+      description: void 0,
+      pricing,
+      warm,
+      cold: warm === false,
+      contextLength: (_f2 = e.contextLength) != null ? _f2 : void 0
+    };
+  });
+  return { models };
+}
+
 // src/friendli-tools.ts
 function webUrlBetaTool() {
   return {
@@ -750,7 +845,7 @@ var friendliTools = {
 // src/friendli-provider.ts
 function createFriendli(options = {}) {
   const getHeaders = () => ({
-    Authorization: `Bearer ${(0,
+    Authorization: `Bearer ${(0, import_provider_utils4.loadApiKey)({
      apiKey: options.apiKey,
      environmentVariableName: "FRIENDLI_TOKEN",
      description: "FRIENDLI_TOKEN"
@@ -764,7 +859,7 @@ function createFriendli(options = {}) {
     serverless_tools: "https://api.friendli.ai/serverless/tools/v1",
     dedicated: "https://api.friendli.ai/dedicated/v1"
   };
-  const customBaseURL = (0,
+  const customBaseURL = (0, import_provider_utils4.withoutTrailingSlash)(baseURL);
   if (typeof customBaseURL === "string" && customBaseURL !== "dedicated" && customBaseURL !== "serverless" && customBaseURL !== "serverless-tools") {
     return { baseURL: customBaseURL, type: "custom" };
   }
@@ -785,9 +880,7 @@ function createFriendli(options = {}) {
           type: "serverless-tools"
         };
       default:
-        if (FriendliAIServerlessModelIds.includes(
-          modelId
-        )) {
+        if (FriendliAIServerlessModelIds.includes(modelId)) {
           return {
             baseURL: FriendliBaseURL.serverless,
             type: "serverless"
@@ -837,6 +930,15 @@ function createFriendli(options = {}) {
   provider.completion = createCompletionModel;
   provider.embedding = createTextEmbeddingModel;
   provider.textEmbeddingModel = createTextEmbeddingModel;
+  provider.getAvailableModels = async (opts) => {
+    var _a;
+    const defaultURL = "https://api-internal.friendli.ai/api/graphql";
+    const graphqlURL = (_a = opts == null ? void 0 : opts.graphqlURL) != null ? _a : defaultURL;
+    const apiKey = options.apiKey;
+    const teamId = options.teamId;
+    const headers = options.headers;
+    return getAvailableModelsImpl({ apiKey, teamId, headers, graphqlURL });
+  };
   provider.imageModel = createImageModel;
   provider.transcription = createTranscriptionModel;
   provider.speech = createSpeechModel;