@friendliai/ai-provider 0.2.5 → 0.2.7-alpha.0

This diff compares the contents of the two package versions as published to their public registry and is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @friendliai/ai-provider
 
+ ## 0.2.7-alpha.0
+
+ ### Patch Changes
+
+ - 5ec8524: Initial build for ai sdk v5 support (broken Friendli compatibility)
+
+ ## 0.2.6
+
+ ### Patch Changes
+
+ - 3e347dc: bump dependencies version
+
  ## 0.2.5
 
  ### Patch Changes
package/README.md CHANGED
@@ -12,7 +12,7 @@ npm i @friendliai/ai-provider
 
  ## Credentials
 
- The tokens required for model usage can be obtained from the [Friendli suite](https://suite.friendli.ai/).
+ The tokens required for model usage can be obtained from the [Friendli suite](https://friendli.ai/suite/).
 
  To use the provider, you need to set the `FRIENDLI_TOKEN` environment variable with your personal access token.
 
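The README change above is only a documentation-URL fix; the credential flow itself is unchanged: the provider still expects a personal access token in the `FRIENDLI_TOKEN` environment variable. A minimal wiring sketch, assuming a `createFriendliAI` factory with an `apiKey` option in `FriendliAIProviderSettings` (a common AI SDK provider convention; neither name appears in the lines shown in this diff):

```ts
// Hypothetical setup sketch: `createFriendliAI` and its `apiKey` option are
// assumptions based on typical AI SDK provider conventions, not on this diff.
import { createFriendliAI } from "@friendliai/ai-provider";

const provider = createFriendliAI({
  // Personal access token, sourced as the README describes.
  apiKey: process.env.FRIENDLI_TOKEN,
});
```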
package/dist/index.d.mts CHANGED
@@ -1,38 +1,11 @@
- import { LanguageModelV1 } from '@ai-sdk/provider';
+ import { LanguageModelV2 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
- import { OpenAICompatibleChatSettings } from '@ai-sdk/openai-compatible';
  import { z } from 'zod';
 
  declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1"];
  type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
  type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
  type FriendliAIBetaChatModelId = string & {};
- interface FriendliAISharedSettings {
- /**
- * Sets the endpoint to which the request will be sent.
- * auto: automatically selected based on model_id
- * dedicated: Fixed to "/dedicated/v1"
- * serverless: automatically selected as one of "/serverless/beta", "/serverless/v1", or "/serverless/tools/v1"
- * Ignored if baseURL is specified.
- */
- endpoint?: 'auto' | 'dedicated' | 'serverless';
- }
- interface FriendliAIChatSettings extends FriendliAISharedSettings, OpenAICompatibleChatSettings {
- /**
- * BETA FEATURE: Include the model's training loss in the response.
- */
- tools?: Array<{
- type: 'web:url' | 'web:search' | 'math:calendar' | 'math:statistics' | 'math:calculator' | 'code:python-interpreter';
- }>;
- /**
- * Whether to enable parallel function calling during tool use. Default to true.
- */
- parallelToolCalls?: boolean;
- /**
- * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.
- */
- regex?: RegExp;
- }
 
  interface FriendliAIProviderSettings {
  /**
@@ -61,26 +34,26 @@ interface FriendliAIProvider {
  /**
  * Creates a model for text generation.
  */
- (modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ (modelId: FriendliAILanguageModelId): LanguageModelV2;
  /**
  * A model that has not yet been officially released
  */
- beta(modelId: FriendliAIBetaChatModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ beta(modelId: FriendliAIBetaChatModelId): LanguageModelV2;
  /**
  * Creates a chat model for text generation.
  */
- chat(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
- chatModel(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ chat(modelId: FriendliAILanguageModelId): LanguageModelV2;
+ chatModel(modelId: FriendliAILanguageModelId): LanguageModelV2;
  /**
  * Creates a completion model for text generation.
  */
- completion(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
- completionModel(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ completion(modelId: FriendliAILanguageModelId): LanguageModelV2;
+ completionModel(modelId: FriendliAILanguageModelId): LanguageModelV2;
  /**
  * Creates a text embedding model for text generation.
  */
- embedding(modelId: string & {}, settings?: FriendliAIChatSettings): LanguageModelV1;
- textEmbeddingModel(modelId: string & {}, settings?: FriendliAIChatSettings): LanguageModelV1;
+ embedding(modelId: string & {}): LanguageModelV2;
+ textEmbeddingModel(modelId: string & {}): LanguageModelV2;
  }
  /**
  Create an FriendliAI provider instance.
@@ -90,10 +63,13 @@ declare const friendli: FriendliAIProvider;
 
  declare const friendliaiErrorSchema: z.ZodObject<{
  message: z.ZodString;
+ error: z.ZodRecord<z.ZodString, z.ZodAny>;
  }, "strip", z.ZodTypeAny, {
  message: string;
+ error: Record<string, any>;
  }, {
  message: string;
+ error: Record<string, any>;
  }>;
  type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
 
package/dist/index.d.ts CHANGED
@@ -1,38 +1,11 @@
- import { LanguageModelV1 } from '@ai-sdk/provider';
+ import { LanguageModelV2 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
- import { OpenAICompatibleChatSettings } from '@ai-sdk/openai-compatible';
  import { z } from 'zod';
 
  declare const FriendliAIServerlessModelIds: readonly ["meta-llama-3.1-8b-instruct", "meta-llama-3.3-70b-instruct", "deepseek-r1"];
  type FriendliAIServerlessModelId = (typeof FriendliAIServerlessModelIds)[number];
  type FriendliAILanguageModelId = FriendliAIServerlessModelId | (string & {});
  type FriendliAIBetaChatModelId = string & {};
- interface FriendliAISharedSettings {
- /**
- * Sets the endpoint to which the request will be sent.
- * auto: automatically selected based on model_id
- * dedicated: Fixed to "/dedicated/v1"
- * serverless: automatically selected as one of "/serverless/beta", "/serverless/v1", or "/serverless/tools/v1"
- * Ignored if baseURL is specified.
- */
- endpoint?: 'auto' | 'dedicated' | 'serverless';
- }
- interface FriendliAIChatSettings extends FriendliAISharedSettings, OpenAICompatibleChatSettings {
- /**
- * BETA FEATURE: Include the model's training loss in the response.
- */
- tools?: Array<{
- type: 'web:url' | 'web:search' | 'math:calendar' | 'math:statistics' | 'math:calculator' | 'code:python-interpreter';
- }>;
- /**
- * Whether to enable parallel function calling during tool use. Default to true.
- */
- parallelToolCalls?: boolean;
- /**
- * BETA FEATURE: You can write a regular expression to force output that satisfies that regular expression.
- */
- regex?: RegExp;
- }
 
  interface FriendliAIProviderSettings {
  /**
@@ -61,26 +34,26 @@ interface FriendliAIProvider {
  /**
  * Creates a model for text generation.
  */
- (modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ (modelId: FriendliAILanguageModelId): LanguageModelV2;
  /**
  * A model that has not yet been officially released
  */
- beta(modelId: FriendliAIBetaChatModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ beta(modelId: FriendliAIBetaChatModelId): LanguageModelV2;
  /**
  * Creates a chat model for text generation.
  */
- chat(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
- chatModel(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ chat(modelId: FriendliAILanguageModelId): LanguageModelV2;
+ chatModel(modelId: FriendliAILanguageModelId): LanguageModelV2;
  /**
  * Creates a completion model for text generation.
  */
- completion(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
- completionModel(modelId: FriendliAILanguageModelId, settings?: FriendliAIChatSettings): LanguageModelV1;
+ completion(modelId: FriendliAILanguageModelId): LanguageModelV2;
+ completionModel(modelId: FriendliAILanguageModelId): LanguageModelV2;
  /**
  * Creates a text embedding model for text generation.
  */
- embedding(modelId: string & {}, settings?: FriendliAIChatSettings): LanguageModelV1;
- textEmbeddingModel(modelId: string & {}, settings?: FriendliAIChatSettings): LanguageModelV1;
+ embedding(modelId: string & {}): LanguageModelV2;
+ textEmbeddingModel(modelId: string & {}): LanguageModelV2;
  }
  /**
  Create an FriendliAI provider instance.
@@ -90,10 +63,13 @@ declare const friendli: FriendliAIProvider;
 
  declare const friendliaiErrorSchema: z.ZodObject<{
  message: z.ZodString;
+ error: z.ZodRecord<z.ZodString, z.ZodAny>;
  }, "strip", z.ZodTypeAny, {
  message: string;
+ error: Record<string, any>;
  }, {
  message: string;
+ error: Record<string, any>;
  }>;
  type FriendliAIErrorData = z.infer<typeof friendliaiErrorSchema>;
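Both declaration files (`index.d.mts` and `index.d.ts`) carry the same functional change: every model factory now returns a `LanguageModelV2`, and the per-model `FriendliAIChatSettings` argument (endpoint selection, built-in tools, `parallelToolCalls`, `regex`) is removed, so models are created from the model ID alone and request options move to the AI SDK call site. A rough usage sketch under AI SDK v5, assuming the `friendli` provider constant is exported as declared above and using the `generateText` helper from the `ai` package (note the changelog flags this alpha as having broken Friendli compatibility):

```ts
// Sketch of the new call shape: no second `settings` argument, and the
// returned model is a LanguageModelV2 consumable by AI SDK v5 helpers.
import { generateText } from "ai";
import { friendli } from "@friendliai/ai-provider";

// Serverless model ID taken from FriendliAIServerlessModelIds in the typings.
const model = friendli.chat("meta-llama-3.1-8b-instruct");

const { text } = await generateText({
  model,
  prompt: "Summarize the difference between LanguageModelV1 and LanguageModelV2 in one sentence.",
});
console.log(text);
```

Dropping the settings object appears to follow the AI SDK v5 provider model, where request-scoped options are supplied per `generateText`/`streamText` call rather than baked into the model instance.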