@ai-sdk/openai-compatible 1.0.0-canary.9 → 1.0.0

This diff shows the published contents of the two package versions as they appear in the public registry. It is provided for informational purposes only.
package/dist/index.d.mts CHANGED
@@ -1,19 +1,12 @@
-import { SharedV2ProviderMetadata, LanguageModelV2, EmbeddingModelV2, ImageModelV1, ProviderV2 } from '@ai-sdk/provider';
+import { SharedV2ProviderMetadata, LanguageModelV2, EmbeddingModelV2, ImageModelV2, ProviderV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
-import { z, ZodSchema } from 'zod';
+import { z, ZodType } from 'zod/v4';
 
 type OpenAICompatibleChatModelId = string;
 declare const openaiCompatibleProviderOptions: z.ZodObject<{
-    /**
-     * A unique identifier representing your end-user, which can help the provider to
-     * monitor and detect abuse.
-     */
     user: z.ZodOptional<z.ZodString>;
-}, "strip", z.ZodTypeAny, {
-    user?: string | undefined;
-}, {
-    user?: string | undefined;
-}>;
+    reasoningEffort: z.ZodOptional<z.ZodString>;
+}, z.core.$strip>;
 type OpenAICompatibleProviderOptions = z.infer<typeof openaiCompatibleProviderOptions>;
 
 declare const openaiCompatibleErrorDataSchema: z.ZodObject<{
@@ -21,36 +14,12 @@ declare const openaiCompatibleErrorDataSchema: z.ZodObject<{
         message: z.ZodString;
         type: z.ZodOptional<z.ZodNullable<z.ZodString>>;
         param: z.ZodOptional<z.ZodNullable<z.ZodAny>>;
-        code: z.ZodOptional<z.ZodNullable<z.ZodUnion<[z.ZodString, z.ZodNumber]>>>;
-    }, "strip", z.ZodTypeAny, {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    }, {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    }>;
-}, "strip", z.ZodTypeAny, {
-    error: {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    };
-}, {
-    error: {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    };
-}>;
+        code: z.ZodOptional<z.ZodNullable<z.ZodUnion<readonly [z.ZodString, z.ZodNumber]>>>;
+    }, z.core.$strip>;
+}, z.core.$strip>;
 type OpenAICompatibleErrorData = z.infer<typeof openaiCompatibleErrorDataSchema>;
 type ProviderErrorStructure<T> = {
-    errorSchema: ZodSchema<T>;
+    errorSchema: ZodType<T>;
     errorToMessage: (error: T) => string;
     isRetryable?: (response: Response, error?: T) => boolean;
 };
@@ -71,7 +40,7 @@ type MetadataExtractor = {
      */
     extractMetadata: ({ parsedBody, }: {
         parsedBody: unknown;
-    }) => SharedV2ProviderMetadata | undefined;
+    }) => Promise<SharedV2ProviderMetadata | undefined>;
     /**
      * Creates an extractor for handling streaming responses. The returned object provides
     * methods to process individual chunks and build the final metadata from the accumulated
@@ -106,6 +75,7 @@ type OpenAICompatibleChatConfig = {
         path: string;
     }) => string;
     fetch?: FetchFunction;
+    includeUsage?: boolean;
     errorStructure?: ProviderErrorStructure<any>;
     metadataExtractor?: MetadataExtractor;
     /**
@@ -115,7 +85,7 @@ type OpenAICompatibleChatConfig = {
     /**
      * The supported URLs for the model.
      */
-    getSupportedUrls?: () => Promise<Record<string, RegExp[]>>;
+    supportedUrls?: () => LanguageModelV2['supportedUrls'];
 };
 declare class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
     readonly specificationVersion = "v2";
@@ -127,7 +97,7 @@ declare class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
     constructor(modelId: OpenAICompatibleChatModelId, config: OpenAICompatibleChatConfig);
     get provider(): string;
     private get providerOptionsName();
-    getSupportedUrls(): Promise<Record<string, RegExp[]>>;
+    get supportedUrls(): Record<string, RegExp[]> | PromiseLike<Record<string, RegExp[]>>;
     private getArgs;
     doGenerate(options: Parameters<LanguageModelV2['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>>;
     doStream(options: Parameters<LanguageModelV2['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>>;
@@ -135,41 +105,16 @@ declare class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
 
 type OpenAICompatibleCompletionModelId = string;
 declare const openaiCompatibleCompletionProviderOptions: z.ZodObject<{
-    /**
-     * Echo back the prompt in addition to the completion.
-     */
     echo: z.ZodOptional<z.ZodBoolean>;
-    /**
-     * Modify the likelihood of specified tokens appearing in the completion.
-     *
-     * Accepts a JSON object that maps tokens (specified by their token ID in
-     * the GPT tokenizer) to an associated bias value from -100 to 100.
-     */
-    logitBias: z.ZodOptional<z.ZodRecord<z.ZodNumber, z.ZodNumber>>;
-    /**
-     * The suffix that comes after a completion of inserted text.
-     */
+    logitBias: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodNumber>>;
     suffix: z.ZodOptional<z.ZodString>;
-    /**
-     * A unique identifier representing your end-user, which can help providers to
-     * monitor and detect abuse.
-     */
     user: z.ZodOptional<z.ZodString>;
-}, "strip", z.ZodTypeAny, {
-    user?: string | undefined;
-    echo?: boolean | undefined;
-    logitBias?: Record<number, number> | undefined;
-    suffix?: string | undefined;
-}, {
-    user?: string | undefined;
-    echo?: boolean | undefined;
-    logitBias?: Record<number, number> | undefined;
-    suffix?: string | undefined;
-}>;
+}, z.core.$strip>;
 type OpenAICompatibleCompletionProviderOptions = z.infer<typeof openaiCompatibleCompletionProviderOptions>;
 
 type OpenAICompatibleCompletionConfig = {
     provider: string;
+    includeUsage?: boolean;
     headers: () => Record<string, string | undefined>;
     url: (options: {
         modelId: string;
@@ -180,7 +125,7 @@ type OpenAICompatibleCompletionConfig = {
     /**
      * The supported URLs for the model.
      */
-    getSupportedUrls?: () => Promise<Record<string, RegExp[]>>;
+    supportedUrls?: () => LanguageModelV2['supportedUrls'];
 };
 declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV2 {
     readonly specificationVersion = "v2";
@@ -191,7 +136,7 @@ declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV2
     constructor(modelId: OpenAICompatibleCompletionModelId, config: OpenAICompatibleCompletionConfig);
     get provider(): string;
     private get providerOptionsName();
-    getSupportedUrls(): Promise<Record<string, RegExp[]>>;
+    get supportedUrls(): Record<string, RegExp[]> | PromiseLike<Record<string, RegExp[]>>;
     private getArgs;
     doGenerate(options: Parameters<LanguageModelV2['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>>;
     doStream(options: Parameters<LanguageModelV2['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>>;
@@ -199,23 +144,9 @@ declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV2
 
 type OpenAICompatibleEmbeddingModelId = string;
 declare const openaiCompatibleEmbeddingProviderOptions: z.ZodObject<{
-    /**
-     * The number of dimensions the resulting output embeddings should have.
-     * Only supported in text-embedding-3 and later models.
-     */
     dimensions: z.ZodOptional<z.ZodNumber>;
-    /**
-     * A unique identifier representing your end-user, which can help providers to
-     * monitor and detect abuse.
-     */
     user: z.ZodOptional<z.ZodString>;
-}, "strip", z.ZodTypeAny, {
-    user?: string | undefined;
-    dimensions?: number | undefined;
-}, {
-    user?: string | undefined;
-    dimensions?: number | undefined;
-}>;
+}, z.core.$strip>;
 type OpenAICompatibleEmbeddingProviderOptions = z.infer<typeof openaiCompatibleEmbeddingProviderOptions>;
 
 type OpenAICompatibleEmbeddingConfig = {
@@ -249,17 +180,6 @@ declare class OpenAICompatibleEmbeddingModel implements EmbeddingModelV2<string>
 }
 
 type OpenAICompatibleImageModelId = string;
-interface OpenAICompatibleImageSettings {
-    /**
-    A unique identifier representing your end-user, which can help the provider to
-    monitor and detect abuse.
-    */
-    user?: string;
-    /**
-     * The maximum number of images to generate.
-     */
-    maxImagesPerCall?: number;
-}
 
 type OpenAICompatibleImageModelConfig = {
     provider: string;
@@ -274,15 +194,14 @@ type OpenAICompatibleImageModelConfig = {
         currentDate?: () => Date;
     };
 };
-declare class OpenAICompatibleImageModel implements ImageModelV1 {
+declare class OpenAICompatibleImageModel implements ImageModelV2 {
     readonly modelId: OpenAICompatibleImageModelId;
-    private readonly settings;
     private readonly config;
-    readonly specificationVersion = "v1";
-    get maxImagesPerCall(): number;
+    readonly specificationVersion = "v2";
+    readonly maxImagesPerCall = 10;
     get provider(): string;
-    constructor(modelId: OpenAICompatibleImageModelId, settings: OpenAICompatibleImageSettings, config: OpenAICompatibleImageModelConfig);
-    doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV1['doGenerate']>>>;
+    constructor(modelId: OpenAICompatibleImageModelId, config: OpenAICompatibleImageModelConfig);
+    doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV2['doGenerate']>>>;
 }
 
 interface OpenAICompatibleProvider<CHAT_MODEL_IDS extends string = string, COMPLETION_MODEL_IDS extends string = string, EMBEDDING_MODEL_IDS extends string = string, IMAGE_MODEL_IDS extends string = string> extends Omit<ProviderV2, 'imageModel'> {
@@ -291,7 +210,7 @@ interface OpenAICompatibleProvider<CHAT_MODEL_IDS extends string = string, COMPL
     chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV2;
     completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV2;
     textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV2<string>;
-    imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV1;
+    imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV2;
 }
 interface OpenAICompatibleProviderSettings {
     /**
@@ -322,10 +241,14 @@ interface OpenAICompatibleProviderSettings {
     or to provide a custom fetch implementation for e.g. testing.
      */
     fetch?: FetchFunction;
+    /**
+    Include usage information in streaming responses.
+     */
+    includeUsage?: boolean;
 }
 /**
 Create an OpenAICompatible provider instance.
  */
 declare function createOpenAICompatible<CHAT_MODEL_IDS extends string, COMPLETION_MODEL_IDS extends string, EMBEDDING_MODEL_IDS extends string, IMAGE_MODEL_IDS extends string>(options: OpenAICompatibleProviderSettings): OpenAICompatibleProvider<CHAT_MODEL_IDS, COMPLETION_MODEL_IDS, EMBEDDING_MODEL_IDS, IMAGE_MODEL_IDS>;
 
-export { type MetadataExtractor, OpenAICompatibleChatLanguageModel, type OpenAICompatibleChatModelId, OpenAICompatibleCompletionLanguageModel, type OpenAICompatibleCompletionModelId, type OpenAICompatibleCompletionProviderOptions, OpenAICompatibleEmbeddingModel, type OpenAICompatibleEmbeddingModelId, type OpenAICompatibleEmbeddingProviderOptions, type OpenAICompatibleErrorData, OpenAICompatibleImageModel, type OpenAICompatibleImageSettings, type OpenAICompatibleProvider, type OpenAICompatibleProviderOptions, type OpenAICompatibleProviderSettings, type ProviderErrorStructure, createOpenAICompatible };
+export { type MetadataExtractor, OpenAICompatibleChatLanguageModel, type OpenAICompatibleChatModelId, OpenAICompatibleCompletionLanguageModel, type OpenAICompatibleCompletionModelId, type OpenAICompatibleCompletionProviderOptions, OpenAICompatibleEmbeddingModel, type OpenAICompatibleEmbeddingModelId, type OpenAICompatibleEmbeddingProviderOptions, type OpenAICompatibleErrorData, OpenAICompatibleImageModel, type OpenAICompatibleProvider, type OpenAICompatibleProviderOptions, type OpenAICompatibleProviderSettings, type ProviderErrorStructure, createOpenAICompatible };
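
The headline changes in the declarations above are the new provider-level includeUsage flag, the reasoningEffort provider option, and the move to zod/v4 (which is why the inline option docs and expanded output types disappear from the declarations). A minimal usage sketch follows; the name and baseURL settings and the generateText helper from the 'ai' package are assumptions from outside this diff, and the model id, URL, and option values are placeholders:

// Sketch only: `name`, `baseURL`, and the `generateText` call are assumed from
// outside this diff; `includeUsage`, `user`, and `reasoningEffort` come from the
// declarations above.
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { generateText } from 'ai';

const example = createOpenAICompatible({
  name: 'example',                       // assumed setting, not shown in these hunks
  baseURL: 'https://api.example.com/v1', // assumed setting, not shown in these hunks
  includeUsage: true,                    // new in 1.0.0: request usage data in streaming responses
});

async function main() {
  const { text } = await generateText({
    model: example.chatModel('example-chat-model'),
    prompt: 'Hello!',
    providerOptions: {
      // keyed by the provider name; validated by openaiCompatibleProviderOptions above
      example: { user: 'user-123', reasoningEffort: 'low' },
    },
  });
  console.log(text);
}

main().catch(console.error);

Passing the per-call options under the provider's own name key is the convention suggested by the private providerOptionsName getter in the chat and completion classes; treat the exact key as an assumption if your provider is named differently.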
package/dist/index.d.ts CHANGED
@@ -1,19 +1,12 @@
-import { SharedV2ProviderMetadata, LanguageModelV2, EmbeddingModelV2, ImageModelV1, ProviderV2 } from '@ai-sdk/provider';
+import { SharedV2ProviderMetadata, LanguageModelV2, EmbeddingModelV2, ImageModelV2, ProviderV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
-import { z, ZodSchema } from 'zod';
+import { z, ZodType } from 'zod/v4';
 
 type OpenAICompatibleChatModelId = string;
 declare const openaiCompatibleProviderOptions: z.ZodObject<{
-    /**
-     * A unique identifier representing your end-user, which can help the provider to
-     * monitor and detect abuse.
-     */
     user: z.ZodOptional<z.ZodString>;
-}, "strip", z.ZodTypeAny, {
-    user?: string | undefined;
-}, {
-    user?: string | undefined;
-}>;
+    reasoningEffort: z.ZodOptional<z.ZodString>;
+}, z.core.$strip>;
 type OpenAICompatibleProviderOptions = z.infer<typeof openaiCompatibleProviderOptions>;
 
 declare const openaiCompatibleErrorDataSchema: z.ZodObject<{
@@ -21,36 +14,12 @@ declare const openaiCompatibleErrorDataSchema: z.ZodObject<{
         message: z.ZodString;
         type: z.ZodOptional<z.ZodNullable<z.ZodString>>;
         param: z.ZodOptional<z.ZodNullable<z.ZodAny>>;
-        code: z.ZodOptional<z.ZodNullable<z.ZodUnion<[z.ZodString, z.ZodNumber]>>>;
-    }, "strip", z.ZodTypeAny, {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    }, {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    }>;
-}, "strip", z.ZodTypeAny, {
-    error: {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    };
-}, {
-    error: {
-        message: string;
-        type?: string | null | undefined;
-        code?: string | number | null | undefined;
-        param?: any;
-    };
-}>;
+        code: z.ZodOptional<z.ZodNullable<z.ZodUnion<readonly [z.ZodString, z.ZodNumber]>>>;
+    }, z.core.$strip>;
+}, z.core.$strip>;
 type OpenAICompatibleErrorData = z.infer<typeof openaiCompatibleErrorDataSchema>;
 type ProviderErrorStructure<T> = {
-    errorSchema: ZodSchema<T>;
+    errorSchema: ZodType<T>;
     errorToMessage: (error: T) => string;
     isRetryable?: (response: Response, error?: T) => boolean;
 };
@@ -71,7 +40,7 @@ type MetadataExtractor = {
      */
     extractMetadata: ({ parsedBody, }: {
         parsedBody: unknown;
-    }) => SharedV2ProviderMetadata | undefined;
+    }) => Promise<SharedV2ProviderMetadata | undefined>;
     /**
      * Creates an extractor for handling streaming responses. The returned object provides
     * methods to process individual chunks and build the final metadata from the accumulated
@@ -106,6 +75,7 @@ type OpenAICompatibleChatConfig = {
         path: string;
     }) => string;
     fetch?: FetchFunction;
+    includeUsage?: boolean;
     errorStructure?: ProviderErrorStructure<any>;
     metadataExtractor?: MetadataExtractor;
     /**
@@ -115,7 +85,7 @@ type OpenAICompatibleChatConfig = {
     /**
      * The supported URLs for the model.
      */
-    getSupportedUrls?: () => Promise<Record<string, RegExp[]>>;
+    supportedUrls?: () => LanguageModelV2['supportedUrls'];
 };
 declare class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
     readonly specificationVersion = "v2";
@@ -127,7 +97,7 @@ declare class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
     constructor(modelId: OpenAICompatibleChatModelId, config: OpenAICompatibleChatConfig);
     get provider(): string;
     private get providerOptionsName();
-    getSupportedUrls(): Promise<Record<string, RegExp[]>>;
+    get supportedUrls(): Record<string, RegExp[]> | PromiseLike<Record<string, RegExp[]>>;
     private getArgs;
     doGenerate(options: Parameters<LanguageModelV2['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>>;
     doStream(options: Parameters<LanguageModelV2['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>>;
@@ -135,41 +105,16 @@ declare class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
 
 type OpenAICompatibleCompletionModelId = string;
 declare const openaiCompatibleCompletionProviderOptions: z.ZodObject<{
-    /**
-     * Echo back the prompt in addition to the completion.
-     */
     echo: z.ZodOptional<z.ZodBoolean>;
-    /**
-     * Modify the likelihood of specified tokens appearing in the completion.
-     *
-     * Accepts a JSON object that maps tokens (specified by their token ID in
-     * the GPT tokenizer) to an associated bias value from -100 to 100.
-     */
-    logitBias: z.ZodOptional<z.ZodRecord<z.ZodNumber, z.ZodNumber>>;
-    /**
-     * The suffix that comes after a completion of inserted text.
-     */
+    logitBias: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodNumber>>;
     suffix: z.ZodOptional<z.ZodString>;
-    /**
-     * A unique identifier representing your end-user, which can help providers to
-     * monitor and detect abuse.
-     */
     user: z.ZodOptional<z.ZodString>;
-}, "strip", z.ZodTypeAny, {
-    user?: string | undefined;
-    echo?: boolean | undefined;
-    logitBias?: Record<number, number> | undefined;
-    suffix?: string | undefined;
-}, {
-    user?: string | undefined;
-    echo?: boolean | undefined;
-    logitBias?: Record<number, number> | undefined;
-    suffix?: string | undefined;
-}>;
+}, z.core.$strip>;
 type OpenAICompatibleCompletionProviderOptions = z.infer<typeof openaiCompatibleCompletionProviderOptions>;
 
 type OpenAICompatibleCompletionConfig = {
     provider: string;
+    includeUsage?: boolean;
     headers: () => Record<string, string | undefined>;
     url: (options: {
         modelId: string;
@@ -180,7 +125,7 @@ type OpenAICompatibleCompletionConfig = {
     /**
      * The supported URLs for the model.
     */
-    getSupportedUrls?: () => Promise<Record<string, RegExp[]>>;
+    supportedUrls?: () => LanguageModelV2['supportedUrls'];
 };
 declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV2 {
     readonly specificationVersion = "v2";
@@ -191,7 +136,7 @@ declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV2
     constructor(modelId: OpenAICompatibleCompletionModelId, config: OpenAICompatibleCompletionConfig);
     get provider(): string;
     private get providerOptionsName();
-    getSupportedUrls(): Promise<Record<string, RegExp[]>>;
+    get supportedUrls(): Record<string, RegExp[]> | PromiseLike<Record<string, RegExp[]>>;
     private getArgs;
     doGenerate(options: Parameters<LanguageModelV2['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>>;
     doStream(options: Parameters<LanguageModelV2['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>>;
@@ -199,23 +144,9 @@ declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV2
 
 type OpenAICompatibleEmbeddingModelId = string;
 declare const openaiCompatibleEmbeddingProviderOptions: z.ZodObject<{
-    /**
-     * The number of dimensions the resulting output embeddings should have.
-     * Only supported in text-embedding-3 and later models.
-     */
     dimensions: z.ZodOptional<z.ZodNumber>;
-    /**
-     * A unique identifier representing your end-user, which can help providers to
-     * monitor and detect abuse.
-     */
     user: z.ZodOptional<z.ZodString>;
-}, "strip", z.ZodTypeAny, {
-    user?: string | undefined;
-    dimensions?: number | undefined;
-}, {
-    user?: string | undefined;
-    dimensions?: number | undefined;
-}>;
+}, z.core.$strip>;
 type OpenAICompatibleEmbeddingProviderOptions = z.infer<typeof openaiCompatibleEmbeddingProviderOptions>;
 
 type OpenAICompatibleEmbeddingConfig = {
@@ -249,17 +180,6 @@ declare class OpenAICompatibleEmbeddingModel implements EmbeddingModelV2<string>
 }
 
 type OpenAICompatibleImageModelId = string;
-interface OpenAICompatibleImageSettings {
-    /**
-    A unique identifier representing your end-user, which can help the provider to
-    monitor and detect abuse.
-    */
-    user?: string;
-    /**
-     * The maximum number of images to generate.
-     */
-    maxImagesPerCall?: number;
-}
 
 type OpenAICompatibleImageModelConfig = {
     provider: string;
@@ -274,15 +194,14 @@ type OpenAICompatibleImageModelConfig = {
         currentDate?: () => Date;
     };
 };
-declare class OpenAICompatibleImageModel implements ImageModelV1 {
+declare class OpenAICompatibleImageModel implements ImageModelV2 {
     readonly modelId: OpenAICompatibleImageModelId;
-    private readonly settings;
     private readonly config;
-    readonly specificationVersion = "v1";
-    get maxImagesPerCall(): number;
+    readonly specificationVersion = "v2";
+    readonly maxImagesPerCall = 10;
     get provider(): string;
-    constructor(modelId: OpenAICompatibleImageModelId, settings: OpenAICompatibleImageSettings, config: OpenAICompatibleImageModelConfig);
-    doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV1['doGenerate']>>>;
+    constructor(modelId: OpenAICompatibleImageModelId, config: OpenAICompatibleImageModelConfig);
+    doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV2['doGenerate']>>>;
 }
 
 interface OpenAICompatibleProvider<CHAT_MODEL_IDS extends string = string, COMPLETION_MODEL_IDS extends string = string, EMBEDDING_MODEL_IDS extends string = string, IMAGE_MODEL_IDS extends string = string> extends Omit<ProviderV2, 'imageModel'> {
@@ -291,7 +210,7 @@ interface OpenAICompatibleProvider<CHAT_MODEL_IDS extends string = string, COMPL
     chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV2;
     completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV2;
     textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV2<string>;
-    imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV1;
+    imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV2;
 }
 interface OpenAICompatibleProviderSettings {
     /**
@@ -322,10 +241,14 @@ interface OpenAICompatibleProviderSettings {
     or to provide a custom fetch implementation for e.g. testing.
      */
     fetch?: FetchFunction;
+    /**
+    Include usage information in streaming responses.
+     */
+    includeUsage?: boolean;
 }
 /**
 Create an OpenAICompatible provider instance.
  */
 declare function createOpenAICompatible<CHAT_MODEL_IDS extends string, COMPLETION_MODEL_IDS extends string, EMBEDDING_MODEL_IDS extends string, IMAGE_MODEL_IDS extends string>(options: OpenAICompatibleProviderSettings): OpenAICompatibleProvider<CHAT_MODEL_IDS, COMPLETION_MODEL_IDS, EMBEDDING_MODEL_IDS, IMAGE_MODEL_IDS>;
 
-export { type MetadataExtractor, OpenAICompatibleChatLanguageModel, type OpenAICompatibleChatModelId, OpenAICompatibleCompletionLanguageModel, type OpenAICompatibleCompletionModelId, type OpenAICompatibleCompletionProviderOptions, OpenAICompatibleEmbeddingModel, type OpenAICompatibleEmbeddingModelId, type OpenAICompatibleEmbeddingProviderOptions, type OpenAICompatibleErrorData, OpenAICompatibleImageModel, type OpenAICompatibleImageSettings, type OpenAICompatibleProvider, type OpenAICompatibleProviderOptions, type OpenAICompatibleProviderSettings, type ProviderErrorStructure, createOpenAICompatible };
+export { type MetadataExtractor, OpenAICompatibleChatLanguageModel, type OpenAICompatibleChatModelId, OpenAICompatibleCompletionLanguageModel, type OpenAICompatibleCompletionModelId, type OpenAICompatibleCompletionProviderOptions, OpenAICompatibleEmbeddingModel, type OpenAICompatibleEmbeddingModelId, type OpenAICompatibleEmbeddingProviderOptions, type OpenAICompatibleErrorData, OpenAICompatibleImageModel, type OpenAICompatibleProvider, type OpenAICompatibleProviderOptions, type OpenAICompatibleProviderSettings, type ProviderErrorStructure, createOpenAICompatible };
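
For code that extends these classes directly, the relevant breaking changes are the zod/v4 migration (ZodSchema is replaced by ZodType, object schemas now use z.core.$strip) and the replacement of getSupportedUrls with a supportedUrls getter/config callback. A sketch against the declarations above follows; the error payload shape, endpoint URL, and header value are placeholders, and the provider, url, and headers config fields are assumed to mirror the completion config shown in the diff:

// Sketch only: payload shape, URL, and header are illustrative assumptions;
// ProviderErrorStructure and OpenAICompatibleChatLanguageModel come from the diff above.
import { z } from 'zod/v4';
import {
  OpenAICompatibleChatLanguageModel,
  type ProviderErrorStructure,
} from '@ai-sdk/openai-compatible';

const exampleErrorSchema = z.object({
  error: z.object({
    message: z.string(),
    code: z.union([z.string(), z.number()]).nullish(),
  }),
});
type ExampleError = z.infer<typeof exampleErrorSchema>;

// errorSchema is now typed as a zod/v4 ZodType<T> instead of the removed ZodSchema<T>.
const errorStructure: ProviderErrorStructure<ExampleError> = {
  errorSchema: exampleErrorSchema,
  errorToMessage: error => error.error.message,
  isRetryable: response => response.status === 429,
};

const model = new OpenAICompatibleChatLanguageModel('example-chat-model', {
  provider: 'example.chat',                               // assumed field, mirrors the completion config
  url: ({ path }) => `https://api.example.com/v1${path}`, // assumed signature from the visible modelId/path options
  headers: () => ({ Authorization: 'Bearer <api-key>' }), // placeholder header
  includeUsage: true,                                     // new per-model flag in this release
  errorStructure,
  // supportedUrls replaces getSupportedUrls and may return a plain record or a promise
  supportedUrls: () => ({ 'image/*': [/^https:\/\/.*$/] }),
});

The same supportedUrls and includeUsage changes apply to OpenAICompatibleCompletionLanguageModel, and custom MetadataExtractor implementations must now return a Promise from extractMetadata.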