@ai-sdk/openai-compatible 2.0.15 → 2.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +5 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +23 -6
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +23 -6
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -2
- package/src/chat/convert-openai-compatible-chat-usage.ts +55 -0
- package/src/chat/convert-to-openai-compatible-chat-messages.test.ts +1238 -0
- package/src/chat/convert-to-openai-compatible-chat-messages.ts +246 -0
- package/src/chat/get-response-metadata.ts +15 -0
- package/src/chat/map-openai-compatible-finish-reason.ts +19 -0
- package/src/chat/openai-compatible-api-types.ts +86 -0
- package/src/chat/openai-compatible-chat-language-model.test.ts +3292 -0
- package/src/chat/openai-compatible-chat-language-model.ts +830 -0
- package/src/chat/openai-compatible-chat-options.ts +34 -0
- package/src/chat/openai-compatible-metadata-extractor.ts +48 -0
- package/src/chat/openai-compatible-prepare-tools.test.ts +336 -0
- package/src/chat/openai-compatible-prepare-tools.ts +98 -0
- package/src/completion/convert-openai-compatible-completion-usage.ts +46 -0
- package/src/completion/convert-to-openai-compatible-completion-prompt.ts +93 -0
- package/src/completion/get-response-metadata.ts +15 -0
- package/src/completion/map-openai-compatible-finish-reason.ts +19 -0
- package/src/completion/openai-compatible-completion-language-model.test.ts +773 -0
- package/src/completion/openai-compatible-completion-language-model.ts +390 -0
- package/src/completion/openai-compatible-completion-options.ts +33 -0
- package/src/embedding/openai-compatible-embedding-model.test.ts +171 -0
- package/src/embedding/openai-compatible-embedding-model.ts +166 -0
- package/src/embedding/openai-compatible-embedding-options.ts +21 -0
- package/src/image/openai-compatible-image-model.test.ts +494 -0
- package/src/image/openai-compatible-image-model.ts +205 -0
- package/src/image/openai-compatible-image-settings.ts +1 -0
- package/src/index.ts +27 -0
- package/src/internal/index.ts +4 -0
- package/src/openai-compatible-error.ts +30 -0
- package/src/openai-compatible-provider.test.ts +329 -0
- package/src/openai-compatible-provider.ts +189 -0
- package/src/version.ts +5 -0
package/src/chat/convert-to-openai-compatible-chat-messages.ts
@@ -0,0 +1,246 @@
+import {
+  LanguageModelV3Prompt,
+  SharedV3ProviderMetadata,
+  UnsupportedFunctionalityError,
+} from '@ai-sdk/provider';
+import { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';
+import { convertToBase64 } from '@ai-sdk/provider-utils';
+
+function getOpenAIMetadata(message: {
+  providerOptions?: SharedV3ProviderMetadata;
+}) {
+  return message?.providerOptions?.openaiCompatible ?? {};
+}
+
+function getAudioFormat(mediaType: string): 'wav' | 'mp3' | null {
+  switch (mediaType) {
+    case 'audio/wav':
+      return 'wav';
+    case 'audio/mp3':
+    case 'audio/mpeg':
+      return 'mp3';
+    default:
+      return null;
+  }
+}
+
+export function convertToOpenAICompatibleChatMessages(
+  prompt: LanguageModelV3Prompt,
+): OpenAICompatibleChatPrompt {
+  const messages: OpenAICompatibleChatPrompt = [];
+  for (const { role, content, ...message } of prompt) {
+    const metadata = getOpenAIMetadata({ ...message });
+    switch (role) {
+      case 'system': {
+        messages.push({ role: 'system', content, ...metadata });
+        break;
+      }
+
+      case 'user': {
+        if (content.length === 1 && content[0].type === 'text') {
+          messages.push({
+            role: 'user',
+            content: content[0].text,
+            ...getOpenAIMetadata(content[0]),
+          });
+          break;
+        }
+
+        messages.push({
+          role: 'user',
+          content: content.map(part => {
+            const partMetadata = getOpenAIMetadata(part);
+            switch (part.type) {
+              case 'text': {
+                return { type: 'text', text: part.text, ...partMetadata };
+              }
+              case 'file': {
+                if (part.mediaType.startsWith('image/')) {
+                  const mediaType =
+                    part.mediaType === 'image/*'
+                      ? 'image/jpeg'
+                      : part.mediaType;
+
+                  return {
+                    type: 'image_url',
+                    image_url: {
+                      url:
+                        part.data instanceof URL
+                          ? part.data.toString()
+                          : `data:${mediaType};base64,${convertToBase64(part.data)}`,
+                    },
+                    ...partMetadata,
+                  };
+                }
+
+                if (part.mediaType.startsWith('audio/')) {
+                  if (part.data instanceof URL) {
+                    throw new UnsupportedFunctionalityError({
+                      functionality: 'audio file parts with URLs',
+                    });
+                  }
+
+                  const format = getAudioFormat(part.mediaType);
+                  if (format === null) {
+                    throw new UnsupportedFunctionalityError({
+                      functionality: `audio media type ${part.mediaType}`,
+                    });
+                  }
+
+                  return {
+                    type: 'input_audio',
+                    input_audio: {
+                      data: convertToBase64(part.data),
+                      format,
+                    },
+                    ...partMetadata,
+                  };
+                }
+
+                if (part.mediaType === 'application/pdf') {
+                  if (part.data instanceof URL) {
+                    throw new UnsupportedFunctionalityError({
+                      functionality: 'PDF file parts with URLs',
+                    });
+                  }
+
+                  return {
+                    type: 'file',
+                    file: {
+                      filename: part.filename ?? 'document.pdf',
+                      file_data: `data:application/pdf;base64,${convertToBase64(part.data)}`,
+                    },
+                    ...partMetadata,
+                  };
+                }
+
+                if (part.mediaType.startsWith('text/')) {
+                  const textContent =
+                    part.data instanceof URL
+                      ? part.data.toString()
+                      : typeof part.data === 'string'
+                        ? part.data
+                        : new TextDecoder().decode(part.data);
+
+                  return {
+                    type: 'text',
+                    text: textContent,
+                    ...partMetadata,
+                  };
+                }
+
+                // Unsupported type
+                throw new UnsupportedFunctionalityError({
+                  functionality: `file part media type ${part.mediaType}`,
+                });
+              }
+            }
+          }),
+          ...metadata,
+        });
+
+        break;
+      }
+
+      case 'assistant': {
+        let text = '';
+        const toolCalls: Array<{
+          id: string;
+          type: 'function';
+          function: { name: string; arguments: string };
+          extra_content?: {
+            google?: {
+              thought_signature?: string;
+            };
+          };
+        }> = [];
+
+        for (const part of content) {
+          const partMetadata = getOpenAIMetadata(part);
+          switch (part.type) {
+            case 'text': {
+              text += part.text;
+              break;
+            }
+            case 'tool-call': {
+              // TODO: thoughtSignature should be abstracted once we add support for other providers
+              const thoughtSignature =
+                part.providerOptions?.google?.thoughtSignature;
+              toolCalls.push({
+                id: part.toolCallId,
+                type: 'function',
+                function: {
+                  name: part.toolName,
+                  arguments: JSON.stringify(part.input),
+                },
+                ...partMetadata,
+                // Include extra_content for Google Gemini thought signatures
+                ...(thoughtSignature
+                  ? {
+                      extra_content: {
+                        google: {
+                          thought_signature: String(thoughtSignature),
+                        },
+                      },
+                    }
+                  : {}),
+              });
+              break;
+            }
+          }
+        }
+
+        messages.push({
+          role: 'assistant',
+          content: text,
+          tool_calls: toolCalls.length > 0 ? toolCalls : undefined,
+          ...metadata,
+        });
+
+        break;
+      }
+
+      case 'tool': {
+        for (const toolResponse of content) {
+          if (toolResponse.type === 'tool-approval-response') {
+            continue;
+          }
+
+          const output = toolResponse.output;
+
+          let contentValue: string;
+          switch (output.type) {
+            case 'text':
+            case 'error-text':
+              contentValue = output.value;
+              break;
+            case 'execution-denied':
+              contentValue = output.reason ?? 'Tool execution denied.';
+              break;
+            case 'content':
+            case 'json':
+            case 'error-json':
+              contentValue = JSON.stringify(output.value);
+              break;
+          }
+
+          const toolResponseMetadata = getOpenAIMetadata(toolResponse);
+          messages.push({
+            role: 'tool',
+            tool_call_id: toolResponse.toolCallId,
+            content: contentValue,
+            ...toolResponseMetadata,
+          });
+        }
+        break;
+      }
+
+      default: {
+        const _exhaustiveCheck: never = role;
+        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+      }
+    }
+  }
+
+  return messages;
+}
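For orientation, a minimal usage sketch of the new converter. The prompt literal is inferred from the fields the converter reads (part.type, part.mediaType, part.data, and so on) rather than copied from the LanguageModelV3Prompt definition in @ai-sdk/provider, so treat the exact shape as an assumption; the relative import mirrors in-package usage.

// Sketch only: prompt shape inferred from the converter above, not from the
// LanguageModelV3Prompt type itself.
import { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';

const messages = convertToOpenAICompatibleChatMessages([
  { role: 'system', content: 'You are a helpful assistant.' },
  {
    role: 'user',
    content: [
      { type: 'text', text: 'Describe this image.' },
      {
        type: 'file',
        mediaType: 'image/png',
        data: new URL('https://example.com/cat.png'),
      },
    ],
  },
]);

// Expected result, per the mapping above:
// [
//   { role: 'system', content: 'You are a helpful assistant.' },
//   {
//     role: 'user',
//     content: [
//       { type: 'text', text: 'Describe this image.' },
//       { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } },
//     ],
//   },
// ]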
package/src/chat/get-response-metadata.ts
@@ -0,0 +1,15 @@
+export function getResponseMetadata({
+  id,
+  model,
+  created,
+}: {
+  id?: string | undefined | null;
+  created?: number | undefined | null;
+  model?: string | undefined | null;
+}) {
+  return {
+    id: id ?? undefined,
+    modelId: model ?? undefined,
+    timestamp: created != null ? new Date(created * 1000) : undefined,
+  };
+}
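For reference, getResponseMetadata normalizes the raw response fields; created is treated as Unix seconds and multiplied by 1000 for the Date. The id and model values below are placeholders.

import { getResponseMetadata } from './get-response-metadata';

// Placeholder values; `created` is Unix seconds, as returned by
// OpenAI-compatible chat completion responses.
const metadata = getResponseMetadata({
  id: 'chatcmpl-123',
  model: 'gpt-4o-mini',
  created: 1_700_000_000,
});
// => { id: 'chatcmpl-123', modelId: 'gpt-4o-mini', timestamp: 2023-11-14T22:13:20.000Z }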
package/src/chat/map-openai-compatible-finish-reason.ts
@@ -0,0 +1,19 @@
+import { LanguageModelV3FinishReason } from '@ai-sdk/provider';
+
+export function mapOpenAICompatibleFinishReason(
+  finishReason: string | null | undefined,
+): LanguageModelV3FinishReason['unified'] {
+  switch (finishReason) {
+    case 'stop':
+      return 'stop';
+    case 'length':
+      return 'length';
+    case 'content_filter':
+      return 'content-filter';
+    case 'function_call':
+    case 'tool_calls':
+      return 'tool-calls';
+    default:
+      return 'other';
+  }
+}
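The mapping collapses the provider's snake_case finish reasons into the SDK's unified values; anything unrecognized, including a missing reason, falls through to 'other'. A quick illustration:

import { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';

mapOpenAICompatibleFinishReason('tool_calls'); // 'tool-calls'
mapOpenAICompatibleFinishReason('content_filter'); // 'content-filter'
mapOpenAICompatibleFinishReason(undefined); // 'other'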
package/src/chat/openai-compatible-api-types.ts
@@ -0,0 +1,86 @@
+import { JSONValue } from '@ai-sdk/provider';
+
+export type OpenAICompatibleChatPrompt = Array<OpenAICompatibleMessage>;
+
+export type OpenAICompatibleMessage =
+  | OpenAICompatibleSystemMessage
+  | OpenAICompatibleUserMessage
+  | OpenAICompatibleAssistantMessage
+  | OpenAICompatibleToolMessage;
+
+// Allow for arbitrary additional properties for general purpose
+// provider-metadata-specific extensibility.
+type JsonRecord<T = never> = Record<
+  string,
+  JSONValue | JSONValue[] | T | T[] | undefined
+>;
+
+export interface OpenAICompatibleSystemMessage extends JsonRecord {
+  role: 'system';
+  content: string;
+}
+
+export interface OpenAICompatibleUserMessage
+  extends JsonRecord<OpenAICompatibleContentPart> {
+  role: 'user';
+  content: string | Array<OpenAICompatibleContentPart>;
+}
+
+export type OpenAICompatibleContentPart =
+  | OpenAICompatibleContentPartText
+  | OpenAICompatibleContentPartImage
+  | OpenAICompatibleContentPartInputAudio
+  | OpenAICompatibleContentPartFile;
+
+export interface OpenAICompatibleContentPartText extends JsonRecord {
+  type: 'text';
+  text: string;
+}
+
+export interface OpenAICompatibleContentPartImage extends JsonRecord {
+  type: 'image_url';
+  image_url: { url: string };
+}
+
+// Audio parts for Google API
+export interface OpenAICompatibleContentPartInputAudio extends JsonRecord {
+  type: 'input_audio';
+  input_audio: { data: string; format: 'wav' | 'mp3' };
+}
+
+// File parts for Google API
+export interface OpenAICompatibleContentPartFile extends JsonRecord {
+  type: 'file';
+  file: { filename: string; file_data: string };
+}
+
+export interface OpenAICompatibleAssistantMessage
+  extends JsonRecord<OpenAICompatibleMessageToolCall> {
+  role: 'assistant';
+  content?: string | null;
+  tool_calls?: Array<OpenAICompatibleMessageToolCall>;
+}
+
+export interface OpenAICompatibleMessageToolCall extends JsonRecord {
+  type: 'function';
+  id: string;
+  function: {
+    arguments: string;
+    name: string;
+  };
+  /**
+   * Additional content for provider-specific features.
+   * Used by Google Gemini for thought signatures via OpenAI compatibility.
+   */
+  extra_content?: {
+    google?: {
+      thought_signature?: string;
+    };
+  };
+}
+
+export interface OpenAICompatibleToolMessage extends JsonRecord {
+  role: 'tool';
+  content: string;
+  tool_call_id: string;
+}
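To show how the message and tool-call types compose, here is a hypothetical tool-call round trip expressed with the new types; the tool name, call id, and payloads are illustrative only.

import type { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';

// Illustrative values only: the tool name, call id, and JSON payloads are made up.
const prompt: OpenAICompatibleChatPrompt = [
  { role: 'user', content: 'What is the weather in Berlin?' },
  {
    role: 'assistant',
    content: null,
    tool_calls: [
      {
        type: 'function',
        id: 'call_1',
        function: { name: 'getWeather', arguments: '{"city":"Berlin"}' },
      },
    ],
  },
  { role: 'tool', tool_call_id: 'call_1', content: '{"temperatureCelsius":18}' },
];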