@ai-sdk/openai 0.0.24 → 0.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +4 -1
- package/dist/index.d.ts +4 -1
- package/dist/index.js +11 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +11 -4
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +163 -0
- package/internal/dist/index.d.ts +163 -0
- package/internal/dist/index.js +971 -0
- package/internal/dist/index.js.map +1 -0
- package/internal/dist/index.mjs +965 -0
- package/internal/dist/index.mjs.map +1 -0
- package/package.json +10 -3
|
@@ -0,0 +1,965 @@
|
|
|
1
|
+
// src/openai-chat-language-model.ts
|
|
2
|
+
import {
|
|
3
|
+
InvalidResponseDataError,
|
|
4
|
+
UnsupportedFunctionalityError
|
|
5
|
+
} from "@ai-sdk/provider";
|
|
6
|
+
import {
|
|
7
|
+
createEventSourceResponseHandler,
|
|
8
|
+
createJsonResponseHandler,
|
|
9
|
+
generateId,
|
|
10
|
+
isParsableJson,
|
|
11
|
+
postJsonToApi
|
|
12
|
+
} from "@ai-sdk/provider-utils";
|
|
13
|
+
import { z as z2 } from "zod";
|
|
14
|
+
|
|
15
|
+
// src/convert-to-openai-chat-messages.ts
|
|
16
|
+
import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
|
|
17
|
+
/**
 * Converts an AI SDK prompt (array of { role, content } messages) into the
 * message shape expected by the OpenAI chat completions API.
 *
 * @param prompt - AI SDK language-model prompt messages.
 * @returns Array of OpenAI chat API messages.
 * @throws {Error} For roles or assistant content parts that are not supported.
 */
function convertToOpenAIChatMessages(prompt) {
  // Maps a single user content part to the OpenAI content-part shape.
  // Unknown part types fall through and yield `undefined`, matching the
  // original switch-without-default behavior.
  const toUserPart = (part) => {
    if (part.type === "text") {
      return { type: "text", text: part.text };
    }
    if (part.type === "image") {
      // URLs pass through; binary images become base64 data URLs.
      // assumes part.image is a Uint8Array when it is not a URL — TODO confirm
      const url = part.image instanceof URL
        ? part.image.toString()
        : `data:${part.mimeType ?? "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`;
      return { type: "image_url", image_url: { url } };
    }
  };
  const messages = [];
  for (const { role, content } of prompt) {
    if (role === "system") {
      messages.push({ role: "system", content });
    } else if (role === "user") {
      // Single text part: send plain string content (more compact request).
      if (content.length === 1 && content[0].type === "text") {
        messages.push({ role: "user", content: content[0].text });
      } else {
        messages.push({ role: "user", content: content.map(toUserPart) });
      }
    } else if (role === "assistant") {
      // Concatenate all text parts; collect tool calls separately.
      let text = "";
      const toolCalls = [];
      for (const part of content) {
        if (part.type === "text") {
          text += part.text;
        } else if (part.type === "tool-call") {
          toolCalls.push({
            id: part.toolCallId,
            type: "function",
            function: {
              name: part.toolName,
              arguments: JSON.stringify(part.args)
            }
          });
        } else {
          throw new Error(`Unsupported part: ${part}`);
        }
      }
      messages.push({
        role: "assistant",
        content: text,
        // Omit tool_calls entirely when there are none.
        tool_calls: toolCalls.length > 0 ? toolCalls : void 0
      });
    } else if (role === "tool") {
      // Each tool result becomes its own "tool" message.
      for (const toolResponse of content) {
        messages.push({
          role: "tool",
          tool_call_id: toolResponse.toolCallId,
          content: JSON.stringify(toolResponse.result)
        });
      }
    } else {
      throw new Error(`Unsupported role: ${role}`);
    }
  }
  return messages;
}
|
|
102
|
+
|
|
103
|
+
// src/map-openai-chat-logprobs.ts
|
|
104
|
+
/**
 * Maps OpenAI chat-completion logprobs into the AI SDK logprobs format.
 *
 * @param logprobs - `logprobs` object from an OpenAI chat response; may be
 *   null/undefined, and its `content` array may itself be null.
 * @returns Array of { token, logprob, topLogprobs } entries, or undefined
 *   when no logprobs content is present.
 */
function mapOpenAIChatLogProbsOutput(logprobs) {
  const content = logprobs?.content;
  if (content == null) {
    return void 0;
  }
  return content.map(({ token, logprob, top_logprobs }) => ({
    token,
    logprob,
    // `top_logprobs` may be null/missing; normalize to an empty array.
    topLogprobs: top_logprobs
      ? top_logprobs.map(({ token: t, logprob: lp }) => ({ token: t, logprob: lp }))
      : []
  }));
}
|
|
115
|
+
|
|
116
|
+
// src/map-openai-finish-reason.ts
|
|
117
|
+
/**
 * Translates an OpenAI `finish_reason` string into the AI SDK finish reason.
 *
 * @param finishReason - Raw finish reason from the API (may be undefined).
 * @returns "stop" | "length" | "content-filter" | "tool-calls" | "unknown".
 */
function mapOpenAIFinishReason(finishReason) {
  const FINISH_REASON_MAP = {
    stop: "stop",
    length: "length",
    content_filter: "content-filter",
    // Both legacy function calls and tool calls map to "tool-calls".
    function_call: "tool-calls",
    tool_calls: "tool-calls"
  };
  // Object.hasOwn guards against inherited keys (e.g. "toString") so that
  // unrecognized or missing values always fall back to "unknown".
  return Object.hasOwn(FINISH_REASON_MAP, finishReason)
    ? FINISH_REASON_MAP[finishReason]
    : "unknown";
}
|
|
132
|
+
|
|
133
|
+
// src/openai-error.ts
|
|
134
|
+
import { z } from "zod";
|
|
135
|
+
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
|
|
136
|
+
// Shape of the error payload returned by the OpenAI API:
// { error: { message, type, param, code } }.
var openAIErrorDataSchema = z.object({
  error: z.object({
    message: z.string(),
    type: z.string(),
    param: z.any().nullable(),
    code: z.string().nullable()
  })
});
// Shared handler for non-2xx responses: parses the payload with the schema
// above and surfaces the API-provided message as the error message.
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: openAIErrorDataSchema,
  errorToMessage: (data) => data.error.message
});
|
|
148
|
+
|
|
149
|
+
// src/openai-chat-language-model.ts
|
|
150
|
+
// Implements the AI SDK LanguageModelV1 specification on top of the OpenAI
// chat completions API (`/chat/completions`), for both one-shot generation
// and streaming (including streamed tool calls).
var OpenAIChatLanguageModel = class {
  /**
   * @param modelId - OpenAI chat model id (e.g. "gpt-4").
   * @param settings - model-specific settings (logitBias, logprobs, user).
   * @param config - provider config: provider name, url builder, headers
   *   factory, and compatibility mode ("strict" enables stream_options).
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Chat models generate objects via forced tool calls by default.
    this.defaultObjectGenerationMode = "tool";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the JSON request body for the chat completions endpoint from the
  // standardized call options plus the generation mode.
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    const type = mode.type;
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      logit_bias: this.settings.logitBias,
      // `logprobs` is a boolean flag on the API; a numeric setting implies true.
      logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number",
      // A numeric setting selects how many top logprobs to return;
      // `true` maps to 0 (logprobs on, no top list); otherwise omitted.
      top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      seed,
      // messages:
      messages: convertToOpenAIChatMessages(prompt)
    };
    switch (type) {
      case "regular": {
        // Plain generation, optionally with caller-provided tools.
        return { ...baseArgs, ...prepareToolsAndToolChoice(mode) };
      }
      case "object-json": {
        return {
          ...baseArgs,
          response_format: { type: "json_object" }
        };
      }
      case "object-tool": {
        // Force the model to call the single provided tool.
        return {
          ...baseArgs,
          tool_choice: { type: "function", function: { name: mode.tool.name } },
          tools: [
            {
              type: "function",
              function: {
                name: mode.tool.name,
                description: mode.tool.description,
                parameters: mode.tool.parameters
              }
            }
          ]
        };
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  /**
   * Non-streaming generation: POSTs the request and maps the first choice
   * into the AI SDK result shape (text, tool calls, usage, logprobs).
   */
  async doGenerate(options) {
    var _a, _b;
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAIChatResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    // Split the args so raw prompt and raw settings can be reported separately.
    const { messages: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: (_a = choice.message.content) != null ? _a : void 0,
      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
        var _a2;
        return {
          toolCallType: "function",
          // Some responses omit the tool call id; synthesize one if needed.
          toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
          toolName: toolCall.function.name,
          args: toolCall.function.arguments
        };
      }),
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: [],
      logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs)
    };
  }
  /**
   * Streaming generation: POSTs with `stream: true` and transforms the SSE
   * chunk stream into AI SDK stream parts (text deltas, tool-call deltas,
   * completed tool calls, and a final "finish" part).
   */
  async doStream(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true,
        // only include stream_options when in strict compatibility mode:
        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // Partial tool calls accumulated across chunks, keyed by choice index.
    const toolCalls = [];
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    let logprobs;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
            // Chunk failed schema parsing: surface as an error stream part.
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            // API-level error payloads also surface as error stream parts.
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            // Usage typically arrives on a trailing chunk; remember the latest.
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            // Accumulate streamed logprobs for the final "finish" part.
            const mappedLogprobs = mapOpenAIChatLogProbsOutput(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              if (logprobs === void 0)
                logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
            if (delta.tool_calls != null) {
              for (const toolCallDelta of delta.tool_calls) {
                const index = toolCallDelta.index;
                // First chunk for this tool call: validate and initialize it.
                if (toolCalls[index] == null) {
                  if (toolCallDelta.type !== "function") {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function' type.`
                    });
                  }
                  if (toolCallDelta.id == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'id' to be a string.`
                    });
                  }
                  if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function.name' to be a string.`
                    });
                  }
                  toolCalls[index] = {
                    id: toolCallDelta.id,
                    type: "function",
                    function: {
                      name: toolCallDelta.function.name,
                      arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
                    }
                  };
                  const toolCall2 = toolCalls[index];
                  // Some responses deliver a complete tool call in the first
                  // chunk: if the arguments already parse as JSON, emit both
                  // the delta and the finished tool call immediately.
                  if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
                    controller.enqueue({
                      type: "tool-call-delta",
                      toolCallType: "function",
                      toolCallId: toolCall2.id,
                      toolName: toolCall2.function.name,
                      argsTextDelta: toolCall2.function.arguments
                    });
                    controller.enqueue({
                      type: "tool-call",
                      toolCallType: "function",
                      toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
                      toolName: toolCall2.function.name,
                      args: toolCall2.function.arguments
                    });
                  }
                  continue;
                }
                // Subsequent chunk: append argument text to the existing call.
                const toolCall = toolCalls[index];
                if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
                  toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
                }
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.id,
                  toolName: toolCall.function.name,
                  argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
                });
                // Once the accumulated arguments form valid JSON, emit the
                // completed tool call.
                if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
                  controller.enqueue({
                    type: "tool-call",
                    toolCallType: "function",
                    toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
                    toolName: toolCall.function.name,
                    args: toolCall.function.arguments
                  });
                }
              }
            }
          },
          // Emit the accumulated finish reason, logprobs, and usage once the
          // source stream closes.
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
};
|
|
427
|
+
// Schema for a successful (non-streaming) chat completion response,
// covering the subset of fields that doGenerate reads above.
var openAIChatResponseSchema = z2.object({
  choices: z2.array(
    z2.object({
      message: z2.object({
        role: z2.literal("assistant"),
        content: z2.string().nullable().optional(),
        tool_calls: z2.array(
          z2.object({
            // `id` may be absent; doGenerate falls back to generateId().
            id: z2.string().optional().nullable(),
            type: z2.literal("function"),
            function: z2.object({
              name: z2.string(),
              // Raw JSON-encoded arguments string.
              arguments: z2.string()
            })
          })
        ).optional()
      }),
      index: z2.number(),
      logprobs: z2.object({
        content: z2.array(
          z2.object({
            token: z2.string(),
            logprob: z2.number(),
            top_logprobs: z2.array(
              z2.object({
                token: z2.string(),
                logprob: z2.number()
              })
            )
          })
        ).nullable()
      }).nullable().optional(),
      finish_reason: z2.string().optional().nullable()
    })
  ),
  usage: z2.object({
    prompt_tokens: z2.number(),
    completion_tokens: z2.number()
  })
});
|
|
467
|
+
// Schema for one SSE chunk of a streaming chat completion. A chunk is either
// a choices/usage payload or an OpenAI error payload (hence the union).
var openaiChatChunkSchema = z2.union([
  z2.object({
    choices: z2.array(
      z2.object({
        // Incremental delta; most fields are optional because each chunk
        // carries only what changed.
        delta: z2.object({
          role: z2.enum(["assistant"]).optional(),
          content: z2.string().nullish(),
          tool_calls: z2.array(
            z2.object({
              // Index correlates partial tool-call chunks with each other.
              index: z2.number(),
              id: z2.string().nullish(),
              type: z2.literal("function").optional(),
              function: z2.object({
                name: z2.string().nullish(),
                arguments: z2.string().nullish()
              })
            })
          ).nullish()
        }),
        logprobs: z2.object({
          content: z2.array(
            z2.object({
              token: z2.string(),
              logprob: z2.number(),
              top_logprobs: z2.array(
                z2.object({
                  token: z2.string(),
                  logprob: z2.number()
                })
              )
            })
          ).nullable()
        }).nullish(),
        finish_reason: z2.string().nullable().optional(),
        index: z2.number()
      })
    ),
    // Usage is only present on trailing chunks (with stream_options enabled).
    usage: z2.object({
      prompt_tokens: z2.number(),
      completion_tokens: z2.number()
    }).nullish()
  }),
  openAIErrorDataSchema
]);
|
|
511
|
+
/**
 * Builds the `tools` / `tool_choice` request fields for "regular" mode.
 *
 * @param mode - Generation mode carrying optional `tools` and `toolChoice`.
 * @returns {{ tools: object[]|undefined, tool_choice: any }}
 * @throws {Error} For unrecognized tool choice types.
 */
function prepareToolsAndToolChoice(mode) {
  // Treat a missing or empty tools array the same: no tools at all.
  if (!mode.tools?.length) {
    return { tools: void 0, tool_choice: void 0 };
  }
  const tools = mode.tools.map(({ name, description, parameters }) => ({
    type: "function",
    function: { name, description, parameters }
  }));
  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
    return { tools, tool_choice: void 0 };
  }
  const type = toolChoice.type;
  // "auto" / "none" / "required" pass straight through to the API.
  if (type === "auto" || type === "none" || type === "required") {
    return { tools, tool_choice: type };
  }
  // "tool" forces a specific named function.
  if (type === "tool") {
    return {
      tools,
      tool_choice: {
        type: "function",
        function: { name: toolChoice.toolName }
      }
    };
  }
  throw new Error(`Unsupported tool choice type: ${type}`);
}
|
|
551
|
+
|
|
552
|
+
// src/openai-completion-language-model.ts
|
|
553
|
+
import {
|
|
554
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError3
|
|
555
|
+
} from "@ai-sdk/provider";
|
|
556
|
+
import {
|
|
557
|
+
createEventSourceResponseHandler as createEventSourceResponseHandler2,
|
|
558
|
+
createJsonResponseHandler as createJsonResponseHandler2,
|
|
559
|
+
postJsonToApi as postJsonToApi2
|
|
560
|
+
} from "@ai-sdk/provider-utils";
|
|
561
|
+
import { z as z3 } from "zod";
|
|
562
|
+
|
|
563
|
+
// src/convert-to-openai-completion-prompt.ts
|
|
564
|
+
import {
|
|
565
|
+
InvalidPromptError,
|
|
566
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError2
|
|
567
|
+
} from "@ai-sdk/provider";
|
|
568
|
+
/**
 * Converts an AI SDK prompt into a single-string OpenAI completion prompt
 * using a simple chat transcript format ("user:" / "assistant:" turns).
 *
 * @param options.prompt - AI SDK language-model prompt messages.
 * @param options.inputFormat - "prompt" enables the fast path that forwards a
 *   single user text message unchanged.
 * @param options.user - Label for user turns (default "user").
 * @param options.assistant - Label for assistant turns (default "assistant").
 * @returns {{ prompt: string, stopSequences?: string[] }} The flattened
 *   prompt and a stop sequence that ends generation at the next user turn.
 * @throws {InvalidPromptError} When a system message appears after the first
 *   message.
 * @throws {UnsupportedFunctionalityError} For image parts, tool-call parts,
 *   or tool messages (not representable in a plain completion prompt).
 */
function convertToOpenAICompletionPrompt({
  prompt,
  inputFormat,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a bare single-text user prompt is passed through verbatim.
  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
    return { prompt: prompt[0].content[0].text };
  }
  let text = "";
  // A leading system message becomes a preamble before the transcript.
  if (prompt[0].role === "system") {
    text += `${prompt[0].content}\n\n`;
    prompt = prompt.slice(1);
  }
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        // FIX: this message was previously a plain double-quoted string, so
        // "${content}" was emitted literally instead of being interpolated.
        throw new InvalidPromptError({
          message: `Unexpected system message in prompt: ${content}`,
          prompt
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new UnsupportedFunctionalityError2({
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:\n${userMessage}\n\n`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError2({
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:\n${assistantMessage}\n\n`;
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError2({
          functionality: "tool messages"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Open an assistant turn for the model to complete, and stop generation
  // as soon as it starts a new user turn.
  text += `${assistant}:\n`;
  return {
    prompt: text,
    stopSequences: [`\n${user}:`]
  };
}
|
|
649
|
+
|
|
650
|
+
// src/map-openai-completion-logprobs.ts
|
|
651
|
+
/**
 * Maps OpenAI completion-API logprobs (parallel arrays of tokens,
 * token_logprobs, and top_logprobs records) into the AI SDK logprobs format.
 *
 * @param logprobs - `logprobs` object from a completion response, or null.
 * @returns Array of { token, logprob, topLogprobs }, or undefined when absent.
 */
function mapOpenAICompletionLogProbs(logprobs) {
  if (logprobs == null) {
    return void 0;
  }
  return logprobs.tokens.map((token, i) => {
    // `top_logprobs` may be null; normalize each entry record to a list.
    const topLogprobs = logprobs.top_logprobs
      ? Object.entries(logprobs.top_logprobs[i]).map(([t, lp]) => ({
          token: t,
          logprob: lp
        }))
      : [];
    return {
      token,
      logprob: logprobs.token_logprobs[i],
      topLogprobs
    };
  });
}
|
|
663
|
+
|
|
664
|
+
// src/openai-completion-language-model.ts
|
|
665
|
+
// Implements the AI SDK LanguageModelV1 specification on top of the legacy
// OpenAI completions API (`/completions`). Object generation modes are not
// supported by this endpoint.
var OpenAICompletionLanguageModel = class {
  /**
   * @param modelId - OpenAI completion model id (e.g. "gpt-3.5-turbo-instruct").
   * @param settings - model-specific settings (echo, logitBias, logprobs,
   *   suffix, user).
   * @param config - provider config: provider name, baseURL, headers factory,
   *   and compatibility mode ("strict" enables stream_options).
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // The completions API has no JSON/tool mode; object generation is unsupported.
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the JSON request body for the completions endpoint from the
  // standardized call options plus the generation mode.
  getArgs({
    mode,
    inputFormat,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      // A numeric setting selects how many top logprobs to return;
      // `true` maps to 0 (logprobs on, no top list); otherwise omitted.
      logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stopSequences
    };
    switch (type) {
      case "regular": {
        // The completions endpoint cannot execute tools; reject early.
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new UnsupportedFunctionalityError3({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
          throw new UnsupportedFunctionalityError3({
            functionality: "toolChoice"
          });
        }
        return baseArgs;
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError3({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError3({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError3({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  /**
   * Non-streaming generation: POSTs the request and maps the first choice
   * into the AI SDK result shape (text, usage, finish reason, logprobs).
   */
  async doGenerate(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi2({
      url: `${this.config.baseURL}/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler2(
        openAICompletionResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    // Split the args so raw prompt and raw settings can be reported separately.
    const { prompt: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
  /**
   * Streaming generation: POSTs with `stream: true` and transforms the SSE
   * chunk stream into AI SDK stream parts (text deltas plus a final
   * "finish" part carrying usage and logprobs).
   */
  async doStream(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi2({
      url: `${this.config.baseURL}/completions`,
      headers: this.config.headers(),
      body: {
        // FIX: reuse the args computed above instead of calling
        // this.getArgs(options) a second time, which redundantly re-ran the
        // prompt conversion (the chat model already spreads `...args`).
        ...args,
        stream: true,
        // only include stream_options when in strict compatibility mode:
        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler2(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    let logprobs;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // Chunk failed schema parsing: surface as an error stream part.
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            // API-level error payloads also surface as error stream parts.
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            // Usage typically arrives on a trailing chunk; remember the latest.
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
            // Accumulate streamed logprobs for the final "finish" part.
            const mappedLogprobs = mapOpenAICompletionLogProbs(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              if (logprobs === void 0)
                logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
          },
          // Emit the accumulated finish reason, logprobs, and usage once the
          // source stream closes.
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
};
|
|
853
|
+
// Schema for a successful (non-streaming) completion response, covering the
// subset of fields that doGenerate reads above.
var openAICompletionResponseSchema = z3.object({
  choices: z3.array(
    z3.object({
      text: z3.string(),
      finish_reason: z3.string(),
      // Completion-API logprobs are parallel arrays indexed by token position.
      logprobs: z3.object({
        tokens: z3.array(z3.string()),
        token_logprobs: z3.array(z3.number()),
        top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
      }).nullable().optional()
    })
  ),
  usage: z3.object({
    prompt_tokens: z3.number(),
    completion_tokens: z3.number()
  })
});
|
|
870
|
+
// Schema for one SSE chunk of a streaming completion. A chunk is either a
// choices/usage payload or an OpenAI error payload (hence the union).
var openaiCompletionChunkSchema = z3.union([
  z3.object({
    choices: z3.array(
      z3.object({
        text: z3.string(),
        finish_reason: z3.string().nullish(),
        index: z3.number(),
        // Completion-API logprobs are parallel arrays indexed by token position.
        logprobs: z3.object({
          tokens: z3.array(z3.string()),
          token_logprobs: z3.array(z3.number()),
          top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
        }).nullable().optional()
      })
    ),
    // Usage is only present on trailing chunks (with stream_options enabled).
    usage: z3.object({
      prompt_tokens: z3.number(),
      completion_tokens: z3.number()
    }).optional().nullable()
  }),
  openAIErrorDataSchema
]);
|
|
891
|
+
|
|
892
|
+
// src/openai-embedding-model.ts
|
|
893
|
+
import {
|
|
894
|
+
TooManyEmbeddingValuesForCallError
|
|
895
|
+
} from "@ai-sdk/provider";
|
|
896
|
+
import {
|
|
897
|
+
createJsonResponseHandler as createJsonResponseHandler3,
|
|
898
|
+
postJsonToApi as postJsonToApi3
|
|
899
|
+
} from "@ai-sdk/provider-utils";
|
|
900
|
+
import { z as z4 } from "zod";
|
|
901
|
+
// Implements the AI SDK EmbeddingModelV1 specification on top of the OpenAI
// embeddings API (`/embeddings`).
var OpenAIEmbeddingModel = class {
  /**
   * @param modelId - OpenAI embedding model id (e.g. "text-embedding-3-small").
   * @param settings - embedding settings (dimensions, user, batching hints).
   * @param config - provider config: provider name, baseURL, headers factory.
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Maximum number of values per embeddings request; defaults to 2048.
  get maxEmbeddingsPerCall() {
    return this.settings.maxEmbeddingsPerCall ?? 2048;
  }
  // Parallel requests are allowed unless explicitly disabled in settings.
  get supportsParallelCalls() {
    return this.settings.supportsParallelCalls ?? true;
  }
  /**
   * Embeds a batch of values via the embeddings endpoint.
   *
   * @param options.values - Strings to embed (at most maxEmbeddingsPerCall).
   * @param options.abortSignal - Optional signal to cancel the request.
   * @returns {{ embeddings: number[][], rawResponse: { headers } }}
   * @throws {TooManyEmbeddingValuesForCallError} When the batch is too large.
   */
  async doEmbed({
    values,
    abortSignal
  }) {
    // Guard against oversized batches before hitting the API.
    if (values.length > this.maxEmbeddingsPerCall) {
      throw new TooManyEmbeddingValuesForCallError({
        provider: this.provider,
        modelId: this.modelId,
        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
        values
      });
    }
    const requestBody = {
      model: this.modelId,
      input: values,
      encoding_format: "float",
      dimensions: this.settings.dimensions,
      user: this.settings.user
    };
    const { responseHeaders, value: response } = await postJsonToApi3({
      url: `${this.config.baseURL}/embeddings`,
      headers: this.config.headers(),
      body: requestBody,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler3(
        openaiTextEmbeddingResponseSchema
      ),
      abortSignal
    });
    const embeddings = response.data.map(({ embedding }) => embedding);
    return {
      embeddings,
      rawResponse: { headers: responseHeaders }
    };
  }
};
|
|
953
|
+
// Schema for an embeddings response; only the embedding vectors are read.
var openaiTextEmbeddingResponseSchema = z4.object({
  data: z4.array(
    z4.object({
      embedding: z4.array(z4.number())
    })
  )
});
|
|
960
|
+
// Public entry points of the internal OpenAI provider bundle.
export {
  OpenAIChatLanguageModel,
  OpenAICompletionLanguageModel,
  OpenAIEmbeddingModel
};
|
|
965
|
+
//# sourceMappingURL=index.mjs.map
|