@ai-sdk/openai 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +80 -0
- package/dist/index.d.mts +116 -0
- package/dist/index.d.ts +116 -0
- package/dist/index.js +794 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +786 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +67 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,794 @@
|
|
|
1
|
+
"use strict";
// esbuild CommonJS interop helpers: define exports as lazy getters and copy
// properties when converting the ESM source to a CJS module.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines every entry of `all` as an enumerable lazy getter on `target`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties from `from` onto `to` (skipping `except` and keys
// already present), preserving enumerability by installing getters.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Marks the module object as an ES module and copies the exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
// Public exports of this package: the OpenAI facade class and a default
// environment-configured instance.
var src_exports = {};
__export(src_exports, {
  OpenAI: () => OpenAI,
  openai: () => openai
});
module.exports = __toCommonJS(src_exports);
|
|
27
|
+
|
|
28
|
+
// src/openai-facade.ts
|
|
29
|
+
var import_provider_utils5 = require("@ai-sdk/provider-utils");
|
|
30
|
+
|
|
31
|
+
// src/openai-chat-language-model.ts
|
|
32
|
+
var import_provider = require("@ai-sdk/provider");
|
|
33
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
34
|
+
var import_zod2 = require("zod");
|
|
35
|
+
|
|
36
|
+
// src/convert-to-openai-chat-messages.ts
|
|
37
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
38
|
+
// Converts the provider-agnostic prompt format into the message array shape
// expected by the OpenAI chat completions API.
function convertToOpenAIChatMessages(prompt) {
  const openaiMessages = [];
  for (const { role, content } of prompt) {
    if (role === "system") {
      // System content is already a plain string.
      openaiMessages.push({ role: "system", content });
    } else if (role === "user") {
      // User content is a list of text / image parts.
      openaiMessages.push({
        role: "user",
        content: content.map((part) => {
          if (part.type === "text") {
            return { type: "text", text: part.text };
          }
          if (part.type === "image") {
            // URLs pass through; raw bytes become a base64 data URL
            // (falling back to image/jpeg when no mime type is given).
            const url = part.image instanceof URL ? part.image.toString() : `data:${part.mimeType ?? "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`;
            return { type: "image_url", image_url: { url } };
          }
        })
      });
    } else if (role === "assistant") {
      // Collapse text parts into one string and collect tool calls.
      let assistantText = "";
      const functionCalls = [];
      for (const part of content) {
        if (part.type === "text") {
          assistantText += part.text;
        } else if (part.type === "tool-call") {
          functionCalls.push({
            id: part.toolCallId,
            type: "function",
            function: {
              name: part.toolName,
              arguments: JSON.stringify(part.args)
            }
          });
        } else {
          throw new Error(`Unsupported part: ${part}`);
        }
      }
      openaiMessages.push({
        role: "assistant",
        content: assistantText,
        tool_calls: functionCalls.length > 0 ? functionCalls : void 0
      });
    } else if (role === "tool") {
      // One OpenAI "tool" message per tool result.
      for (const toolResponse of content) {
        openaiMessages.push({
          role: "tool",
          tool_call_id: toolResponse.toolCallId,
          content: JSON.stringify(toolResponse.result)
        });
      }
    } else {
      throw new Error(`Unsupported role: ${role}`);
    }
  }
  return openaiMessages;
}
|
|
119
|
+
|
|
120
|
+
// src/map-openai-finish-reason.ts
|
|
121
|
+
// Translates the finish reason reported by OpenAI into the SDK's
// normalized finish-reason vocabulary.
function mapOpenAIFinishReason(finishReason) {
  if (finishReason === "stop") {
    return "stop";
  }
  if (finishReason === "length") {
    return "length";
  }
  if (finishReason === "content_filter") {
    return "content-filter";
  }
  // Legacy function calls and modern tool calls both map to "tool-calls".
  if (finishReason === "function_call" || finishReason === "tool_calls") {
    return "tool-calls";
  }
  // Anything unrecognized (including null / undefined) is reported as "other".
  return "other";
}
|
|
136
|
+
|
|
137
|
+
// src/openai-error.ts
|
|
138
|
+
var import_zod = require("zod");
|
|
139
|
+
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
|
140
|
+
// Zod schema for the error payload returned by the OpenAI API:
// { error: { message, type, param, code } }.
var openAIErrorDataSchema = import_zod.z.object({
  error: import_zod.z.object({
    message: import_zod.z.string(),
    type: import_zod.z.string(),
    param: import_zod.z.any().nullable(),
    code: import_zod.z.string().nullable()
  })
});
// Shared handler that converts failed API responses into errors using the
// schema above; the API's own error message becomes the thrown message.
var openaiFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
  errorSchema: openAIErrorDataSchema,
  errorToMessage: (data) => data.error.message
});
|
|
152
|
+
|
|
153
|
+
// src/openai-chat-language-model.ts
|
|
154
|
+
// Language model implementation for the OpenAI chat completions API,
// conforming to the "v1" language model specification.
var OpenAIChatLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Objects are generated through forced tool calls by default.
    this.defaultObjectGenerationMode = "tool";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the JSON request body for /chat/completions from the standardized
  // call options. `mode` selects plain generation, JSON mode, or a forced
  // tool call; throws for unsupported modes.
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      logit_bias: this.settings.logitBias,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      // temperature is scaled to OpenAI's 0..2 range:
      temperature: (0, import_provider_utils3.scale)({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      // penalties are scaled from -1..1 to OpenAI's -2..2 range:
      frequency_penalty: (0, import_provider_utils3.scale)({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: (0, import_provider_utils3.scale)({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // messages:
      messages: convertToOpenAIChatMessages(prompt)
    };
    switch (type) {
      case "regular": {
        // Normalize an empty tools array to "no tools".
        const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
        return {
          ...baseArgs,
          tools: tools == null ? void 0 : tools.map((tool) => ({
            type: "function",
            function: {
              name: tool.name,
              description: tool.description,
              parameters: tool.parameters
            }
          }))
        };
      }
      case "object-json": {
        return {
          ...baseArgs,
          response_format: { type: "json_object" }
        };
      }
      case "object-tool": {
        // Force the model to call the single provided tool.
        return {
          ...baseArgs,
          tool_choice: { type: "function", function: { name: mode.tool.name } },
          tools: [
            {
              type: "function",
              function: {
                name: mode.tool.name,
                description: mode.tool.description,
                parameters: mode.tool.parameters
              }
            }
          ]
        };
      }
      case "object-grammar": {
        throw new import_provider.UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: POSTs to /chat/completions and maps the first
  // choice to the standardized result shape.
  async doGenerate(options) {
    var _a, _b;
    const args = this.getArgs(options);
    const response = await (0, import_provider_utils3.postJsonToApi)({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
        openAIChatResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: (_a = choice.message.content) != null ? _a : void 0,
      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
        toolCallType: "function",
        toolCallId: toolCall.id,
        toolName: toolCall.function.name,
        args: toolCall.function.arguments
      })),
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  // Streaming generation: POSTs with `stream: true` and transforms the SSE
  // chunk stream into standardized stream parts (text deltas, tool-call
  // deltas, completed tool calls, and a final finish event).
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await (0, import_provider_utils3.postJsonToApi)({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // Accumulates partial tool calls across chunks, keyed by choice index.
    const toolCalls = [];
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h, _i;
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            if (delta.tool_calls != null) {
              for (const toolCallDelta of delta.tool_calls) {
                const index = toolCallDelta.index;
                if (toolCalls[index] == null) {
                  // First chunk for this tool call: id, type and function
                  // name must be present.
                  if (toolCallDelta.type !== "function") {
                    throw new import_provider.InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function' type.`
                    });
                  }
                  if (toolCallDelta.id == null) {
                    throw new import_provider.InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'id' to be a string.`
                    });
                  }
                  if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
                    throw new import_provider.InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function.name' to be a string.`
                    });
                  }
                  toolCalls[index] = {
                    id: toolCallDelta.id,
                    type: "function",
                    function: {
                      name: toolCallDelta.function.name,
                      arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
                    }
                  };
                  // Fix: the original code only stored the first chunk and
                  // `continue`d, which dropped the first argument fragment
                  // from the delta stream and meant tool calls delivered
                  // complete in a single chunk never emitted a `tool-call`
                  // event at all. Emit the initial fragment (and, when it is
                  // already complete JSON, the finished tool call) here.
                  const newToolCall = toolCalls[index];
                  if (newToolCall.function.arguments.length > 0) {
                    controller.enqueue({
                      type: "tool-call-delta",
                      toolCallType: "function",
                      toolCallId: newToolCall.id,
                      toolName: newToolCall.function.name,
                      argsTextDelta: newToolCall.function.arguments
                    });
                    if ((0, import_provider_utils3.isParseableJson)(newToolCall.function.arguments)) {
                      controller.enqueue({
                        type: "tool-call",
                        toolCallType: "function",
                        toolCallId: newToolCall.id,
                        toolName: newToolCall.function.name,
                        args: newToolCall.function.arguments
                      });
                    }
                  }
                  continue;
                }
                // Follow-up chunk: append the argument fragment.
                const toolCall = toolCalls[index];
                if (((_c = toolCallDelta.function) == null ? void 0 : _c.arguments) != null) {
                  toolCall.function.arguments += (_e = (_d = toolCallDelta.function) == null ? void 0 : _d.arguments) != null ? _e : "";
                }
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.id,
                  toolName: toolCall.function.name,
                  argsTextDelta: (_f = toolCallDelta.function.arguments) != null ? _f : ""
                });
                // Emit the completed tool call once the accumulated
                // arguments parse as JSON.
                if (((_g = toolCall.function) == null ? void 0 : _g.name) == null || ((_h = toolCall.function) == null ? void 0 : _h.arguments) == null || !(0, import_provider_utils3.isParseableJson)(toolCall.function.arguments)) {
                  continue;
                }
                controller.enqueue({
                  type: "tool-call",
                  toolCallType: "function",
                  toolCallId: (_i = toolCall.id) != null ? _i : (0, import_provider_utils3.generateId)(),
                  toolName: toolCall.function.name,
                  args: toolCall.function.arguments
                });
              }
            }
          },
          flush(controller) {
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
|
|
407
|
+
// Zod schema for a non-streaming chat completion response. Only the fields
// this provider actually reads are validated.
var openAIChatResponseSchema = import_zod2.z.object({
  choices: import_zod2.z.array(
    import_zod2.z.object({
      message: import_zod2.z.object({
        role: import_zod2.z.literal("assistant"),
        content: import_zod2.z.string().nullable(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            id: import_zod2.z.string(),
            type: import_zod2.z.literal("function"),
            function: import_zod2.z.object({
              name: import_zod2.z.string(),
              arguments: import_zod2.z.string()
            })
          })
        ).optional()
      }),
      index: import_zod2.z.number(),
      finish_reason: import_zod2.z.string().optional().nullable()
    })
  ),
  object: import_zod2.z.literal("chat.completion"),
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  })
});
// Zod schema for one SSE chunk of a streaming chat completion. Most fields
// are optional because deltas arrive incrementally across chunks.
var openaiChatChunkSchema = import_zod2.z.object({
  object: import_zod2.z.literal("chat.completion.chunk"),
  choices: import_zod2.z.array(
    import_zod2.z.object({
      delta: import_zod2.z.object({
        role: import_zod2.z.enum(["assistant"]).optional(),
        content: import_zod2.z.string().nullable().optional(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            index: import_zod2.z.number(),
            id: import_zod2.z.string().optional(),
            type: import_zod2.z.literal("function").optional(),
            function: import_zod2.z.object({
              name: import_zod2.z.string().optional(),
              arguments: import_zod2.z.string().optional()
            })
          })
        ).optional()
      }),
      finish_reason: import_zod2.z.string().nullable().optional(),
      index: import_zod2.z.number()
    })
  ),
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  }).optional().nullable()
});
|
|
462
|
+
|
|
463
|
+
// src/openai-completion-language-model.ts
|
|
464
|
+
var import_provider3 = require("@ai-sdk/provider");
|
|
465
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
466
|
+
var import_zod3 = require("zod");
|
|
467
|
+
|
|
468
|
+
// src/convert-to-openai-completion-prompt.ts
|
|
469
|
+
var import_provider2 = require("@ai-sdk/provider");
|
|
470
|
+
// Flattens the provider-agnostic chat prompt into a single completion-style
// prompt string with "<label>:\n<text>\n\n" turns, plus stop sequences that
// prevent the model from generating the user's next turn.
//
// - prompt: array of { role, content } messages.
// - inputFormat: "prompt" enables a fast path that passes a single user
//   text message through unchanged.
// - user / assistant: labels for the transcript turns.
//
// Returns { prompt } (fast path) or { prompt, stopSequences }.
// Throws InvalidPromptError for a system message that is not the first
// message, and UnsupportedFunctionalityError for images / tool content.
function convertToOpenAICompletionPrompt({
  prompt,
  inputFormat,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a raw prompt consisting of one user text message passes
  // through without transcript formatting.
  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
    return { prompt: prompt[0].content[0].text };
  }
  let text = "";
  // A leading system message becomes a preamble before the transcript.
  if (prompt[0].role === "system") {
    text += `${prompt[0].content}\n\n`;
    prompt = prompt.slice(1);
  }
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        // Fix: this message was previously a plain double-quoted string, so
        // "${content}" appeared literally in the error instead of being
        // interpolated; it is now a template literal.
        throw new import_provider2.InvalidPromptError({
          message: `Unexpected system message in prompt: ${content}`,
          prompt
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              // The completions API cannot accept images.
              throw new import_provider2.UnsupportedFunctionalityError({
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:\n${userMessage}\n\n`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              // The completions API cannot express tool calls.
              throw new import_provider2.UnsupportedFunctionalityError({
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:\n${assistantMessage}\n\n`;
        break;
      }
      case "tool": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "tool messages"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Leave the prompt open on an assistant turn for the model to complete.
  text += `${assistant}:\n`;
  return {
    prompt: text,
    // Stop before the model starts writing the user's next turn.
    stopSequences: [`\n${user}:`]
  };
}
|
|
551
|
+
|
|
552
|
+
// src/openai-completion-language-model.ts
|
|
553
|
+
// Language model implementation for the legacy OpenAI completions API,
// conforming to the "v1" language model specification.
var OpenAICompletionLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Object generation modes are not supported by the completions API.
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the JSON request body for /completions. Tool and object
  // generation modes are rejected because the API cannot express them.
  getArgs({
    mode,
    inputFormat,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      // temperature is scaled to OpenAI's 0..2 range:
      temperature: (0, import_provider_utils4.scale)({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      // penalties are scaled from -1..1 to OpenAI's -2..2 range:
      frequency_penalty: (0, import_provider_utils4.scale)({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: (0, import_provider_utils4.scale)({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stopSequences
    };
    switch (type) {
      case "regular": {
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new import_provider3.UnsupportedFunctionalityError({
            functionality: "tools"
          });
        }
        return baseArgs;
      }
      case "object-json": {
        throw new import_provider3.UnsupportedFunctionalityError({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new import_provider3.UnsupportedFunctionalityError({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new import_provider3.UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: POSTs to /completions and maps the first
  // choice to the standardized result shape.
  async doGenerate(options) {
    const args = this.getArgs(options);
    const response = await (0, import_provider_utils4.postJsonToApi)({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
        openAICompletionResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  // Streaming generation: POSTs with `stream: true` and maps SSE chunks to
  // text deltas plus a final finish event.
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await (0, import_provider_utils4.postJsonToApi)({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      body: {
        // Fix: was `...this.getArgs(options)`, which rebuilt the request
        // args (including prompt conversion) a second time; reuse `args`,
        // consistent with OpenAIChatLanguageModel.doStream.
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
          },
          flush(controller) {
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
|
|
726
|
+
// Zod schema for a non-streaming completions response. Only the fields this
// provider actually reads are validated.
var openAICompletionResponseSchema = import_zod3.z.object({
  choices: import_zod3.z.array(
    import_zod3.z.object({
      text: import_zod3.z.string(),
      finish_reason: import_zod3.z.string()
    })
  ),
  usage: import_zod3.z.object({
    prompt_tokens: import_zod3.z.number(),
    completion_tokens: import_zod3.z.number()
  })
});
// Zod schema for one SSE chunk of a streaming completion; usage only
// appears on the final chunk, so it is optional/nullable.
var openaiCompletionChunkSchema = import_zod3.z.object({
  object: import_zod3.z.literal("text_completion"),
  choices: import_zod3.z.array(
    import_zod3.z.object({
      text: import_zod3.z.string(),
      finish_reason: import_zod3.z.enum(["stop", "length", "content_filter"]).optional().nullable(),
      index: import_zod3.z.number()
    })
  ),
  usage: import_zod3.z.object({
    prompt_tokens: import_zod3.z.number(),
    completion_tokens: import_zod3.z.number()
  }).optional().nullable()
});
|
|
752
|
+
|
|
753
|
+
// src/openai-facade.ts
|
|
754
|
+
// Facade that creates chat and completion language models which share one
// provider configuration (base URL, API key, organization).
var OpenAI = class {
  constructor(options = {}) {
    const { baseUrl, apiKey, organization } = options;
    this.baseUrl = baseUrl;
    this.apiKey = apiKey;
    this.organization = organization;
  }
  // Shared model configuration. `headers` is a thunk so the API key is only
  // resolved (possibly from the OPENAI_API_KEY environment variable) when a
  // request is actually made.
  get baseConfig() {
    return {
      organization: this.organization,
      baseUrl: this.baseUrl ?? "https://api.openai.com/v1",
      headers: () => ({
        Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
          apiKey: this.apiKey,
          environmentVariableName: "OPENAI_API_KEY",
          description: "OpenAI"
        })}`,
        "OpenAI-Organization": this.organization
      })
    };
  }
  // Creates a chat completions model for the given model id.
  chat(modelId, settings = {}) {
    const config = {
      provider: "openai.chat",
      ...this.baseConfig
    };
    return new OpenAIChatLanguageModel(modelId, settings, config);
  }
  // Creates a legacy completions model for the given model id.
  completion(modelId, settings = {}) {
    const config = {
      provider: "openai.completion",
      ...this.baseConfig
    };
    return new OpenAICompletionLanguageModel(modelId, settings, config);
  }
};
|
|
788
|
+
// Default provider instance, configured entirely from the environment.
var openai = new OpenAI();
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design — bundler hint only, never executed)
0 && (module.exports = {
  OpenAI,
  openai
});
//# sourceMappingURL=index.js.map
|