@ai-sdk/openai 2.0.0-canary.1 → 2.0.0-canary.2
This diff shows the published contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +9 -0
- package/dist/index.js +113 -253
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +111 -253
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +113 -253
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +111 -253
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,14 @@
 # @ai-sdk/openai
 
+## 2.0.0-canary.2
+
+### Patch Changes
+
+- Updated dependencies [c57e248]
+- Updated dependencies [33f4a6a]
+  - @ai-sdk/provider@2.0.0-canary.1
+  - @ai-sdk/provider-utils@3.0.0-canary.2
+
 ## 2.0.0-canary.1
 
 ### Patch Changes
package/dist/index.js
CHANGED
@@ -90,7 +90,7 @@ function convertToOpenAIChatMessages({
 image_url: {
 url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
 // OpenAI specific extension: image detail
-detail: (_c = (_b = part.
+detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
 }
 };
 }
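The hunk above reads the OpenAI image detail hint from `providerOptions.openai.imageDetail` on the message part, replacing the truncated lookup removed above. A rough usage sketch, assuming the canary `generateText` API from the `ai` package; the model id and image URL are placeholders:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai("gpt-4o"),
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "What is in this picture?" },
        {
          type: "image",
          image: new URL("https://example.com/photo.jpg"),
          // picked up as part.providerOptions?.openai?.imageDetail in the code above
          providerOptions: { openai: { imageDetail: "low" } },
        },
      ],
    },
  ],
});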
@@ -269,17 +269,16 @@ function getResponseMetadata({
 // src/openai-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
 function prepareTools({
-
+tools,
+toolChoice,
 useLegacyFunctionCalling = false,
 structuredOutputs
 }) {
-
-const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
 const toolWarnings = [];
 if (tools == null) {
-return { tools: void 0,
+return { tools: void 0, toolChoice: void 0, toolWarnings };
 }
-const toolChoice = mode.toolChoice;
 if (useLegacyFunctionCalling) {
 const openaiFunctions = [];
 for (const tool of tools) {
@@ -339,18 +338,18 @@ function prepareTools({
 }
 }
 if (toolChoice == null) {
-return { tools: openaiTools2,
+return { tools: openaiTools2, toolChoice: void 0, toolWarnings };
 }
 const type = toolChoice.type;
 switch (type) {
 case "auto":
 case "none":
 case "required":
-return { tools: openaiTools2,
+return { tools: openaiTools2, toolChoice: type, toolWarnings };
 case "tool":
 return {
 tools: openaiTools2,
-
+toolChoice: {
 type: "function",
 function: {
 name: toolChoice.toolName
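`prepareTools` now receives `tools` and `toolChoice` directly instead of the old `mode` object, and returns `tools`, `toolChoice`, and `toolWarnings`. The tool-choice mapping it applies is restated below as a standalone sketch (illustration only, not an exported function):

// Maps an SDK tool choice onto the OpenAI chat `tool_choice` field,
// mirroring the switch in the hunks above.
function mapChatToolChoice(toolChoice) {
  if (toolChoice == null) {
    return undefined;
  }
  switch (toolChoice.type) {
    case "auto":
    case "none":
    case "required":
      return toolChoice.type;
    case "tool":
      return { type: "function", function: { name: toolChoice.toolName } };
  }
}

console.log(mapChatToolChoice({ type: "tool", toolName: "getWeather" }));
// -> { type: "function", function: { name: "getWeather" } }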
@@ -392,7 +391,6 @@ var OpenAIChatLanguageModel = class {
 return !this.settings.downloadImages;
 }
 getArgs({
-mode,
 prompt,
 maxTokens,
 temperature,
@@ -403,10 +401,11 @@ var OpenAIChatLanguageModel = class {
 stopSequences,
 responseFormat,
 seed,
-
+tools,
+toolChoice,
+providerOptions
 }) {
-var _a, _b, _c, _d, _e, _f, _g
-const type = mode.type;
+var _a, _b, _c, _d, _e, _f, _g;
 const warnings = [];
 if (topK != null) {
 warnings.push({
@@ -455,6 +454,7 @@ var OpenAIChatLanguageModel = class {
 top_p: topP,
 frequency_penalty: frequencyPenalty,
 presence_penalty: presencePenalty,
+// TODO improve below:
 response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
 type: "json_schema",
 json_schema: {
@@ -468,11 +468,11 @@
 seed,
 // openai specific settings:
 // TODO remove in next major version; we auto-map maxTokens now
-max_completion_tokens: (_b =
-store: (_c =
-metadata: (_d =
-prediction: (_e =
-reasoning_effort: (_g = (_f =
+max_completion_tokens: (_b = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _b.maxCompletionTokens,
+store: (_c = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _c.store,
+metadata: (_d = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _d.metadata,
+prediction: (_e = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _e.prediction,
+reasoning_effort: (_g = (_f = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
 // messages:
 messages
 };
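The OpenAI-specific chat settings are now read from the call's `providerOptions` bag. A hedged call-site sketch; the `generateText` call shape is assumed from the canary AI SDK, and only the option keys under `openai` are taken from this hunk:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai("gpt-4o"),
  prompt: "Summarize this changelog entry.",
  providerOptions: {
    openai: {
      maxCompletionTokens: 256,         // -> max_completion_tokens
      store: true,                      // -> store
      metadata: { run: "canary-test" }, // -> metadata
      reasoningEffort: "low",           // -> reasoning_effort (falls back to settings.reasoningEffort)
    },
  },
});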
@@ -537,81 +537,28 @@ var OpenAIChatLanguageModel = class {
 baseArgs.max_tokens = void 0;
 }
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-response_format: this.supportsStructuredOutputs && mode.schema != null ? {
-type: "json_schema",
-json_schema: {
-schema: mode.schema,
-strict: true,
-name: (_h = mode.name) != null ? _h : "response",
-description: mode.description
-}
-} : { type: "json_object" }
-},
-warnings
-};
-}
-case "object-tool": {
-return {
-args: useLegacyFunctionCalling ? {
-...baseArgs,
-function_call: {
-name: mode.tool.name
-},
-functions: [
-{
-name: mode.tool.name,
-description: mode.tool.description,
-parameters: mode.tool.parameters
-}
-]
-} : {
-...baseArgs,
-tool_choice: {
-type: "function",
-function: { name: mode.tool.name }
-},
-tools: [
-{
-type: "function",
-function: {
-name: mode.tool.name,
-description: mode.tool.description,
-parameters: mode.tool.parameters,
-strict: this.supportsStructuredOutputs ? true : void 0
-}
-}
-]
-},
-warnings
-};
-}
-default: {
-const _exhaustiveCheck = type;
-throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-}
-}
+const {
+tools: openaiTools2,
+toolChoice: openaiToolChoice,
+functions,
+function_call,
+toolWarnings
+} = prepareTools({
+tools,
+toolChoice,
+useLegacyFunctionCalling,
+structuredOutputs: this.supportsStructuredOutputs
+});
+return {
+args: {
+...baseArgs,
+tools: openaiTools2,
+tool_choice: openaiToolChoice,
+functions,
+function_call
+},
+warnings: [...warnings, ...toolWarnings]
+};
 }
 async doGenerate(options) {
 var _a, _b, _c, _d, _e, _f, _g, _h;
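With the mode-based switch removed, the chat `getArgs` always returns a single shape: the base request plus whatever `prepareTools` produced, with tool warnings folded into the call warnings. A sketch of the resulting value; all field values are illustrative:

// Approximate shape of the object getArgs now returns on every code path.
const result = {
  args: {
    model: "gpt-4o",                               // ...baseArgs
    messages: [{ role: "user", content: "Hi" }],
    tools: [
      {
        type: "function",
        function: { name: "getWeather", parameters: { type: "object", properties: {} } },
      },
    ],
    tool_choice: "auto",
    // functions / function_call are only populated on the legacy function-calling path
  },
  warnings: [],                                    // [...warnings, ...toolWarnings]
};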
@@ -1078,7 +1025,6 @@ var reasoningModels = {
 };
 
 // src/openai-completion-language-model.ts
-var import_provider5 = require("@ai-sdk/provider");
 var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_zod3 = require("zod");
 
@@ -1193,7 +1139,6 @@ var OpenAICompletionLanguageModel = class {
 return this.config.provider;
 }
 getArgs({
-mode,
 inputFormat,
 prompt,
 maxTokens,
@@ -1204,16 +1149,19 @@
 presencePenalty,
 stopSequences: userStopSequences,
 responseFormat,
+tools,
+toolChoice,
 seed
 }) {
-var _a;
-const type = mode.type;
 const warnings = [];
 if (topK != null) {
-warnings.push({
-
-
-});
+warnings.push({ type: "unsupported-setting", setting: "topK" });
+}
+if (tools == null ? void 0 : tools.length) {
+warnings.push({ type: "unsupported-setting", setting: "tools" });
+}
+if (toolChoice != null) {
+warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
 }
 if (responseFormat != null && responseFormat.type !== "text") {
 warnings.push({
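The completion model no longer throws `UnsupportedFunctionalityError` when tools or a tool choice are supplied; it records `unsupported-setting` warnings and continues. A minimal restatement of the new guards (illustration only, not the exported API):

// Collects the warnings the completion model now emits for unsupported call options.
function collectCompletionWarnings({ topK, tools, toolChoice }) {
  const warnings = [];
  if (topK != null) {
    warnings.push({ type: "unsupported-setting", setting: "topK" });
  }
  if (tools?.length) {
    warnings.push({ type: "unsupported-setting", setting: "tools" });
  }
  if (toolChoice != null) {
    warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
  }
  return warnings;
}

console.log(collectCompletionWarnings({ tools: [{ type: "function", name: "getWeather" }] }));
// -> [ { type: "unsupported-setting", setting: "tools" } ]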
@@ -1224,56 +1172,30 @@
 }
 const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
 const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+return {
+args: {
+// model id:
+model: this.modelId,
+// model specific settings:
+echo: this.settings.echo,
+logit_bias: this.settings.logitBias,
+logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
+suffix: this.settings.suffix,
+user: this.settings.user,
+// standardized settings:
+max_tokens: maxTokens,
+temperature,
+top_p: topP,
+frequency_penalty: frequencyPenalty,
+presence_penalty: presencePenalty,
+seed,
+// prompt:
+prompt: completionPrompt,
+// stop sequences:
+stop: stop.length > 0 ? stop : void 0
+},
+warnings
 };
-switch (type) {
-case "regular": {
-if ((_a = mode.tools) == null ? void 0 : _a.length) {
-throw new import_provider5.UnsupportedFunctionalityError({
-functionality: "tools"
-});
-}
-if (mode.toolChoice) {
-throw new import_provider5.UnsupportedFunctionalityError({
-functionality: "toolChoice"
-});
-}
-return { args: baseArgs, warnings };
-}
-case "object-json": {
-throw new import_provider5.UnsupportedFunctionalityError({
-functionality: "object-json mode"
-});
-}
-case "object-tool": {
-throw new import_provider5.UnsupportedFunctionalityError({
-functionality: "object-tool mode"
-});
-}
-default: {
-const _exhaustiveCheck = type;
-throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-}
-}
 }
 async doGenerate(options) {
 const { args, warnings } = this.getArgs(options);
@@ -1451,7 +1373,7 @@ var openaiCompletionChunkSchema = import_zod3.z.union([
 ]);
 
 // src/openai-embedding-model.ts
-var
+var import_provider5 = require("@ai-sdk/provider");
 var import_provider_utils5 = require("@ai-sdk/provider-utils");
 var import_zod4 = require("zod");
 var OpenAIEmbeddingModel = class {
@@ -1478,7 +1400,7 @@ var OpenAIEmbeddingModel = class {
 abortSignal
 }) {
 if (values.length > this.maxEmbeddingsPerCall) {
-throw new
+throw new import_provider5.TooManyEmbeddingValuesForCallError({
 provider: this.provider,
 modelId: this.modelId,
 maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
@@ -1627,7 +1549,7 @@ var import_provider_utils8 = require("@ai-sdk/provider-utils");
 var import_zod7 = require("zod");
 
 // src/responses/convert-to-openai-responses-messages.ts
-var
+var import_provider6 = require("@ai-sdk/provider");
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
 function convertToOpenAIResponsesMessages({
 prompt,
@@ -1677,12 +1599,12 @@ function convertToOpenAIResponsesMessages({
 type: "input_image",
 image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils7.convertUint8ArrayToBase64)(part.image)}`,
 // OpenAI specific extension: image detail
-detail: (_c = (_b = part.
+detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
 };
 }
 case "file": {
 if (part.data instanceof URL) {
-throw new
+throw new import_provider6.UnsupportedFunctionalityError({
 functionality: "File URLs in user messages"
 });
 }
@@ -1695,7 +1617,7 @@ function convertToOpenAIResponsesMessages({
 };
 }
 default: {
-throw new
+throw new import_provider6.UnsupportedFunctionalityError({
 functionality: "Only PDF files are supported in user messages"
 });
 }
@@ -1767,18 +1689,17 @@ function mapOpenAIResponseFinishReason({
 }
 
 // src/responses/openai-responses-prepare-tools.ts
-var
+var import_provider7 = require("@ai-sdk/provider");
 function prepareResponsesTools({
-
+tools,
+toolChoice,
 strict
 }) {
-
-const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
 const toolWarnings = [];
 if (tools == null) {
-return { tools: void 0,
+return { tools: void 0, toolChoice: void 0, toolWarnings };
 }
-const toolChoice = mode.toolChoice;
 const openaiTools2 = [];
 for (const tool of tools) {
 switch (tool.type) {
@@ -1811,36 +1732,23 @@ function prepareResponsesTools({
 }
 }
 if (toolChoice == null) {
-return { tools: openaiTools2,
+return { tools: openaiTools2, toolChoice: void 0, toolWarnings };
 }
 const type = toolChoice.type;
 switch (type) {
 case "auto":
 case "none":
 case "required":
-return { tools: openaiTools2,
-case "tool":
-if (toolChoice.toolName === "web_search_preview") {
-return {
-tools: openaiTools2,
-tool_choice: {
-type: "web_search_preview"
-},
-toolWarnings
-};
-}
+return { tools: openaiTools2, toolChoice: type, toolWarnings };
+case "tool":
 return {
 tools: openaiTools2,
-
-type: "function",
-name: toolChoice.toolName
-},
+toolChoice: toolChoice.toolName === "web_search_preview" ? { type: "web_search_preview" } : { type: "function", name: toolChoice.toolName },
 toolWarnings
 };
-}
 default: {
 const _exhaustiveCheck = type;
-throw new
+throw new import_provider7.UnsupportedFunctionalityError({
 functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
 });
 }
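For the Responses API the same consolidation applies, with one special case: a tool choice that names the `web_search_preview` tool maps to `{ type: "web_search_preview" }` rather than a function reference. Restated as a standalone sketch (illustration only, not an exported function):

// Maps an SDK tool choice onto the Responses API `tool_choice` field,
// including the web_search_preview special case from the hunk above.
function mapResponsesToolChoice(toolChoice) {
  if (toolChoice == null) {
    return undefined;
  }
  switch (toolChoice.type) {
    case "auto":
    case "none":
    case "required":
      return toolChoice.type;
    case "tool":
      return toolChoice.toolName === "web_search_preview"
        ? { type: "web_search_preview" }
        : { type: "function", name: toolChoice.toolName };
  }
}

console.log(mapResponsesToolChoice({ type: "tool", toolName: "web_search_preview" }));
// -> { type: "web_search_preview" }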
@@ -1859,7 +1767,6 @@ var OpenAIResponsesLanguageModel = class {
 return this.config.provider;
 }
 getArgs({
-mode,
 maxTokens,
 temperature,
 stopSequences,
@@ -1869,24 +1776,19 @@
 frequencyPenalty,
 seed,
 prompt,
-
+providerOptions,
+tools,
+toolChoice,
 responseFormat
 }) {
-var _a, _b
+var _a, _b;
 const warnings = [];
 const modelConfig = getResponsesModelConfig(this.modelId);
-const type = mode.type;
 if (topK != null) {
-warnings.push({
-type: "unsupported-setting",
-setting: "topK"
-});
+warnings.push({ type: "unsupported-setting", setting: "topK" });
 }
 if (seed != null) {
-warnings.push({
-type: "unsupported-setting",
-setting: "seed"
-});
+warnings.push({ type: "unsupported-setting", setting: "seed" });
 }
 if (presencePenalty != null) {
 warnings.push({
@@ -1901,10 +1803,7 @@
 });
 }
 if (stopSequences != null) {
-warnings.push({
-type: "unsupported-setting",
-setting: "stopSequences"
-});
+warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
 }
 const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
 prompt,
@@ -1913,7 +1812,7 @@
 warnings.push(...messageWarnings);
 const openaiOptions = (0, import_provider_utils8.parseProviderOptions)({
 provider: "openai",
-providerOptions
+providerOptions,
 schema: openaiResponsesProviderOptionsSchema
 });
 const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
@@ -1967,62 +1866,23 @@
 });
 }
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-return {
-args: {
-...baseArgs,
-text: {
-format: mode.schema != null ? {
-type: "json_schema",
-strict: isStrict,
-name: (_c = mode.name) != null ? _c : "response",
-description: mode.description,
-schema: mode.schema
-} : { type: "json_object" }
-}
-},
-warnings
-};
-}
-case "object-tool": {
-return {
-args: {
-...baseArgs,
-tool_choice: { type: "function", name: mode.tool.name },
-tools: [
-{
-type: "function",
-name: mode.tool.name,
-description: mode.tool.description,
-parameters: mode.tool.parameters,
-strict: isStrict
-}
-]
-},
-warnings
-};
-}
-default: {
-const _exhaustiveCheck = type;
-throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-}
-}
+const {
+tools: openaiTools2,
+toolChoice: openaiToolChoice,
+toolWarnings
+} = prepareResponsesTools({
+tools,
+toolChoice,
+strict: isStrict
+});
+return {
+args: {
+...baseArgs,
+tools: openaiTools2,
+tool_choice: openaiToolChoice
+},
+warnings: [...warnings, ...toolWarnings]
+};
 }
 async doGenerate(options) {
 var _a, _b, _c, _d, _e;