@ai-sdk/openai 2.0.0-canary.1 → 2.0.0-canary.2

This diff shows the changes between two package versions as published to a supported public registry. It is provided for informational purposes only.
@@ -91,7 +91,7 @@ function convertToOpenAIChatMessages({
91
91
  image_url: {
92
92
  url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
93
93
  // OpenAI specific extension: image detail
94
- detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
94
+ detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
95
95
  }
96
96
  };
97
97
  }
@@ -270,17 +270,16 @@ function getResponseMetadata({
270
270
  // src/openai-prepare-tools.ts
271
271
  var import_provider2 = require("@ai-sdk/provider");
272
272
  function prepareTools({
273
- mode,
273
+ tools,
274
+ toolChoice,
274
275
  useLegacyFunctionCalling = false,
275
276
  structuredOutputs
276
277
  }) {
277
- var _a;
278
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
278
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
279
279
  const toolWarnings = [];
280
280
  if (tools == null) {
281
- return { tools: void 0, tool_choice: void 0, toolWarnings };
281
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
282
282
  }
283
- const toolChoice = mode.toolChoice;
284
283
  if (useLegacyFunctionCalling) {
285
284
  const openaiFunctions = [];
286
285
  for (const tool of tools) {
@@ -340,18 +339,18 @@ function prepareTools({
340
339
  }
341
340
  }
342
341
  if (toolChoice == null) {
343
- return { tools: openaiTools, tool_choice: void 0, toolWarnings };
342
+ return { tools: openaiTools, toolChoice: void 0, toolWarnings };
344
343
  }
345
344
  const type = toolChoice.type;
346
345
  switch (type) {
347
346
  case "auto":
348
347
  case "none":
349
348
  case "required":
350
- return { tools: openaiTools, tool_choice: type, toolWarnings };
349
+ return { tools: openaiTools, toolChoice: type, toolWarnings };
351
350
  case "tool":
352
351
  return {
353
352
  tools: openaiTools,
354
- tool_choice: {
353
+ toolChoice: {
355
354
  type: "function",
356
355
  function: {
357
356
  name: toolChoice.toolName
@@ -393,7 +392,6 @@ var OpenAIChatLanguageModel = class {
393
392
  return !this.settings.downloadImages;
394
393
  }
395
394
  getArgs({
396
- mode,
397
395
  prompt,
398
396
  maxTokens,
399
397
  temperature,
@@ -404,10 +402,11 @@ var OpenAIChatLanguageModel = class {
404
402
  stopSequences,
405
403
  responseFormat,
406
404
  seed,
407
- providerMetadata
405
+ tools,
406
+ toolChoice,
407
+ providerOptions
408
408
  }) {
409
- var _a, _b, _c, _d, _e, _f, _g, _h;
410
- const type = mode.type;
409
+ var _a, _b, _c, _d, _e, _f, _g;
411
410
  const warnings = [];
412
411
  if (topK != null) {
413
412
  warnings.push({
@@ -456,6 +455,7 @@ var OpenAIChatLanguageModel = class {
456
455
  top_p: topP,
457
456
  frequency_penalty: frequencyPenalty,
458
457
  presence_penalty: presencePenalty,
458
+ // TODO improve below:
459
459
  response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
460
460
  type: "json_schema",
461
461
  json_schema: {
@@ -469,11 +469,11 @@ var OpenAIChatLanguageModel = class {
469
469
  seed,
470
470
  // openai specific settings:
471
471
  // TODO remove in next major version; we auto-map maxTokens now
472
- max_completion_tokens: (_b = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _b.maxCompletionTokens,
473
- store: (_c = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _c.store,
474
- metadata: (_d = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _d.metadata,
475
- prediction: (_e = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _e.prediction,
476
- reasoning_effort: (_g = (_f = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
472
+ max_completion_tokens: (_b = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _b.maxCompletionTokens,
473
+ store: (_c = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _c.store,
474
+ metadata: (_d = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _d.metadata,
475
+ prediction: (_e = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _e.prediction,
476
+ reasoning_effort: (_g = (_f = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
477
477
  // messages:
478
478
  messages
479
479
  };
@@ -538,81 +538,28 @@ var OpenAIChatLanguageModel = class {
538
538
  baseArgs.max_tokens = void 0;
539
539
  }
540
540
  }
541
- switch (type) {
542
- case "regular": {
543
- const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
544
- mode,
545
- useLegacyFunctionCalling,
546
- structuredOutputs: this.supportsStructuredOutputs
547
- });
548
- return {
549
- args: {
550
- ...baseArgs,
551
- tools,
552
- tool_choice,
553
- functions,
554
- function_call
555
- },
556
- warnings: [...warnings, ...toolWarnings]
557
- };
558
- }
559
- case "object-json": {
560
- return {
561
- args: {
562
- ...baseArgs,
563
- response_format: this.supportsStructuredOutputs && mode.schema != null ? {
564
- type: "json_schema",
565
- json_schema: {
566
- schema: mode.schema,
567
- strict: true,
568
- name: (_h = mode.name) != null ? _h : "response",
569
- description: mode.description
570
- }
571
- } : { type: "json_object" }
572
- },
573
- warnings
574
- };
575
- }
576
- case "object-tool": {
577
- return {
578
- args: useLegacyFunctionCalling ? {
579
- ...baseArgs,
580
- function_call: {
581
- name: mode.tool.name
582
- },
583
- functions: [
584
- {
585
- name: mode.tool.name,
586
- description: mode.tool.description,
587
- parameters: mode.tool.parameters
588
- }
589
- ]
590
- } : {
591
- ...baseArgs,
592
- tool_choice: {
593
- type: "function",
594
- function: { name: mode.tool.name }
595
- },
596
- tools: [
597
- {
598
- type: "function",
599
- function: {
600
- name: mode.tool.name,
601
- description: mode.tool.description,
602
- parameters: mode.tool.parameters,
603
- strict: this.supportsStructuredOutputs ? true : void 0
604
- }
605
- }
606
- ]
607
- },
608
- warnings
609
- };
610
- }
611
- default: {
612
- const _exhaustiveCheck = type;
613
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
614
- }
615
- }
541
+ const {
542
+ tools: openaiTools,
543
+ toolChoice: openaiToolChoice,
544
+ functions,
545
+ function_call,
546
+ toolWarnings
547
+ } = prepareTools({
548
+ tools,
549
+ toolChoice,
550
+ useLegacyFunctionCalling,
551
+ structuredOutputs: this.supportsStructuredOutputs
552
+ });
553
+ return {
554
+ args: {
555
+ ...baseArgs,
556
+ tools: openaiTools,
557
+ tool_choice: openaiToolChoice,
558
+ functions,
559
+ function_call
560
+ },
561
+ warnings: [...warnings, ...toolWarnings]
562
+ };
616
563
  }
617
564
  async doGenerate(options) {
618
565
  var _a, _b, _c, _d, _e, _f, _g, _h;
@@ -1079,7 +1026,6 @@ var reasoningModels = {
1079
1026
  };
1080
1027
 
1081
1028
  // src/openai-completion-language-model.ts
1082
- var import_provider5 = require("@ai-sdk/provider");
1083
1029
  var import_provider_utils4 = require("@ai-sdk/provider-utils");
1084
1030
  var import_zod3 = require("zod");
1085
1031
 
@@ -1194,7 +1140,6 @@ var OpenAICompletionLanguageModel = class {
1194
1140
  return this.config.provider;
1195
1141
  }
1196
1142
  getArgs({
1197
- mode,
1198
1143
  inputFormat,
1199
1144
  prompt,
1200
1145
  maxTokens,
@@ -1205,16 +1150,19 @@ var OpenAICompletionLanguageModel = class {
1205
1150
  presencePenalty,
1206
1151
  stopSequences: userStopSequences,
1207
1152
  responseFormat,
1153
+ tools,
1154
+ toolChoice,
1208
1155
  seed
1209
1156
  }) {
1210
- var _a;
1211
- const type = mode.type;
1212
1157
  const warnings = [];
1213
1158
  if (topK != null) {
1214
- warnings.push({
1215
- type: "unsupported-setting",
1216
- setting: "topK"
1217
- });
1159
+ warnings.push({ type: "unsupported-setting", setting: "topK" });
1160
+ }
1161
+ if (tools == null ? void 0 : tools.length) {
1162
+ warnings.push({ type: "unsupported-setting", setting: "tools" });
1163
+ }
1164
+ if (toolChoice != null) {
1165
+ warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
1218
1166
  }
1219
1167
  if (responseFormat != null && responseFormat.type !== "text") {
1220
1168
  warnings.push({
@@ -1225,56 +1173,30 @@ var OpenAICompletionLanguageModel = class {
1225
1173
  }
1226
1174
  const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
1227
1175
  const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
1228
- const baseArgs = {
1229
- // model id:
1230
- model: this.modelId,
1231
- // model specific settings:
1232
- echo: this.settings.echo,
1233
- logit_bias: this.settings.logitBias,
1234
- logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
1235
- suffix: this.settings.suffix,
1236
- user: this.settings.user,
1237
- // standardized settings:
1238
- max_tokens: maxTokens,
1239
- temperature,
1240
- top_p: topP,
1241
- frequency_penalty: frequencyPenalty,
1242
- presence_penalty: presencePenalty,
1243
- seed,
1244
- // prompt:
1245
- prompt: completionPrompt,
1246
- // stop sequences:
1247
- stop: stop.length > 0 ? stop : void 0
1176
+ return {
1177
+ args: {
1178
+ // model id:
1179
+ model: this.modelId,
1180
+ // model specific settings:
1181
+ echo: this.settings.echo,
1182
+ logit_bias: this.settings.logitBias,
1183
+ logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
1184
+ suffix: this.settings.suffix,
1185
+ user: this.settings.user,
1186
+ // standardized settings:
1187
+ max_tokens: maxTokens,
1188
+ temperature,
1189
+ top_p: topP,
1190
+ frequency_penalty: frequencyPenalty,
1191
+ presence_penalty: presencePenalty,
1192
+ seed,
1193
+ // prompt:
1194
+ prompt: completionPrompt,
1195
+ // stop sequences:
1196
+ stop: stop.length > 0 ? stop : void 0
1197
+ },
1198
+ warnings
1248
1199
  };
1249
- switch (type) {
1250
- case "regular": {
1251
- if ((_a = mode.tools) == null ? void 0 : _a.length) {
1252
- throw new import_provider5.UnsupportedFunctionalityError({
1253
- functionality: "tools"
1254
- });
1255
- }
1256
- if (mode.toolChoice) {
1257
- throw new import_provider5.UnsupportedFunctionalityError({
1258
- functionality: "toolChoice"
1259
- });
1260
- }
1261
- return { args: baseArgs, warnings };
1262
- }
1263
- case "object-json": {
1264
- throw new import_provider5.UnsupportedFunctionalityError({
1265
- functionality: "object-json mode"
1266
- });
1267
- }
1268
- case "object-tool": {
1269
- throw new import_provider5.UnsupportedFunctionalityError({
1270
- functionality: "object-tool mode"
1271
- });
1272
- }
1273
- default: {
1274
- const _exhaustiveCheck = type;
1275
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
1276
- }
1277
- }
1278
1200
  }
1279
1201
  async doGenerate(options) {
1280
1202
  const { args, warnings } = this.getArgs(options);
@@ -1452,7 +1374,7 @@ var openaiCompletionChunkSchema = import_zod3.z.union([
1452
1374
  ]);
1453
1375
 
1454
1376
  // src/openai-embedding-model.ts
1455
- var import_provider6 = require("@ai-sdk/provider");
1377
+ var import_provider5 = require("@ai-sdk/provider");
1456
1378
  var import_provider_utils5 = require("@ai-sdk/provider-utils");
1457
1379
  var import_zod4 = require("zod");
1458
1380
  var OpenAIEmbeddingModel = class {
@@ -1479,7 +1401,7 @@ var OpenAIEmbeddingModel = class {
1479
1401
  abortSignal
1480
1402
  }) {
1481
1403
  if (values.length > this.maxEmbeddingsPerCall) {
1482
- throw new import_provider6.TooManyEmbeddingValuesForCallError({
1404
+ throw new import_provider5.TooManyEmbeddingValuesForCallError({
1483
1405
  provider: this.provider,
1484
1406
  modelId: this.modelId,
1485
1407
  maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
@@ -1607,7 +1529,7 @@ var import_provider_utils8 = require("@ai-sdk/provider-utils");
1607
1529
  var import_zod6 = require("zod");
1608
1530
 
1609
1531
  // src/responses/convert-to-openai-responses-messages.ts
1610
- var import_provider7 = require("@ai-sdk/provider");
1532
+ var import_provider6 = require("@ai-sdk/provider");
1611
1533
  var import_provider_utils7 = require("@ai-sdk/provider-utils");
1612
1534
  function convertToOpenAIResponsesMessages({
1613
1535
  prompt,
@@ -1657,12 +1579,12 @@ function convertToOpenAIResponsesMessages({
1657
1579
  type: "input_image",
1658
1580
  image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils7.convertUint8ArrayToBase64)(part.image)}`,
1659
1581
  // OpenAI specific extension: image detail
1660
- detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
1582
+ detail: (_c = (_b = part.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
1661
1583
  };
1662
1584
  }
1663
1585
  case "file": {
1664
1586
  if (part.data instanceof URL) {
1665
- throw new import_provider7.UnsupportedFunctionalityError({
1587
+ throw new import_provider6.UnsupportedFunctionalityError({
1666
1588
  functionality: "File URLs in user messages"
1667
1589
  });
1668
1590
  }
@@ -1675,7 +1597,7 @@ function convertToOpenAIResponsesMessages({
1675
1597
  };
1676
1598
  }
1677
1599
  default: {
1678
- throw new import_provider7.UnsupportedFunctionalityError({
1600
+ throw new import_provider6.UnsupportedFunctionalityError({
1679
1601
  functionality: "Only PDF files are supported in user messages"
1680
1602
  });
1681
1603
  }
@@ -1747,18 +1669,17 @@ function mapOpenAIResponseFinishReason({
1747
1669
  }
1748
1670
 
1749
1671
  // src/responses/openai-responses-prepare-tools.ts
1750
- var import_provider8 = require("@ai-sdk/provider");
1672
+ var import_provider7 = require("@ai-sdk/provider");
1751
1673
  function prepareResponsesTools({
1752
- mode,
1674
+ tools,
1675
+ toolChoice,
1753
1676
  strict
1754
1677
  }) {
1755
- var _a;
1756
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
1678
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
1757
1679
  const toolWarnings = [];
1758
1680
  if (tools == null) {
1759
- return { tools: void 0, tool_choice: void 0, toolWarnings };
1681
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
1760
1682
  }
1761
- const toolChoice = mode.toolChoice;
1762
1683
  const openaiTools = [];
1763
1684
  for (const tool of tools) {
1764
1685
  switch (tool.type) {
@@ -1791,36 +1712,23 @@ function prepareResponsesTools({
1791
1712
  }
1792
1713
  }
1793
1714
  if (toolChoice == null) {
1794
- return { tools: openaiTools, tool_choice: void 0, toolWarnings };
1715
+ return { tools: openaiTools, toolChoice: void 0, toolWarnings };
1795
1716
  }
1796
1717
  const type = toolChoice.type;
1797
1718
  switch (type) {
1798
1719
  case "auto":
1799
1720
  case "none":
1800
1721
  case "required":
1801
- return { tools: openaiTools, tool_choice: type, toolWarnings };
1802
- case "tool": {
1803
- if (toolChoice.toolName === "web_search_preview") {
1804
- return {
1805
- tools: openaiTools,
1806
- tool_choice: {
1807
- type: "web_search_preview"
1808
- },
1809
- toolWarnings
1810
- };
1811
- }
1722
+ return { tools: openaiTools, toolChoice: type, toolWarnings };
1723
+ case "tool":
1812
1724
  return {
1813
1725
  tools: openaiTools,
1814
- tool_choice: {
1815
- type: "function",
1816
- name: toolChoice.toolName
1817
- },
1726
+ toolChoice: toolChoice.toolName === "web_search_preview" ? { type: "web_search_preview" } : { type: "function", name: toolChoice.toolName },
1818
1727
  toolWarnings
1819
1728
  };
1820
- }
1821
1729
  default: {
1822
1730
  const _exhaustiveCheck = type;
1823
- throw new import_provider8.UnsupportedFunctionalityError({
1731
+ throw new import_provider7.UnsupportedFunctionalityError({
1824
1732
  functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
1825
1733
  });
1826
1734
  }
@@ -1839,7 +1747,6 @@ var OpenAIResponsesLanguageModel = class {
1839
1747
  return this.config.provider;
1840
1748
  }
1841
1749
  getArgs({
1842
- mode,
1843
1750
  maxTokens,
1844
1751
  temperature,
1845
1752
  stopSequences,
@@ -1849,24 +1756,19 @@ var OpenAIResponsesLanguageModel = class {
1849
1756
  frequencyPenalty,
1850
1757
  seed,
1851
1758
  prompt,
1852
- providerMetadata,
1759
+ providerOptions,
1760
+ tools,
1761
+ toolChoice,
1853
1762
  responseFormat
1854
1763
  }) {
1855
- var _a, _b, _c;
1764
+ var _a, _b;
1856
1765
  const warnings = [];
1857
1766
  const modelConfig = getResponsesModelConfig(this.modelId);
1858
- const type = mode.type;
1859
1767
  if (topK != null) {
1860
- warnings.push({
1861
- type: "unsupported-setting",
1862
- setting: "topK"
1863
- });
1768
+ warnings.push({ type: "unsupported-setting", setting: "topK" });
1864
1769
  }
1865
1770
  if (seed != null) {
1866
- warnings.push({
1867
- type: "unsupported-setting",
1868
- setting: "seed"
1869
- });
1771
+ warnings.push({ type: "unsupported-setting", setting: "seed" });
1870
1772
  }
1871
1773
  if (presencePenalty != null) {
1872
1774
  warnings.push({
@@ -1881,10 +1783,7 @@ var OpenAIResponsesLanguageModel = class {
1881
1783
  });
1882
1784
  }
1883
1785
  if (stopSequences != null) {
1884
- warnings.push({
1885
- type: "unsupported-setting",
1886
- setting: "stopSequences"
1887
- });
1786
+ warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
1888
1787
  }
1889
1788
  const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
1890
1789
  prompt,
@@ -1893,7 +1792,7 @@ var OpenAIResponsesLanguageModel = class {
1893
1792
  warnings.push(...messageWarnings);
1894
1793
  const openaiOptions = (0, import_provider_utils8.parseProviderOptions)({
1895
1794
  provider: "openai",
1896
- providerOptions: providerMetadata,
1795
+ providerOptions,
1897
1796
  schema: openaiResponsesProviderOptionsSchema
1898
1797
  });
1899
1798
  const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
@@ -1947,62 +1846,23 @@ var OpenAIResponsesLanguageModel = class {
1947
1846
  });
1948
1847
  }
1949
1848
  }
1950
- switch (type) {
1951
- case "regular": {
1952
- const { tools, tool_choice, toolWarnings } = prepareResponsesTools({
1953
- mode,
1954
- strict: isStrict
1955
- // TODO support provider options on tools
1956
- });
1957
- return {
1958
- args: {
1959
- ...baseArgs,
1960
- tools,
1961
- tool_choice
1962
- },
1963
- warnings: [...warnings, ...toolWarnings]
1964
- };
1965
- }
1966
- case "object-json": {
1967
- return {
1968
- args: {
1969
- ...baseArgs,
1970
- text: {
1971
- format: mode.schema != null ? {
1972
- type: "json_schema",
1973
- strict: isStrict,
1974
- name: (_c = mode.name) != null ? _c : "response",
1975
- description: mode.description,
1976
- schema: mode.schema
1977
- } : { type: "json_object" }
1978
- }
1979
- },
1980
- warnings
1981
- };
1982
- }
1983
- case "object-tool": {
1984
- return {
1985
- args: {
1986
- ...baseArgs,
1987
- tool_choice: { type: "function", name: mode.tool.name },
1988
- tools: [
1989
- {
1990
- type: "function",
1991
- name: mode.tool.name,
1992
- description: mode.tool.description,
1993
- parameters: mode.tool.parameters,
1994
- strict: isStrict
1995
- }
1996
- ]
1997
- },
1998
- warnings
1999
- };
2000
- }
2001
- default: {
2002
- const _exhaustiveCheck = type;
2003
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
2004
- }
2005
- }
1849
+ const {
1850
+ tools: openaiTools,
1851
+ toolChoice: openaiToolChoice,
1852
+ toolWarnings
1853
+ } = prepareResponsesTools({
1854
+ tools,
1855
+ toolChoice,
1856
+ strict: isStrict
1857
+ });
1858
+ return {
1859
+ args: {
1860
+ ...baseArgs,
1861
+ tools: openaiTools,
1862
+ tool_choice: openaiToolChoice
1863
+ },
1864
+ warnings: [...warnings, ...toolWarnings]
1865
+ };
2006
1866
  }
2007
1867
  async doGenerate(options) {
2008
1868
  var _a, _b, _c, _d, _e;