@ai-sdk/openai 0.0.37 → 0.0.39

This diff shows the changes between publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -4,7 +4,7 @@ import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
 // src/openai-chat-language-model.ts
 import {
   InvalidResponseDataError,
-  UnsupportedFunctionalityError
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
 import {
   combineHeaders,
@@ -17,8 +17,14 @@ import {
 import { z as z2 } from "zod";
 
 // src/convert-to-openai-chat-messages.ts
+import {
+  UnsupportedFunctionalityError
+} from "@ai-sdk/provider";
 import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
-function convertToOpenAIChatMessages(prompt) {
+function convertToOpenAIChatMessages({
+  prompt,
+  useLegacyFunctionCalling = false
+}) {
   const messages = [];
   for (const { role, content } of prompt) {
     switch (role) {
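
Note: `convertToOpenAIChatMessages` now takes an options object with a `useLegacyFunctionCalling` flag (default `false`), fed from a new chat setting of the same name. A minimal usage sketch, assuming the package's `openai.chat(modelId, settings)` provider API from this release line (only the setting name is taken from this diff; the rest is illustrative):

    import { openai } from "@ai-sdk/openai";

    // Opt in for OpenAI-compatible endpoints that only support the deprecated
    // `functions` / `function_call` protocol instead of `tools` / `tool_calls`.
    const model = openai.chat("gpt-3.5-turbo", {
      useLegacyFunctionCalling: true,
    });
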
@@ -78,20 +84,41 @@ function convertToOpenAIChatMessages(prompt) {
             }
           }
         }
-        messages.push({
-          role: "assistant",
-          content: text,
-          tool_calls: toolCalls.length > 0 ? toolCalls : void 0
-        });
+        if (useLegacyFunctionCalling) {
+          if (toolCalls.length > 1) {
+            throw new UnsupportedFunctionalityError({
+              functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
+            });
+          }
+          messages.push({
+            role: "assistant",
+            content: text,
+            function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
+          });
+        } else {
+          messages.push({
+            role: "assistant",
+            content: text,
+            tool_calls: toolCalls.length > 0 ? toolCalls : void 0
+          });
+        }
         break;
       }
       case "tool": {
         for (const toolResponse of content) {
-          messages.push({
-            role: "tool",
-            tool_call_id: toolResponse.toolCallId,
-            content: JSON.stringify(toolResponse.result)
-          });
+          if (useLegacyFunctionCalling) {
+            messages.push({
+              role: "function",
+              name: toolResponse.toolName,
+              content: JSON.stringify(toolResponse.result)
+            });
+          } else {
+            messages.push({
+              role: "tool",
+              tool_call_id: toolResponse.toolCallId,
+              content: JSON.stringify(toolResponse.result)
+            });
+          }
         }
         break;
       }
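
Note: with the flag set, an assistant message may carry at most one call (hence the guard above), sent as `function_call`, and tool results go back under `role: "function"` keyed by function name rather than `role: "tool"` keyed by call id. A sketch of the two wire shapes for one hypothetical `getWeather` call (all names and values illustrative):

    // legacy protocol (useLegacyFunctionCalling: true):
    const legacyTurn = [
      {
        role: "assistant",
        content: "",
        function_call: { name: "getWeather", arguments: '{"city":"Berlin"}' },
      },
      { role: "function", name: "getWeather", content: '{"temperature":21}' },
    ];

    // default tools protocol: several calls per message, results matched by id:
    const toolsTurn = [
      {
        role: "assistant",
        content: "",
        tool_calls: [
          {
            id: "call_1",
            type: "function",
            function: { name: "getWeather", arguments: '{"city":"Berlin"}' },
          },
        ],
      },
      { role: "tool", tool_call_id: "call_1", content: '{"temperature":21}' },
    ];
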
@@ -168,11 +195,34 @@ var OpenAIChatLanguageModel = class {
     maxTokens,
     temperature,
     topP,
+    topK,
     frequencyPenalty,
     presencePenalty,
+    stopSequences,
+    responseFormat,
     seed
   }) {
     const type = mode.type;
+    const warnings = [];
+    if (topK != null) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "topK"
+      });
+    }
+    if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "responseFormat",
+        details: "JSON response format schema is not supported"
+      });
+    }
+    const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
+    if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
+      throw new UnsupportedFunctionalityError2({
+        functionality: "useLegacyFunctionCalling with parallelToolCalls"
+      });
+    }
     const baseArgs = {
       // model id:
       model: this.modelId,
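
Note: `getArgs` now reports unsupported call settings as warnings instead of dropping them silently, and rejects the impossible combination of `useLegacyFunctionCalling` with `parallelToolCalls` (the legacy protocol allows only one call per message). The warning shapes pushed here, written out as a type (a sketch inferred from this diff):

    type OpenAIChatCallWarning =
      | { type: "unsupported-setting"; setting: "topK" }
      | {
          type: "unsupported-setting";
          setting: "responseFormat";
          details: string; // "JSON response format schema is not supported"
        };
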
@@ -188,41 +238,69 @@ var OpenAIChatLanguageModel = class {
       top_p: topP,
       frequency_penalty: frequencyPenalty,
       presence_penalty: presencePenalty,
+      stop: stopSequences,
       seed,
+      // response format:
+      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
       // messages:
-      messages: convertToOpenAIChatMessages(prompt)
+      messages: convertToOpenAIChatMessages({
+        prompt,
+        useLegacyFunctionCalling
+      })
     };
     switch (type) {
       case "regular": {
-        return { ...baseArgs, ...prepareToolsAndToolChoice(mode) };
+        return {
+          args: {
+            ...baseArgs,
+            ...prepareToolsAndToolChoice({ mode, useLegacyFunctionCalling })
+          },
+          warnings
+        };
       }
       case "object-json": {
         return {
-          ...baseArgs,
-          response_format: { type: "json_object" }
+          args: {
+            ...baseArgs,
+            response_format: { type: "json_object" }
+          },
+          warnings
         };
       }
       case "object-tool": {
         return {
-          ...baseArgs,
-          tool_choice: { type: "function", function: { name: mode.tool.name } },
-          tools: [
-            {
-              type: "function",
-              function: {
+          args: useLegacyFunctionCalling ? {
+            ...baseArgs,
+            function_call: {
+              name: mode.tool.name
+            },
+            functions: [
+              {
                 name: mode.tool.name,
                 description: mode.tool.description,
                 parameters: mode.tool.parameters
               }
-            }
-          ]
+            ]
+          } : {
+            ...baseArgs,
+            tool_choice: {
+              type: "function",
+              function: { name: mode.tool.name }
+            },
+            tools: [
+              {
+                type: "function",
+                function: {
+                  name: mode.tool.name,
+                  description: mode.tool.description,
+                  parameters: mode.tool.parameters
+                }
+              }
+            ]
+          },
+          warnings
         };
       }
-      case "object-grammar": {
-        throw new UnsupportedFunctionalityError({
-          functionality: "object-grammar mode"
-        });
-      }
       default: {
         const _exhaustiveCheck = type;
         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
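
Note: `getArgs` now returns `{ args, warnings }` instead of the bare request body, and a top-level `responseFormat` of type `json` maps onto OpenAI's `response_format` field. The minified ternary above, un-minified (declarations added to make the sketch self-contained):

    declare const responseFormat:
      | { type: "json"; schema?: unknown }
      | { type: "text" }
      | undefined;

    const response_format =
      responseFormat?.type === "json" ? { type: "json_object" as const } : undefined;
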
@@ -231,7 +309,7 @@ var OpenAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b;
-    const args = this.getArgs(options);
+    const { args, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
@@ -250,7 +328,14 @@ var OpenAIChatLanguageModel = class {
     const choice = response.choices[0];
     return {
       text: (_a = choice.message.content) != null ? _a : void 0,
-      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
+      toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
+        {
+          toolCallType: "function",
+          toolCallId: generateId(),
+          toolName: choice.message.function_call.name,
+          args: choice.message.function_call.arguments
+        }
+      ] : (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
         var _a2;
         return {
           toolCallType: "function",
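
Note: in `doGenerate`, a legacy `function_call` in the response is normalized to the SDK's `toolCalls` shape. The legacy protocol carries no call id, so one is synthesized with `generateId()` (imported from `@ai-sdk/provider-utils` elsewhere in this file). Equivalent un-minified logic:

    import { generateId } from "@ai-sdk/provider-utils";

    declare const message: {
      function_call?: { name: string; arguments: string };
    };

    const toolCalls = message.function_call
      ? [
          {
            toolCallType: "function" as const,
            toolCallId: generateId(), // synthesized: legacy responses have no id
            toolName: message.function_call.name,
            args: message.function_call.arguments,
          },
        ]
      : undefined;
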
@@ -266,12 +351,12 @@ var OpenAIChatLanguageModel = class {
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings: [],
+      warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs)
     };
   }
   async doStream(options) {
-    const args = this.getArgs(options);
+    const { args, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
@@ -299,6 +384,7 @@ var OpenAIChatLanguageModel = class {
       completionTokens: Number.NaN
     };
     let logprobs;
+    const { useLegacyFunctionCalling } = this.settings;
     return {
       stream: response.pipeThrough(
         new TransformStream({
@@ -343,8 +429,16 @@ var OpenAIChatLanguageModel = class {
                 logprobs = [];
               logprobs.push(...mappedLogprobs);
             }
-            if (delta.tool_calls != null) {
-              for (const toolCallDelta of delta.tool_calls) {
+            const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
+              {
+                type: "function",
+                id: generateId(),
+                function: delta.function_call,
+                index: 0
+              }
+            ] : delta.tool_calls;
+            if (mappedToolCalls != null) {
+              for (const toolCallDelta of mappedToolCalls) {
                 const index = toolCallDelta.index;
                 if (toolCalls[index] == null) {
                   if (toolCallDelta.type !== "function") {
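
Note: streaming applies the same normalization one level earlier: a legacy `function_call` delta is wrapped as a single synthetic tool-call delta at `index: 0` (the legacy protocol never has more than one call in flight), so the tool-call accumulation loop below handles both wire formats unchanged. A standalone sketch:

    import { generateId } from "@ai-sdk/provider-utils";

    declare const delta: {
      function_call?: { name?: string; arguments?: string };
      tool_calls?: Array<{ index: number; id?: string; type?: "function" }>;
    };

    const mappedToolCalls =
      delta.function_call != null
        ? [{ type: "function" as const, id: generateId(), function: delta.function_call, index: 0 }]
        : delta.tool_calls;
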
@@ -427,7 +521,7 @@ var OpenAIChatLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings: []
+      warnings
     };
   }
 };
@@ -436,17 +530,21 @@ var openAIChatResponseSchema = z2.object({
     z2.object({
       message: z2.object({
         role: z2.literal("assistant"),
-        content: z2.string().nullable().optional(),
+        content: z2.string().nullish(),
+        function_call: z2.object({
+          arguments: z2.string(),
+          name: z2.string()
+        }).nullish(),
         tool_calls: z2.array(
           z2.object({
-            id: z2.string().optional().nullable(),
+            id: z2.string().nullish(),
             type: z2.literal("function"),
             function: z2.object({
               name: z2.string(),
               arguments: z2.string()
             })
           })
-        ).optional()
+        ).nullish()
       }),
       index: z2.number(),
       logprobs: z2.object({
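
Note: the schema updates replace `.nullable().optional()` chains with zod's `.nullish()`, which accepts the same set of inputs (`null` and `undefined` in addition to the base type). A quick equivalence check:

    import { z } from "zod";

    const a = z.string().nullish();
    const b = z.string().nullable().optional();

    for (const value of ["x", null, undefined]) {
      console.log(a.safeParse(value).success, b.safeParse(value).success); // true true
    }
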
@@ -462,8 +560,8 @@ var openAIChatResponseSchema = z2.object({
             )
           })
         ).nullable()
-      }).nullable().optional(),
-      finish_reason: z2.string().optional().nullable()
+      }).nullish(),
+      finish_reason: z2.string().nullish()
     })
   ),
   usage: z2.object({
@@ -478,6 +576,10 @@ var openaiChatChunkSchema = z2.union([
       delta: z2.object({
         role: z2.enum(["assistant"]).optional(),
         content: z2.string().nullish(),
+        function_call: z2.object({
+          name: z2.string().optional(),
+          arguments: z2.string().optional()
+        }).nullish(),
         tool_calls: z2.array(
           z2.object({
             index: z2.number(),
@@ -515,12 +617,45 @@ var openaiChatChunkSchema = z2.union([
   }),
   openAIErrorDataSchema
 ]);
-function prepareToolsAndToolChoice(mode) {
+function prepareToolsAndToolChoice({
+  mode,
+  useLegacyFunctionCalling = false
+}) {
   var _a;
   const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
   if (tools == null) {
     return { tools: void 0, tool_choice: void 0 };
   }
+  const toolChoice = mode.toolChoice;
+  if (useLegacyFunctionCalling) {
+    const mappedFunctions = tools.map((tool) => ({
+      name: tool.name,
+      description: tool.description,
+      parameters: tool.parameters
+    }));
+    if (toolChoice == null) {
+      return { functions: mappedFunctions, function_call: void 0 };
+    }
+    const type2 = toolChoice.type;
+    switch (type2) {
+      case "auto":
+      case "none":
+      case void 0:
+        return {
+          functions: mappedFunctions,
+          function_call: void 0
+        };
+      case "required":
+        throw new UnsupportedFunctionalityError2({
+          functionality: "useLegacyFunctionCalling and toolChoice: required"
+        });
+      default:
+        return {
+          functions: mappedFunctions,
+          function_call: { name: toolChoice.toolName }
+        };
+    }
+  }
   const mappedTools = tools.map((tool) => ({
     type: "function",
     function: {
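
Note: in legacy mode, `prepareToolsAndToolChoice` has to collapse the SDK's `toolChoice` union onto the single legacy `function_call` field; `"required"` has no legacy equivalent, so it throws. A standalone sketch of the mapping (the union type is assumed from how the diff reads `toolChoice.type` and `toolChoice.toolName`):

    type ToolChoice =
      | { type: "auto" }
      | { type: "none" }
      | { type: "required" }
      | { type: "tool"; toolName: string };

    function toLegacyFunctionCall(toolChoice?: ToolChoice) {
      if (toolChoice == null || toolChoice.type === "auto" || toolChoice.type === "none") {
        return undefined; // let the model decide, as before
      }
      if (toolChoice.type === "required") {
        // the legacy protocol cannot express "call some function, any function"
        throw new Error("useLegacyFunctionCalling and toolChoice: required");
      }
      return { name: toolChoice.toolName }; // force one named function
    }
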
@@ -529,7 +664,6 @@ function prepareToolsAndToolChoice(mode) {
       parameters: tool.parameters
     }
   }));
-  const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
     return { tools: mappedTools, tool_choice: void 0 };
   }
@@ -558,7 +692,7 @@
 
 // src/openai-completion-language-model.ts
 import {
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
 } from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
@@ -571,7 +705,7 @@ import { z as z3 } from "zod";
 // src/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
 } from "@ai-sdk/provider";
 function convertToOpenAICompletionPrompt({
   prompt,
@@ -604,7 +738,7 @@ function convertToOpenAICompletionPrompt({
           return part.text;
         }
         case "image": {
-          throw new UnsupportedFunctionalityError2({
+          throw new UnsupportedFunctionalityError3({
             functionality: "images"
           });
         }
@@ -623,7 +757,7 @@ ${userMessage}
           return part.text;
         }
         case "tool-call": {
-          throw new UnsupportedFunctionalityError2({
+          throw new UnsupportedFunctionalityError3({
            functionality: "tool-call messages"
          });
        }
@@ -636,7 +770,7 @@ ${assistantMessage}
         break;
       }
       case "tool": {
-        throw new UnsupportedFunctionalityError2({
+        throw new UnsupportedFunctionalityError3({
           functionality: "tool messages"
         });
       }
@@ -688,13 +822,31 @@ var OpenAICompletionLanguageModel = class {
     maxTokens,
     temperature,
     topP,
+    topK,
     frequencyPenalty,
     presencePenalty,
+    stopSequences: userStopSequences,
+    responseFormat,
     seed
   }) {
     var _a;
     const type = mode.type;
+    const warnings = [];
+    if (topK != null) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "topK"
+      });
+    }
+    if (responseFormat != null && responseFormat.type !== "text") {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "responseFormat",
+        details: "JSON response format is not supported."
+      });
+    }
     const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
+    const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
     const baseArgs = {
       // model id:
       model: this.modelId,
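
Note: the completion model now merges the stop sequences derived from the prompt conversion (the chat-turn markers it injects, such as user/assistant prefixes) with caller-provided `stopSequences`. The minified spread above is equivalent to:

    declare const stopSequences: string[] | undefined; // from convertToOpenAICompletionPrompt
    declare const userStopSequences: string[] | undefined; // from the call options

    const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];
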
@@ -714,37 +866,32 @@ var OpenAICompletionLanguageModel = class {
       // prompt:
       prompt: completionPrompt,
       // stop sequences:
-      stop: stopSequences
+      stop: stop.length > 0 ? stop : void 0
     };
     switch (type) {
       case "regular": {
         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new UnsupportedFunctionalityError3({
+          throw new UnsupportedFunctionalityError4({
             functionality: "tools"
           });
         }
         if (mode.toolChoice) {
-          throw new UnsupportedFunctionalityError3({
+          throw new UnsupportedFunctionalityError4({
             functionality: "toolChoice"
           });
         }
-        return baseArgs;
+        return { args: baseArgs, warnings };
       }
       case "object-json": {
-        throw new UnsupportedFunctionalityError3({
+        throw new UnsupportedFunctionalityError4({
           functionality: "object-json mode"
         });
       }
       case "object-tool": {
-        throw new UnsupportedFunctionalityError3({
+        throw new UnsupportedFunctionalityError4({
           functionality: "object-tool mode"
         });
       }
-      case "object-grammar": {
-        throw new UnsupportedFunctionalityError3({
-          functionality: "object-grammar mode"
-        });
-      }
       default: {
         const _exhaustiveCheck = type;
         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
@@ -752,7 +899,7 @@ var OpenAICompletionLanguageModel = class {
     }
   }
   async doGenerate(options) {
-    const args = this.getArgs(options);
+    const { args, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
779
926
  logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
780
927
  rawCall: { rawPrompt, rawSettings },
781
928
  rawResponse: { headers: responseHeaders },
782
- warnings: []
929
+ warnings
783
930
  };
784
931
  }
785
932
  async doStream(options) {
786
- const args = this.getArgs(options);
933
+ const { args, warnings } = this.getArgs(options);
787
934
  const { responseHeaders, value: response } = await postJsonToApi2({
788
935
  url: this.config.url({
789
936
  path: "/completions",
@@ -791,7 +938,7 @@ var OpenAICompletionLanguageModel = class {
       }),
       headers: combineHeaders2(this.config.headers(), options.headers),
       body: {
-        ...this.getArgs(options),
+        ...args,
         stream: true,
         // only include stream_options when in strict compatibility mode:
         stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
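
Note: besides threading `warnings` through, this hunk fixes a subtle bug: the streaming request body used to recompute its arguments with a second `this.getArgs(options)` call instead of reusing the `args` already computed at the top of `doStream`; with `getArgs` now returning `{ args, warnings }`, spreading the old call would also have leaked that wrapper object into the request body. The two body expressions, side by side:

    // before (0.0.37): body: { ...this.getArgs(options), stream: true, ... }
    // after  (0.0.39): body: { ...args, stream: true, ... }
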
@@ -862,7 +1009,7 @@ var OpenAICompletionLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings: []
+      warnings
     };
   }
 };