@ai-sdk/openai-compatible 2.0.0-beta.57 → 2.0.0-beta.59

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,23 @@
1
1
  # @ai-sdk/openai-compatible
2
2
 
3
+ ## 2.0.0-beta.59
4
+
5
+ ### Patch Changes
6
+
7
+ - 2625a04: feat(openai): update spec for mcp approval
8
+ - Updated dependencies [2625a04]
9
+ - @ai-sdk/provider@3.0.0-beta.31
10
+ - @ai-sdk/provider-utils@4.0.0-beta.58
11
+
12
+ ## 2.0.0-beta.58
13
+
14
+ ### Patch Changes
15
+
16
+ - cbf52cd: feat: expose raw finish reason
17
+ - Updated dependencies [cbf52cd]
18
+ - @ai-sdk/provider@3.0.0-beta.30
19
+ - @ai-sdk/provider-utils@4.0.0-beta.57
20
+
3
21
  ## 2.0.0-beta.57
4
22
 
5
23
  ### Patch Changes
package/dist/index.js CHANGED
@@ -181,6 +181,9 @@ function convertToOpenAICompatibleChatMessages(prompt) {
181
181
  }
182
182
  case "tool": {
183
183
  for (const toolResponse of content) {
184
+ if (toolResponse.type === "tool-approval-response") {
185
+ continue;
186
+ }
184
187
  const output = toolResponse.output;
185
188
  let contentValue;
186
189
  switch (output.type) {
@@ -242,7 +245,7 @@ function mapOpenAICompatibleFinishReason(finishReason) {
242
245
  case "tool_calls":
243
246
  return "tool-calls";
244
247
  default:
245
- return "unknown";
248
+ return "other";
246
249
  }
247
250
  }
248
251
 
@@ -434,7 +437,7 @@ var OpenAICompatibleChatLanguageModel = class {
434
437
  };
435
438
  }
436
439
  async doGenerate(options) {
437
- var _a, _b, _c, _d, _e;
440
+ var _a, _b, _c, _d, _e, _f;
438
441
  const { args, warnings } = await this.getArgs({ ...options });
439
442
  const body = JSON.stringify(args);
440
443
  const {
@@ -493,7 +496,10 @@ var OpenAICompatibleChatLanguageModel = class {
493
496
  }
494
497
  return {
495
498
  content,
496
- finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),
499
+ finishReason: {
500
+ unified: mapOpenAICompatibleFinishReason(choice.finish_reason),
501
+ raw: (_f = choice.finish_reason) != null ? _f : void 0
502
+ },
497
503
  usage: convertOpenAICompatibleChatUsage(responseBody.usage),
498
504
  providerMetadata,
499
505
  request: { body },
@@ -530,7 +536,10 @@ var OpenAICompatibleChatLanguageModel = class {
530
536
  fetch: this.config.fetch
531
537
  });
532
538
  const toolCalls = [];
533
- let finishReason = "unknown";
539
+ let finishReason = {
540
+ unified: "other",
541
+ raw: void 0
542
+ };
534
543
  let usage = void 0;
535
544
  let isFirstChunk = true;
536
545
  const providerOptionsName = this.providerOptionsName;
@@ -543,18 +552,18 @@ var OpenAICompatibleChatLanguageModel = class {
543
552
  controller.enqueue({ type: "stream-start", warnings });
544
553
  },
545
554
  transform(chunk, controller) {
546
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
555
+ var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
547
556
  if (options.includeRawChunks) {
548
557
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
549
558
  }
550
559
  if (!chunk.success) {
551
- finishReason = "error";
560
+ finishReason = { unified: "error", raw: void 0 };
552
561
  controller.enqueue({ type: "error", error: chunk.error });
553
562
  return;
554
563
  }
555
564
  metadataExtractor == null ? void 0 : metadataExtractor.processChunk(chunk.rawValue);
556
565
  if ("error" in chunk.value) {
557
- finishReason = "error";
566
+ finishReason = { unified: "error", raw: void 0 };
558
567
  controller.enqueue({
559
568
  type: "error",
560
569
  error: chunk.value.error.message
@@ -574,15 +583,16 @@ var OpenAICompatibleChatLanguageModel = class {
574
583
  }
575
584
  const choice = value.choices[0];
576
585
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
577
- finishReason = mapOpenAICompatibleFinishReason(
578
- choice.finish_reason
579
- );
586
+ finishReason = {
587
+ unified: mapOpenAICompatibleFinishReason(choice.finish_reason),
588
+ raw: (_a2 = choice.finish_reason) != null ? _a2 : void 0
589
+ };
580
590
  }
581
591
  if ((choice == null ? void 0 : choice.delta) == null) {
582
592
  return;
583
593
  }
584
594
  const delta = choice.delta;
585
- const reasoningContent = (_a2 = delta.reasoning_content) != null ? _a2 : delta.reasoning;
595
+ const reasoningContent = (_b = delta.reasoning_content) != null ? _b : delta.reasoning;
586
596
  if (reasoningContent) {
587
597
  if (!isActiveReasoning) {
588
598
  controller.enqueue({
@@ -618,7 +628,7 @@ var OpenAICompatibleChatLanguageModel = class {
618
628
  message: `Expected 'id' to be a string.`
619
629
  });
620
630
  }
621
- if (((_b = toolCallDelta.function) == null ? void 0 : _b.name) == null) {
631
+ if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
622
632
  throw new import_provider3.InvalidResponseDataError({
623
633
  data: toolCallDelta,
624
634
  message: `Expected 'function.name' to be a string.`
@@ -634,12 +644,12 @@ var OpenAICompatibleChatLanguageModel = class {
634
644
  type: "function",
635
645
  function: {
636
646
  name: toolCallDelta.function.name,
637
- arguments: (_c = toolCallDelta.function.arguments) != null ? _c : ""
647
+ arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
638
648
  },
639
649
  hasFinished: false
640
650
  };
641
651
  const toolCall2 = toolCalls[index];
642
- if (((_d = toolCall2.function) == null ? void 0 : _d.name) != null && ((_e = toolCall2.function) == null ? void 0 : _e.arguments) != null) {
652
+ if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
643
653
  if (toolCall2.function.arguments.length > 0) {
644
654
  controller.enqueue({
645
655
  type: "tool-input-delta",
@@ -654,7 +664,7 @@ var OpenAICompatibleChatLanguageModel = class {
654
664
  });
655
665
  controller.enqueue({
656
666
  type: "tool-call",
657
- toolCallId: (_f = toolCall2.id) != null ? _f : (0, import_provider_utils2.generateId)(),
667
+ toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils2.generateId)(),
658
668
  toolName: toolCall2.function.name,
659
669
  input: toolCall2.function.arguments
660
670
  });
@@ -667,22 +677,22 @@ var OpenAICompatibleChatLanguageModel = class {
667
677
  if (toolCall.hasFinished) {
668
678
  continue;
669
679
  }
670
- if (((_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null) {
671
- toolCall.function.arguments += (_i = (_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null ? _i : "";
680
+ if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
681
+ toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
672
682
  }
673
683
  controller.enqueue({
674
684
  type: "tool-input-delta",
675
685
  id: toolCall.id,
676
- delta: (_j = toolCallDelta.function.arguments) != null ? _j : ""
686
+ delta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
677
687
  });
678
- if (((_k = toolCall.function) == null ? void 0 : _k.name) != null && ((_l = toolCall.function) == null ? void 0 : _l.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
688
+ if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
679
689
  controller.enqueue({
680
690
  type: "tool-input-end",
681
691
  id: toolCall.id
682
692
  });
683
693
  controller.enqueue({
684
694
  type: "tool-call",
685
- toolCallId: (_m = toolCall.id) != null ? _m : (0, import_provider_utils2.generateId)(),
695
+ toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils2.generateId)(),
686
696
  toolName: toolCall.function.name,
687
697
  input: toolCall.function.arguments
688
698
  });
@@ -949,7 +959,7 @@ function mapOpenAICompatibleFinishReason2(finishReason) {
949
959
  case "tool_calls":
950
960
  return "tool-calls";
951
961
  default:
952
- return "unknown";
962
+ return "other";
953
963
  }
954
964
  }
955
965
 
@@ -1095,7 +1105,10 @@ var OpenAICompatibleCompletionLanguageModel = class {
1095
1105
  return {
1096
1106
  content,
1097
1107
  usage: convertOpenAICompatibleCompletionUsage(response.usage),
1098
- finishReason: mapOpenAICompatibleFinishReason2(choice.finish_reason),
1108
+ finishReason: {
1109
+ unified: mapOpenAICompatibleFinishReason2(choice.finish_reason),
1110
+ raw: choice.finish_reason
1111
+ },
1099
1112
  request: { body: args },
1100
1113
  response: {
1101
1114
  ...getResponseMetadata2(response),
@@ -1127,7 +1140,10 @@ var OpenAICompatibleCompletionLanguageModel = class {
1127
1140
  abortSignal: options.abortSignal,
1128
1141
  fetch: this.config.fetch
1129
1142
  });
1130
- let finishReason = "unknown";
1143
+ let finishReason = {
1144
+ unified: "other",
1145
+ raw: void 0
1146
+ };
1131
1147
  let usage = void 0;
1132
1148
  let isFirstChunk = true;
1133
1149
  return {
@@ -1137,17 +1153,18 @@ var OpenAICompatibleCompletionLanguageModel = class {
1137
1153
  controller.enqueue({ type: "stream-start", warnings });
1138
1154
  },
1139
1155
  transform(chunk, controller) {
1156
+ var _a;
1140
1157
  if (options.includeRawChunks) {
1141
1158
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1142
1159
  }
1143
1160
  if (!chunk.success) {
1144
- finishReason = "error";
1161
+ finishReason = { unified: "error", raw: void 0 };
1145
1162
  controller.enqueue({ type: "error", error: chunk.error });
1146
1163
  return;
1147
1164
  }
1148
1165
  const value = chunk.value;
1149
1166
  if ("error" in value) {
1150
- finishReason = "error";
1167
+ finishReason = { unified: "error", raw: void 0 };
1151
1168
  controller.enqueue({ type: "error", error: value.error });
1152
1169
  return;
1153
1170
  }
@@ -1167,9 +1184,10 @@ var OpenAICompatibleCompletionLanguageModel = class {
1167
1184
  }
1168
1185
  const choice = value.choices[0];
1169
1186
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
1170
- finishReason = mapOpenAICompatibleFinishReason2(
1171
- choice.finish_reason
1172
- );
1187
+ finishReason = {
1188
+ unified: mapOpenAICompatibleFinishReason2(choice.finish_reason),
1189
+ raw: (_a = choice.finish_reason) != null ? _a : void 0
1190
+ };
1173
1191
  }
1174
1192
  if ((choice == null ? void 0 : choice.text) != null) {
1175
1193
  controller.enqueue({
@@ -1461,7 +1479,7 @@ async function fileToBlob(file) {
1461
1479
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
1462
1480
 
1463
1481
  // src/version.ts
1464
- var VERSION = true ? "2.0.0-beta.57" : "0.0.0-test";
1482
+ var VERSION = true ? "2.0.0-beta.59" : "0.0.0-test";
1465
1483
 
1466
1484
  // src/openai-compatible-provider.ts
1467
1485
  function createOpenAICompatible(options) {