@ai-sdk/openai-compatible 2.0.6 → 2.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @ai-sdk/openai-compatible
 
+ ## 2.0.7
+
+ ### Patch Changes
+
+ - cd7bb0e: feat(openai-compat): add thoughtSignature handling for google models
+
  ## 2.0.6
 
  ### Patch Changes
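
The changelog entry is terse; concretely, the patch threads a Google "thought signature" through tool calls in both directions. The sketch below is illustrative only (ids, tool names, and signature values are invented); the field names mirror what the hunks in package/dist/index.js read and write, and the "google" metadata key assumes a provider whose providerOptionsName is "google".

    // SDK-facing shapes touched by 2.0.7 (TypeScript sketch, not package code).

    // 1) Outgoing prompt: a tool-call part may carry the signature under
    //    providerOptions.google.thoughtSignature; the message converter
    //    forwards it to the request as extra_content (see the dist hunks below).
    const promptToolCallPart = {
      type: 'tool-call' as const,
      toolCallId: 'call_1',
      toolName: 'getWeather',
      input: { city: 'Berlin' },
      providerOptions: {
        google: { thoughtSignature: 'sig_abc123' },
      },
    };

    // 2) Incoming result: a tool call whose extra_content carried a signature
    //    is emitted as a tool-call content part with provider metadata attached.
    const generatedToolCallPart = {
      type: 'tool-call' as const,
      toolCallId: 'call_1',
      toolName: 'getWeather',
      input: '{"city":"Berlin"}',
      providerMetadata: {
        google: { thoughtSignature: 'sig_abc123' },
      },
    };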
package/dist/index.js CHANGED
@@ -99,7 +99,7 @@ function getOpenAIMetadata(message) {
  return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
  }
  function convertToOpenAICompatibleChatMessages(prompt) {
- var _a;
+ var _a, _b, _c;
  const messages = [];
  for (const { role, content, ...message } of prompt) {
  const metadata = getOpenAIMetadata({ ...message });
@@ -158,6 +158,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
  break;
  }
  case "tool-call": {
+ const thoughtSignature = (_b = (_a = part.providerOptions) == null ? void 0 : _a.google) == null ? void 0 : _b.thoughtSignature;
  toolCalls.push({
  id: part.toolCallId,
  type: "function",
@@ -165,7 +166,15 @@ function convertToOpenAICompatibleChatMessages(prompt) {
  name: part.toolName,
  arguments: JSON.stringify(part.input)
  },
- ...partMetadata
+ ...partMetadata,
+ // Include extra_content for Google Gemini thought signatures
+ ...thoughtSignature ? {
+ extra_content: {
+ google: {
+ thought_signature: String(thoughtSignature)
+ }
+ }
+ } : {}
  });
  break;
  }
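
Taken together, the additions above mean a replayed tool call can now carry a Gemini-specific extra_content block in the outgoing request body. A hedged example of the serialized tool_calls entry (values invented; other assistant-message fields omitted):

    // Example entry in the assistant message's tool_calls array when the prompt
    // part carried providerOptions.google.thoughtSignature (values invented).
    const serializedToolCall = {
      id: 'call_1',
      type: 'function' as const,
      function: {
        name: 'getWeather',
        arguments: '{"city":"Berlin"}',
      },
      // Added only when a thought signature was present; the String() coercion
      // in the hunk above guards against non-string provider option values.
      extra_content: {
        google: { thought_signature: 'sig_abc123' },
      },
    };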
@@ -192,7 +201,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
  contentValue = output.value;
  break;
  case "execution-denied":
- contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
+ contentValue = (_c = output.reason) != null ? _c : "Tool execution denied.";
  break;
  case "content":
  case "json":
@@ -437,7 +446,7 @@ var OpenAICompatibleChatLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g, _h;
  const { args, warnings } = await this.getArgs({ ...options });
  const body = JSON.stringify(args);
  const {
@@ -473,21 +482,27 @@ var OpenAICompatibleChatLanguageModel = class {
  }
  if (choice.message.tool_calls != null) {
  for (const toolCall of choice.message.tool_calls) {
+ const thoughtSignature = (_c = (_b = toolCall.extra_content) == null ? void 0 : _b.google) == null ? void 0 : _c.thought_signature;
  content.push({
  type: "tool-call",
- toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils2.generateId)(),
+ toolCallId: (_d = toolCall.id) != null ? _d : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
- input: toolCall.function.arguments
+ input: toolCall.function.arguments,
+ ...thoughtSignature ? {
+ providerMetadata: {
+ [this.providerOptionsName]: { thoughtSignature }
+ }
+ } : {}
  });
  }
  }
  const providerMetadata = {
  [this.providerOptionsName]: {},
- ...await ((_d = (_c = this.config.metadataExtractor) == null ? void 0 : _c.extractMetadata) == null ? void 0 : _d.call(_c, {
+ ...await ((_f = (_e = this.config.metadataExtractor) == null ? void 0 : _e.extractMetadata) == null ? void 0 : _f.call(_e, {
  parsedBody: rawResponse
  }))
  };
- const completionTokenDetails = (_e = responseBody.usage) == null ? void 0 : _e.completion_tokens_details;
+ const completionTokenDetails = (_g = responseBody.usage) == null ? void 0 : _g.completion_tokens_details;
  if ((completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens) != null) {
  providerMetadata[this.providerOptionsName].acceptedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens;
  }
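
On the non-streaming path, doGenerate now reads the signature back out of the response. A hedged example of a response fragment that would take the new branch (the surrounding envelope is abbreviated and the values are invented):

    // choices[0].message.tool_calls[0].extra_content.google.thought_signature
    // is mapped to providerMetadata[this.providerOptionsName].thoughtSignature
    // on the emitted tool-call content part.
    const responseFragment = {
      choices: [
        {
          finish_reason: 'tool_calls',
          message: {
            role: 'assistant',
            content: null,
            tool_calls: [
              {
                id: 'call_1',
                type: 'function',
                function: { name: 'getWeather', arguments: '{"city":"Berlin"}' },
                extra_content: {
                  google: { thought_signature: 'sig_abc123' },
                },
              },
            ],
          },
        },
      ],
    };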
@@ -498,7 +513,7 @@ var OpenAICompatibleChatLanguageModel = class {
  content,
  finishReason: {
  unified: mapOpenAICompatibleFinishReason(choice.finish_reason),
- raw: (_f = choice.finish_reason) != null ? _f : void 0
+ raw: (_h = choice.finish_reason) != null ? _h : void 0
  },
  usage: convertOpenAICompatibleChatUsage(responseBody.usage),
  providerMetadata,
@@ -552,7 +567,7 @@ var OpenAICompatibleChatLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+ var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -620,7 +635,7 @@ var OpenAICompatibleChatLanguageModel = class {
  }
  if (delta.tool_calls != null) {
  for (const toolCallDelta of delta.tool_calls) {
- const index = toolCallDelta.index;
+ const index = (_c = toolCallDelta.index) != null ? _c : toolCalls.length;
  if (toolCalls[index] == null) {
  if (toolCallDelta.id == null) {
  throw new import_provider3.InvalidResponseDataError({
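
The index fallback above is a separate fix needed for the same endpoints: as the chunk schema comment later in this diff notes, Google's OpenAI-compatible streaming can omit tool_calls[].index, so a delta without an index is now appended at toolCalls.length instead of failing chunk validation. A hedged sketch of the two delta shapes (values invented):

    // Usual OpenAI-style streamed tool-call delta, with an explicit index:
    const indexedDelta = {
      index: 0,
      id: 'call_1',
      function: { name: 'getWeather', arguments: '{"ci' },
    };

    // Gemini-style delta without an index; 2.0.7 falls back to toolCalls.length:
    const indexlessDelta = {
      id: 'call_1',
      function: { name: 'getWeather', arguments: '{"ci' },
      extra_content: { google: { thought_signature: 'sig_abc123' } },
    };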
@@ -628,7 +643,7 @@ var OpenAICompatibleChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
+ if (((_d = toolCallDelta.function) == null ? void 0 : _d.name) == null) {
  throw new import_provider3.InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -644,12 +659,13 @@ var OpenAICompatibleChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
+ arguments: (_e = toolCallDelta.function.arguments) != null ? _e : ""
  },
- hasFinished: false
+ hasFinished: false,
+ thoughtSignature: (_h = (_g = (_f = toolCallDelta.extra_content) == null ? void 0 : _f.google) == null ? void 0 : _g.thought_signature) != null ? _h : void 0
  };
  const toolCall2 = toolCalls[index];
- if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
+ if (((_i = toolCall2.function) == null ? void 0 : _i.name) != null && ((_j = toolCall2.function) == null ? void 0 : _j.arguments) != null) {
  if (toolCall2.function.arguments.length > 0) {
  controller.enqueue({
  type: "tool-input-delta",
@@ -664,9 +680,16 @@ var OpenAICompatibleChatLanguageModel = class {
  });
  controller.enqueue({
  type: "tool-call",
- toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils2.generateId)(),
+ toolCallId: (_k = toolCall2.id) != null ? _k : (0, import_provider_utils2.generateId)(),
  toolName: toolCall2.function.name,
- input: toolCall2.function.arguments
+ input: toolCall2.function.arguments,
+ ...toolCall2.thoughtSignature ? {
+ providerMetadata: {
+ [providerOptionsName]: {
+ thoughtSignature: toolCall2.thoughtSignature
+ }
+ }
+ } : {}
  });
  toolCall2.hasFinished = true;
  }
@@ -677,24 +700,31 @@ var OpenAICompatibleChatLanguageModel = class {
  if (toolCall.hasFinished) {
  continue;
  }
- if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
- toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
+ if (((_l = toolCallDelta.function) == null ? void 0 : _l.arguments) != null) {
+ toolCall.function.arguments += (_n = (_m = toolCallDelta.function) == null ? void 0 : _m.arguments) != null ? _n : "";
  }
  controller.enqueue({
  type: "tool-input-delta",
  id: toolCall.id,
- delta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
+ delta: (_o = toolCallDelta.function.arguments) != null ? _o : ""
  });
- if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
+ if (((_p = toolCall.function) == null ? void 0 : _p.name) != null && ((_q = toolCall.function) == null ? void 0 : _q.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-input-end",
  id: toolCall.id
  });
  controller.enqueue({
  type: "tool-call",
- toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils2.generateId)(),
+ toolCallId: (_r = toolCall.id) != null ? _r : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
- input: toolCall.function.arguments
+ input: toolCall.function.arguments,
+ ...toolCall.thoughtSignature ? {
+ providerMetadata: {
+ [providerOptionsName]: {
+ thoughtSignature: toolCall.thoughtSignature
+ }
+ }
+ } : {}
  });
  toolCall.hasFinished = true;
  }
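
In the streaming path the signature arrives on the delta that first creates a tool call; it is stored on the accumulating record and only attached as providerMetadata when a finished tool-call part is enqueued, at each of the emission sites patched in this diff. A rough TypeScript sketch of that internal record (field names taken from the hunks, values invented):

    type StreamingToolCall = {
      id: string;
      type: 'function';
      function: { name: string; arguments: string };
      hasFinished: boolean;
      // Captured from extra_content.google.thought_signature on the delta that
      // created the entry; undefined when the provider sent none.
      thoughtSignature?: string;
    };

    const example: StreamingToolCall = {
      id: 'call_1',
      type: 'function',
      function: { name: 'getWeather', arguments: '{"city":"Berlin"}' },
      hasFinished: false,
      thoughtSignature: 'sig_abc123',
    };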
@@ -720,7 +750,14 @@ var OpenAICompatibleChatLanguageModel = class {
  type: "tool-call",
  toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
- input: toolCall.function.arguments
+ input: toolCall.function.arguments,
+ ...toolCall.thoughtSignature ? {
+ providerMetadata: {
+ [providerOptionsName]: {
+ thoughtSignature: toolCall.thoughtSignature
+ }
+ }
+ } : {}
  });
  }
  const providerMetadata = {
@@ -777,7 +814,13 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.looseObject({
  function: import_v43.z.object({
  name: import_v43.z.string(),
  arguments: import_v43.z.string()
- })
+ }),
+ // Support for Google Gemini thought signatures via OpenAI compatibility
+ extra_content: import_v43.z.object({
+ google: import_v43.z.object({
+ thought_signature: import_v43.z.string().nullish()
+ }).nullish()
+ }).nullish()
  })
  ).nullish()
  }),
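
The schema change is easiest to read in isolation. The standalone zod sketch below mirrors the extra_content field added to the response schema above and to the chunk schema below (using a plain zod import rather than the bundled import_v43 alias); because every level is nullish, providers that never send the field keep validating:

    import { z } from 'zod';

    // Mirror of the extra_content addition; not part of the package's exports.
    const toolCallExtraContent = z
      .object({
        google: z
          .object({ thought_signature: z.string().nullish() })
          .nullish(),
      })
      .nullish();

    // Both shapes parse successfully:
    console.log(toolCallExtraContent.safeParse(undefined).success); // true
    console.log(
      toolCallExtraContent.safeParse({ google: { thought_signature: 'sig_abc123' } }).success,
    ); // true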
@@ -801,12 +844,19 @@ var chunkBaseSchema = import_v43.z.looseObject({
  reasoning: import_v43.z.string().nullish(),
  tool_calls: import_v43.z.array(
  import_v43.z.object({
- index: import_v43.z.number(),
+ index: import_v43.z.number().nullish(),
+ //google does not send index
  id: import_v43.z.string().nullish(),
  function: import_v43.z.object({
  name: import_v43.z.string().nullish(),
  arguments: import_v43.z.string().nullish()
- })
+ }),
+ // Support for Google Gemini thought signatures via OpenAI compatibility
+ extra_content: import_v43.z.object({
+ google: import_v43.z.object({
+ thought_signature: import_v43.z.string().nullish()
+ }).nullish()
+ }).nullish()
  })
  ).nullish()
  }).nullish(),
@@ -1479,7 +1529,7 @@ async function fileToBlob(file) {
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
 
  // src/version.ts
- var VERSION = true ? "2.0.6" : "0.0.0-test";
+ var VERSION = true ? "2.0.7" : "0.0.0-test";
 
  // src/openai-compatible-provider.ts
  function createOpenAICompatible(options) {