@ai-sdk/openai-compatible 1.0.0-alpha.8 → 1.0.0-beta.1

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,89 @@
  # @ai-sdk/openai-compatible
 
+ ## 1.0.0-beta.1
+
+ ### Patch Changes
+
+ - Updated dependencies [742b7be]
+ - Updated dependencies [7cddb72]
+ - Updated dependencies [ccce59b]
+ - Updated dependencies [e2b9e4b]
+ - Updated dependencies [45c1ea2]
+ - Updated dependencies [e025824]
+ - Updated dependencies [0d06df6]
+ - Updated dependencies [472524a]
+ - Updated dependencies [dd3ff01]
+ - Updated dependencies [7435eb5]
+ - Updated dependencies [cb68df0]
+ - Updated dependencies [bfdca8d]
+ - Updated dependencies [44f4aba]
+ - Updated dependencies [023ba40]
+ - Updated dependencies [5e57fae]
+ - Updated dependencies [71f938d]
+ - Updated dependencies [28a5ed5]
+   - @ai-sdk/provider@2.0.0-beta.1
+   - @ai-sdk/provider-utils@3.0.0-beta.1
+
+ ## 1.0.0-alpha.15
+
+ ### Patch Changes
+
+ - Updated dependencies [48d257a]
+ - Updated dependencies [8ba77a7]
+   - @ai-sdk/provider@2.0.0-alpha.15
+   - @ai-sdk/provider-utils@3.0.0-alpha.15
+
+ ## 1.0.0-alpha.14
+
+ ### Patch Changes
+
+ - Updated dependencies [b5da06a]
+ - Updated dependencies [63f9e9b]
+ - Updated dependencies [2e13791]
+   - @ai-sdk/provider@2.0.0-alpha.14
+   - @ai-sdk/provider-utils@3.0.0-alpha.14
+
+ ## 1.0.0-alpha.13
+
+ ### Patch Changes
+
+ - Updated dependencies [68ecf2f]
+   - @ai-sdk/provider@2.0.0-alpha.13
+   - @ai-sdk/provider-utils@3.0.0-alpha.13
+
+ ## 1.0.0-alpha.12
+
+ ### Patch Changes
+
+ - e2aceaf: feat: add raw chunk support
+ - Updated dependencies [e2aceaf]
+   - @ai-sdk/provider@2.0.0-alpha.12
+   - @ai-sdk/provider-utils@3.0.0-alpha.12
+
+ ## 1.0.0-alpha.11
+
+ ### Patch Changes
+
+ - Updated dependencies [c1e6647]
+   - @ai-sdk/provider@2.0.0-alpha.11
+   - @ai-sdk/provider-utils@3.0.0-alpha.11
+
+ ## 1.0.0-alpha.10
+
+ ### Patch Changes
+
+ - Updated dependencies [c4df419]
+   - @ai-sdk/provider@2.0.0-alpha.10
+   - @ai-sdk/provider-utils@3.0.0-alpha.10
+
+ ## 1.0.0-alpha.9
+
+ ### Patch Changes
+
+ - Updated dependencies [811dff3]
+   - @ai-sdk/provider@2.0.0-alpha.9
+   - @ai-sdk/provider-utils@3.0.0-alpha.9
+
  ## 1.0.0-alpha.8
 
  ### Patch Changes
package/dist/index.js CHANGED
@@ -103,7 +103,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
          type: "function",
          function: {
            name: part.toolName,
-           arguments: JSON.stringify(part.args)
+           arguments: JSON.stringify(part.input)
          },
          ...partMetadata
        });
@@ -121,11 +121,24 @@ function convertToOpenAICompatibleChatMessages(prompt) {
      }
      case "tool": {
        for (const toolResponse of content) {
+         const output = toolResponse.output;
+         let contentValue;
+         switch (output.type) {
+           case "text":
+           case "error-text":
+             contentValue = output.value;
+             break;
+           case "content":
+           case "json":
+           case "error-json":
+             contentValue = JSON.stringify(output.value);
+             break;
+         }
          const toolResponseMetadata = getOpenAIMetadata(toolResponse);
          messages.push({
            role: "tool",
            tool_call_id: toolResponse.toolCallId,
-           content: JSON.stringify(toolResponse.result),
+           content: contentValue,
            ...toolResponseMetadata
          });
        }
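
Note on the hunk above: tool results are no longer unconditionally JSON-stringified from toolResponse.result; the converter now switches on toolResponse.output.type. A minimal sketch of the mapping, with invented tool outputs for illustration:

// Sketch only: how the switch above maps tool outputs to the string
// `content` of an OpenAI-style "tool" message. Values are made up.
const examples = [
  { type: "text", value: "72F and sunny" },          // passed through as-is
  { type: "error-text", value: "city not found" },   // passed through as-is
  { type: "json", value: { tempF: 72 } },            // JSON-stringified
  { type: "error-json", value: { code: 404 } },      // JSON-stringified
];

for (const output of examples) {
  const contentValue =
    output.type === "text" || output.type === "error-text"
      ? output.value
      : JSON.stringify(output.value);
  console.log(output.type, "->", contentValue);
}
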
@@ -223,7 +236,7 @@ function prepareTools({
      function: {
        name: tool.name,
        description: tool.description,
-       parameters: tool.parameters
+       parameters: tool.inputSchema
      }
    });
  }
@@ -399,10 +412,9 @@ var OpenAICompatibleChatLanguageModel = class {
    for (const toolCall of choice.message.tool_calls) {
      content.push({
        type: "tool-call",
-       toolCallType: "function",
        toolCallId: (_a = toolCall.id) != null ? _a : (0, import_provider_utils.generateId)(),
        toolName: toolCall.function.name,
-       args: toolCall.function.arguments
+       input: toolCall.function.arguments
      });
    }
  }
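
The two hunks above are the same rename seen from both sides: tool definitions are now sent from tool.inputSchema instead of tool.parameters, and generated tool-call content parts carry input instead of args, with the toolCallType discriminator dropped. Roughly, for a hypothetical getWeather call (ids and arguments invented):

// Sketch only; toolName/argument values are made up.
const before = {
  type: "tool-call",
  toolCallType: "function",
  toolCallId: "call_123",
  toolName: "getWeather",
  args: '{"city":"Berlin"}',
};
const after = {
  type: "tool-call",
  toolCallId: "call_123",
  toolName: "getWeather",
  input: '{"city":"Berlin"}',
};
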
@@ -480,6 +492,8 @@ var OpenAICompatibleChatLanguageModel = class {
  };
  let isFirstChunk = true;
  const providerOptionsName = this.providerOptionsName;
+ let isActiveReasoning = false;
+ let isActiveText = false;
  return {
    stream: response.pipeThrough(
      new TransformStream({
@@ -489,6 +503,9 @@ var OpenAICompatibleChatLanguageModel = class {
  // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX
  transform(chunk, controller) {
    var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+   if (options.includeRawChunks) {
+     controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+   }
    if (!chunk.success) {
      finishReason = "error";
      controller.enqueue({ type: "error", error: chunk.error });
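
This is the change behind the 1.0.0-alpha.12 changelog entry "add raw chunk support": when the call options set includeRawChunks, every parsed chunk is mirrored into the stream as a raw part before normal handling (the completion model later in this diff gets the same guard). Roughly what a consumer would see, with an invented chunk payload:

// Sketch only; the rawValue below is an invented OpenAI-compatible SSE chunk.
const rawPart = {
  type: "raw",
  rawValue: {
    id: "chatcmpl-abc123",
    choices: [{ index: 0, delta: { content: "Hel" } }],
  },
};
// ...followed by the normalized parts ("text-start", "text-delta", ...).
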
@@ -543,15 +560,28 @@ var OpenAICompatibleChatLanguageModel = class {
  }
  const delta = choice.delta;
  if (delta.reasoning_content != null) {
+   if (!isActiveReasoning) {
+     controller.enqueue({
+       type: "reasoning-start",
+       id: "reasoning-0"
+     });
+     isActiveReasoning = true;
+   }
    controller.enqueue({
-     type: "reasoning",
-     text: delta.reasoning_content
+     type: "reasoning-delta",
+     id: "reasoning-0",
+     delta: delta.reasoning_content
    });
  }
  if (delta.content != null) {
+   if (!isActiveText) {
+     controller.enqueue({ type: "text-start", id: "txt-0" });
+     isActiveText = true;
+   }
    controller.enqueue({
-     type: "text",
-     text: delta.content
+     type: "text-delta",
+     id: "txt-0",
+     delta: delta.content
    });
  }
  if (delta.tool_calls != null) {
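
Plain reasoning and text stream parts become a start/delta/end lifecycle keyed by an id ("reasoning-0" and "txt-0" here); the matching end parts are emitted from flush, as shown in the @@ -638,6 +675,12 @@ hunk further down. For a response with reasoning followed by visible text, the emitted sequence looks roughly like this (delta payloads invented for illustration):

const parts = [
  { type: "reasoning-start", id: "reasoning-0" },
  { type: "reasoning-delta", id: "reasoning-0", delta: "Checking the forecast..." },
  { type: "text-start", id: "txt-0" },
  { type: "text-delta", id: "txt-0", delta: "It looks " },
  { type: "text-delta", id: "txt-0", delta: "sunny." },
  // flush() closes whatever is still open:
  { type: "reasoning-end", id: "reasoning-0" },
  { type: "text-end", id: "txt-0" },
];
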
@@ -576,6 +606,11 @@ var OpenAICompatibleChatLanguageModel = class {
      message: `Expected 'function.name' to be a string.`
    });
  }
+ controller.enqueue({
+   type: "tool-input-start",
+   id: toolCallDelta.id,
+   toolName: toolCallDelta.function.name
+ });
  toolCalls[index] = {
    id: toolCallDelta.id,
    type: "function",
@@ -589,20 +624,21 @@ var OpenAICompatibleChatLanguageModel = class {
  if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
    if (toolCall2.function.arguments.length > 0) {
      controller.enqueue({
-       type: "tool-call-delta",
-       toolCallType: "function",
-       toolCallId: toolCall2.id,
-       toolName: toolCall2.function.name,
-       argsTextDelta: toolCall2.function.arguments
+       type: "tool-input-start",
+       id: toolCall2.id,
+       toolName: toolCall2.function.name
      });
    }
    if ((0, import_provider_utils.isParsableJson)(toolCall2.function.arguments)) {
+     controller.enqueue({
+       type: "tool-input-end",
+       id: toolCall2.id
+     });
      controller.enqueue({
        type: "tool-call",
-       toolCallType: "function",
        toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils.generateId)(),
        toolName: toolCall2.function.name,
-       args: toolCall2.function.arguments
+       input: toolCall2.function.arguments
      });
      toolCall2.hasFinished = true;
    }
@@ -617,19 +653,20 @@ var OpenAICompatibleChatLanguageModel = class {
    toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
  }
  controller.enqueue({
-   type: "tool-call-delta",
-   toolCallType: "function",
-   toolCallId: toolCall.id,
-   toolName: toolCall.function.name,
-   argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
+   type: "tool-input-delta",
+   id: toolCall.id,
+   delta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
  });
  if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils.isParsableJson)(toolCall.function.arguments)) {
+   controller.enqueue({
+     type: "tool-input-end",
+     id: toolCall.id
+   });
    controller.enqueue({
      type: "tool-call",
-     toolCallType: "function",
      toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils.generateId)(),
      toolName: toolCall.function.name,
-     args: toolCall.function.arguments
+     input: toolCall.function.arguments
    });
    toolCall.hasFinished = true;
  }
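
The three hunks above replace tool-call-delta parts (toolCallType, toolCallId, argsTextDelta) with a tool-input-start / tool-input-delta / tool-input-end lifecycle keyed by the tool call id, and the final tool-call part carries input instead of args. For one streamed call whose JSON arguments arrive in two fragments, the sequence is roughly (ids and payloads invented):

const toolParts = [
  { type: "tool-input-start", id: "call_123", toolName: "getWeather" },
  { type: "tool-input-delta", id: "call_123", delta: '{"city":' },
  { type: "tool-input-delta", id: "call_123", delta: '"Berlin"}' },
  { type: "tool-input-end", id: "call_123" },
  { type: "tool-call", toolCallId: "call_123", toolName: "getWeather", input: '{"city":"Berlin"}' },
];
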
@@ -638,6 +675,12 @@ var OpenAICompatibleChatLanguageModel = class {
  },
  flush(controller) {
    var _a2, _b, _c, _d, _e;
+   if (isActiveReasoning) {
+     controller.enqueue({ type: "reasoning-end", id: "reasoning-0" });
+   }
+   if (isActiveText) {
+     controller.enqueue({ type: "text-end", id: "txt-0" });
+   }
    const providerMetadata = {
      [providerOptionsName]: {},
      ...metadataExtractor == null ? void 0 : metadataExtractor.buildMetadata()
@@ -1011,6 +1054,9 @@ var OpenAICompatibleCompletionLanguageModel = class {
  },
  transform(chunk, controller) {
    var _a, _b, _c;
+   if (options.includeRawChunks) {
+     controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+   }
    if (!chunk.success) {
      finishReason = "error";
      controller.enqueue({ type: "error", error: chunk.error });
@@ -1028,6 +1074,10 @@ var OpenAICompatibleCompletionLanguageModel = class {
      type: "response-metadata",
      ...getResponseMetadata(value)
    });
+   controller.enqueue({
+     type: "text-start",
+     id: "0"
+   });
  }
  if (value.usage != null) {
    usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
@@ -1042,12 +1092,16 @@ var OpenAICompatibleCompletionLanguageModel = class {
    }
    if ((choice == null ? void 0 : choice.text) != null) {
      controller.enqueue({
-       type: "text",
-       text: choice.text
+       type: "text-delta",
+       id: "0",
+       delta: choice.text
      });
    }
  },
  flush(controller) {
+   if (!isFirstChunk) {
+     controller.enqueue({ type: "text-end", id: "0" });
+   }
    controller.enqueue({
      type: "finish",
        finishReason,
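
The completion model gets the same text lifecycle: text-start (id "0") is emitted alongside the first chunk's response metadata, each choice.text becomes a text-delta, and flush appends text-end before the finish part, guarded by isFirstChunk so a stream that produced no chunks does not emit a dangling text-end. Roughly (payloads invented):

const completionParts = [
  { type: "response-metadata" /* ...getResponseMetadata(value) */ },
  { type: "text-start", id: "0" },
  { type: "text-delta", id: "0", delta: "Once upon" },
  { type: "text-delta", id: "0", delta: " a time" },
  { type: "text-end", id: "0" },                        // from flush()
  { type: "finish", finishReason: "stop" /* , usage */ },
];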