ai 2.2.33 → 2.2.34

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -795,7 +795,11 @@ declare function AWSBedrockCohereStream(response: AWSBedrockResponse, callbacks?
795
795
  declare function AWSBedrockLlama2Stream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
796
796
  declare function AWSBedrockStream(response: AWSBedrockResponse, callbacks: AIStreamCallbacksAndOptions | undefined, extractTextDeltaFromChunk: (chunk: any) => string): ReadableStream<any>;
797
797
 
798
- declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
798
+ interface StreamChunk {
799
+ text?: string;
800
+ eventType: 'stream-start' | 'search-queries-generation' | 'search-results' | 'text-generation' | 'citation-generation' | 'stream-end';
801
+ }
802
+ declare function CohereStream(reader: Response | AsyncIterable<StreamChunk>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
799
803
 
800
804
  interface GenerateContentResponse {
801
805
  candidates?: GenerateContentCandidate[];
package/dist/index.js CHANGED
@@ -402,10 +402,11 @@ var experimental_StreamData = class {
402
402
  controller.enqueue(encodedData);
403
403
  }
404
404
  if (self.messageAnnotations.length) {
405
- const encodedmessageAnnotations = self.encoder.encode(
405
+ const encodedMessageAnnotations = self.encoder.encode(
406
406
  formatStreamPart("message_annotations", self.messageAnnotations)
407
407
  );
408
- controller.enqueue(encodedmessageAnnotations);
408
+ self.messageAnnotations = [];
409
+ controller.enqueue(encodedMessageAnnotations);
409
410
  }
410
411
  controller.enqueue(chunk);
411
412
  },
@@ -660,14 +661,29 @@ function createParser2(res) {
660
661
  }
661
662
  });
662
663
  }
664
+ async function* streamable2(stream) {
665
+ for await (const chunk of stream) {
666
+ if (chunk.eventType === "text-generation") {
667
+ const text = chunk.text;
668
+ if (text)
669
+ yield text;
670
+ }
671
+ }
672
+ }
663
673
  function CohereStream(reader, callbacks) {
664
- return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
665
- createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
666
- );
674
+ if (Symbol.asyncIterator in reader) {
675
+ return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
676
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
677
+ );
678
+ } else {
679
+ return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
680
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
681
+ );
682
+ }
667
683
  }
668
684
 
669
685
  // streams/google-generative-ai-stream.ts
670
- async function* streamable2(response) {
686
+ async function* streamable3(response) {
671
687
  var _a, _b, _c;
672
688
  for await (const chunk of response.stream) {
673
689
  const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
@@ -681,7 +697,7 @@ async function* streamable2(response) {
681
697
  }
682
698
  }
683
699
  function GoogleGenerativeAIStream(response, cb) {
684
- return readableFromAsyncIterable(streamable2(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
700
+ return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
685
701
  }
686
702
 
687
703
  // streams/huggingface-stream.ts
@@ -780,7 +796,7 @@ function parseOpenAIStream() {
780
796
  const extract = chunkToText();
781
797
  return (data) => extract(JSON.parse(data));
782
798
  }
783
- async function* streamable3(stream) {
799
+ async function* streamable4(stream) {
784
800
  const extract = chunkToText();
785
801
  for await (let chunk of stream) {
786
802
  if ("promptFilterResults" in chunk) {
@@ -869,7 +885,7 @@ function OpenAIStream(res, callbacks) {
869
885
  const cb = callbacks;
870
886
  let stream;
871
887
  if (Symbol.asyncIterator in res) {
872
- stream = readableFromAsyncIterable(streamable3(res)).pipeThrough(
888
+ stream = readableFromAsyncIterable(streamable4(res)).pipeThrough(
873
889
  createCallbacksTransformer(
874
890
  (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
875
891
  ...cb,
@@ -1140,6 +1156,11 @@ async function* readDataStream(reader, {
1140
1156
  }
1141
1157
 
1142
1158
  // shared/parse-complex-response.ts
1159
+ function assignAnnotationsToMessage(message, annotations) {
1160
+ if (!message || !annotations || !annotations.length)
1161
+ return message;
1162
+ return { ...message, annotations: [...annotations] };
1163
+ }
1143
1164
  async function parseComplexResponse({
1144
1165
  reader,
1145
1166
  abortControllerRef,
@@ -1152,6 +1173,7 @@ async function parseComplexResponse({
1152
1173
  const prefixMap = {
1153
1174
  data: []
1154
1175
  };
1176
+ let message_annotations = void 0;
1155
1177
  for await (const { type, value } of readDataStream(reader, {
1156
1178
  isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
1157
1179
  })) {
@@ -1170,22 +1192,6 @@ async function parseComplexResponse({
1170
1192
  };
1171
1193
  }
1172
1194
  }
1173
- if (type == "message_annotations") {
1174
- if (prefixMap["text"]) {
1175
- prefixMap["text"] = {
1176
- ...prefixMap["text"],
1177
- annotations: [...prefixMap["text"].annotations || [], ...value]
1178
- };
1179
- } else {
1180
- prefixMap["text"] = {
1181
- id: generateId(),
1182
- role: "assistant",
1183
- content: "",
1184
- annotations: [...value],
1185
- createdAt
1186
- };
1187
- }
1188
- }
1189
1195
  let functionCallMessage = null;
1190
1196
  if (type === "function_call") {
1191
1197
  prefixMap["function_call"] = {
@@ -1212,12 +1218,41 @@ async function parseComplexResponse({
1212
1218
  if (type === "data") {
1213
1219
  prefixMap["data"].push(...value);
1214
1220
  }
1215
- const responseMessage = prefixMap["text"];
1216
- const merged = [
1217
- functionCallMessage,
1218
- toolCallMessage,
1219
- responseMessage
1220
- ].filter(Boolean);
1221
+ let responseMessage = prefixMap["text"];
1222
+ if (type === "message_annotations") {
1223
+ if (!message_annotations) {
1224
+ message_annotations = [...value];
1225
+ } else {
1226
+ message_annotations.push(...value);
1227
+ }
1228
+ functionCallMessage = assignAnnotationsToMessage(
1229
+ prefixMap["function_call"],
1230
+ message_annotations
1231
+ );
1232
+ toolCallMessage = assignAnnotationsToMessage(
1233
+ prefixMap["tool_calls"],
1234
+ message_annotations
1235
+ );
1236
+ responseMessage = assignAnnotationsToMessage(
1237
+ prefixMap["text"],
1238
+ message_annotations
1239
+ );
1240
+ }
1241
+ if (message_annotations == null ? void 0 : message_annotations.length) {
1242
+ const messagePrefixKeys = [
1243
+ "text",
1244
+ "function_call",
1245
+ "tool_calls"
1246
+ ];
1247
+ messagePrefixKeys.forEach((key) => {
1248
+ if (prefixMap[key]) {
1249
+ prefixMap[key].annotations = [...message_annotations];
1250
+ }
1251
+ });
1252
+ }
1253
+ const merged = [functionCallMessage, toolCallMessage, responseMessage].filter(Boolean).map((message) => ({
1254
+ ...assignAnnotationsToMessage(message, message_annotations)
1255
+ }));
1221
1256
  update(merged, [...prefixMap["data"]]);
1222
1257
  }
1223
1258
  onFinish == null ? void 0 : onFinish(prefixMap);