ai 2.2.32 → 2.2.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +41 -2
- package/dist/index.js +95 -32
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +95 -32
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -3
- package/react/dist/index.d.ts +5 -1
- package/react/dist/index.js +48 -24
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +48 -24
- package/react/dist/index.mjs.map +1 -1
- package/solid/dist/index.js +41 -22
- package/solid/dist/index.js.map +1 -1
- package/solid/dist/index.mjs +41 -22
- package/solid/dist/index.mjs.map +1 -1
- package/svelte/dist/index.js +41 -22
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +41 -22
- package/svelte/dist/index.mjs.map +1 -1
- package/vue/dist/index.js +41 -22
- package/vue/dist/index.js.map +1 -1
- package/vue/dist/index.mjs +41 -22
- package/vue/dist/index.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -353,10 +353,11 @@ var experimental_StreamData = class {
         controller.enqueue(encodedData);
       }
       if (self.messageAnnotations.length) {
-        const …
+        const encodedMessageAnnotations = self.encoder.encode(
           formatStreamPart("message_annotations", self.messageAnnotations)
         );
-        …
+        self.messageAnnotations = [];
+        controller.enqueue(encodedMessageAnnotations);
       }
       controller.enqueue(chunk);
     },
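Note: the hunk above changes the experimental_StreamData transform so that buffered message annotations are flushed (and the buffer cleared) before the wrapped chunk is forwarded. For orientation, a minimal sketch of how annotations typically reach that buffer on the server; the openai v4 client and the Next.js route-handler wiring follow the ai 2.2 docs and are assumptions here, and the annotation payload is invented:

    import OpenAI from "openai";
    import { OpenAIStream, StreamingTextResponse, experimental_StreamData } from "ai";

    const openai = new OpenAI();

    export async function POST(req) {
      const { messages } = await req.json();
      const response = await openai.chat.completions.create({
        model: "gpt-3.5-turbo",
        stream: true,
        messages,
      });

      // Side channel for values that ride along with the text stream.
      const data = new experimental_StreamData();

      const stream = OpenAIStream(response, {
        experimental_streamData: true,
        onFinal() {
          data.close(); // lets the transform in the hunk above flush and terminate
        },
      });

      // Payloads appended here accumulate in messageAnnotations and are encoded
      // as "message_annotations" stream parts by the code shown above.
      data.appendMessageAnnotation({ source: "example-annotation" });

      return new StreamingTextResponse(stream, {}, data);
    }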
@@ -611,14 +612,29 @@ function createParser2(res) {
     }
   });
 }
+async function* streamable2(stream) {
+  for await (const chunk of stream) {
+    if (chunk.eventType === "text-generation") {
+      const text = chunk.text;
+      if (text)
+        yield text;
+    }
+  }
+}
 function CohereStream(reader, callbacks) {
-  …
-  …
-  …
+  if (Symbol.asyncIterator in reader) {
+    return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    );
+  } else {
+    return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    );
+  }
 }
 
 // streams/google-generative-ai-stream.ts
-async function* streamable2(response) {
+async function* streamable3(response) {
   var _a, _b, _c;
   for await (const chunk of response.stream) {
     const parts = (_c = (_b = (_a = chunk.candidates) == null ? void 0 : _a[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
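Note: CohereStream now branches on Symbol.asyncIterator: async-iterable inputs go through the new streamable2 generator, while everything else keeps using the existing SSE parser (createParser2). A minimal sketch of the new path, using a hand-rolled async generator that mimics the { eventType, text } chunk shape the generator filters on (the generator and its chunks are illustrative stand-ins, not the Cohere SDK):

    import { CohereStream, StreamingTextResponse } from "ai";

    // Illustrative stand-in for a Cohere chat stream: an async iterable of
    // { eventType, text } chunks, the shape streamable2 matches on above.
    async function* fakeCohereChatStream() {
      yield { eventType: "text-generation", text: "Hello" };
      yield { eventType: "text-generation", text: ", world" };
    }

    export async function POST() {
      // Because the argument has Symbol.asyncIterator, CohereStream takes the
      // readableFromAsyncIterable(streamable2(...)) branch added in this hunk.
      const stream = CohereStream(fakeCohereChatStream());
      return new StreamingTextResponse(stream);
    }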
@@ -632,7 +648,7 @@ async function* streamable2(response) {
   }
 }
 function GoogleGenerativeAIStream(response, cb) {
-  return readableFromAsyncIterable(…
+  return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
 }
 
 // streams/huggingface-stream.ts
@@ -731,9 +747,37 @@ function parseOpenAIStream() {
   const extract = chunkToText();
   return (data) => extract(JSON.parse(data));
 }
-async function* …
+async function* streamable4(stream) {
   const extract = chunkToText();
-  for await (…
+  for await (let chunk of stream) {
+    if ("promptFilterResults" in chunk) {
+      chunk = {
+        id: chunk.id,
+        created: chunk.created.getDate(),
+        object: chunk.object,
+        // not exposed by Azure API
+        model: chunk.model,
+        // not exposed by Azure API
+        choices: chunk.choices.map((choice) => {
+          var _a, _b, _c, _d, _e, _f, _g;
+          return {
+            delta: {
+              content: (_a = choice.delta) == null ? void 0 : _a.content,
+              function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
+              role: (_c = choice.delta) == null ? void 0 : _c.role,
+              tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
+                index,
+                id: toolCall.id,
+                function: toolCall.function,
+                type: toolCall.type
+              })) : void 0
+            },
+            finish_reason: choice.finishReason,
+            index: choice.index
+          };
+        })
+      };
+    }
     const text = extract(chunk);
     if (text)
       yield text;
@@ -792,7 +836,7 @@ function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
   if (Symbol.asyncIterator in res) {
-    stream = readableFromAsyncIterable(…
+    stream = readableFromAsyncIterable(streamable4(res)).pipeThrough(
       createCallbacksTransformer(
         (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
           ...cb,
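Note: taken together, the two hunks above let OpenAIStream's async-iterable branch handle Azure OpenAI SDK events: streamable4 detects them via the promptFilterResults field and rewrites the camelCase fields (functionCall, toolCalls, finishReason) into the snake_case wire shape chunkToText expects. A minimal sketch with a hand-rolled Azure-style event (field names taken from the hunk, values invented; a real Azure client stream would be used in practice):

    import { OpenAIStream, StreamingTextResponse } from "ai";

    // Illustrative stand-in for an Azure OpenAI event stream: camelCase fields
    // plus promptFilterResults, the marker streamable4 branches on.
    async function* fakeAzureChatStream() {
      yield {
        promptFilterResults: [],
        id: "chatcmpl-123",
        created: new Date(), // note: streamable4 calls .getDate() on this
        object: "chat.completion.chunk",
        model: "gpt-35-turbo",
        choices: [
          {
            delta: { content: "Hello from Azure", role: "assistant" },
            finishReason: null,
            index: 0,
          },
        ],
      };
    }

    export async function POST() {
      // The async-iterable branch of OpenAIStream pipes chunks through
      // streamable4, which normalizes them before text extraction.
      const stream = OpenAIStream(fakeAzureChatStream());
      return new StreamingTextResponse(stream);
    }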
@@ -1063,6 +1107,11 @@ async function* readDataStream(reader, {
 }
 
 // shared/parse-complex-response.ts
+function assignAnnotationsToMessage(message, annotations) {
+  if (!message || !annotations || !annotations.length)
+    return message;
+  return { ...message, annotations: [...annotations] };
+}
 async function parseComplexResponse({
   reader,
   abortControllerRef,
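Note: assignAnnotationsToMessage is a small pure helper: it returns the message untouched when either argument is missing or empty, and otherwise returns a shallow copy carrying a fresh annotations array. A few hypothetical calls to show the contract (the helper is copied from the hunk above; the message and annotation values are invented):

    // Copied from the hunk above for illustration.
    function assignAnnotationsToMessage(message, annotations) {
      if (!message || !annotations || !annotations.length)
        return message;
      return { ...message, annotations: [...annotations] };
    }

    // Hypothetical inputs shaped like the prefixMap entries in parseComplexResponse.
    const msg = { id: "1", role: "assistant", content: "Hi" };

    assignAnnotationsToMessage(msg, []);                    // -> msg, unchanged
    assignAnnotationsToMessage(undefined, [{ note: "x" }]); // -> undefined
    assignAnnotationsToMessage(msg, [{ note: "x" }]);       // -> { ...msg, annotations: [{ note: "x" }] }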
@@ -1075,6 +1124,7 @@ async function parseComplexResponse({
   const prefixMap = {
     data: []
   };
+  let message_annotations = void 0;
   for await (const { type, value } of readDataStream(reader, {
     isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
   })) {
|
@@ -1093,22 +1143,6 @@ async function parseComplexResponse({
|
|
1093
1143
|
};
|
1094
1144
|
}
|
1095
1145
|
}
|
1096
|
-
if (type == "message_annotations") {
|
1097
|
-
if (prefixMap["text"]) {
|
1098
|
-
prefixMap["text"] = {
|
1099
|
-
...prefixMap["text"],
|
1100
|
-
annotations: [...prefixMap["text"].annotations || [], ...value]
|
1101
|
-
};
|
1102
|
-
} else {
|
1103
|
-
prefixMap["text"] = {
|
1104
|
-
id: generateId(),
|
1105
|
-
role: "assistant",
|
1106
|
-
content: "",
|
1107
|
-
annotations: [...value],
|
1108
|
-
createdAt
|
1109
|
-
};
|
1110
|
-
}
|
1111
|
-
}
|
1112
1146
|
let functionCallMessage = null;
|
1113
1147
|
if (type === "function_call") {
|
1114
1148
|
prefixMap["function_call"] = {
|
@@ -1135,12 +1169,41 @@ async function parseComplexResponse({
|
|
1135
1169
|
if (type === "data") {
|
1136
1170
|
prefixMap["data"].push(...value);
|
1137
1171
|
}
|
1138
|
-
|
1139
|
-
|
1140
|
-
|
1141
|
-
|
1142
|
-
|
1143
|
-
|
1172
|
+
let responseMessage = prefixMap["text"];
|
1173
|
+
if (type === "message_annotations") {
|
1174
|
+
if (!message_annotations) {
|
1175
|
+
message_annotations = [...value];
|
1176
|
+
} else {
|
1177
|
+
message_annotations.push(...value);
|
1178
|
+
}
|
1179
|
+
functionCallMessage = assignAnnotationsToMessage(
|
1180
|
+
prefixMap["function_call"],
|
1181
|
+
message_annotations
|
1182
|
+
);
|
1183
|
+
toolCallMessage = assignAnnotationsToMessage(
|
1184
|
+
prefixMap["tool_calls"],
|
1185
|
+
message_annotations
|
1186
|
+
);
|
1187
|
+
responseMessage = assignAnnotationsToMessage(
|
1188
|
+
prefixMap["text"],
|
1189
|
+
message_annotations
|
1190
|
+
);
|
1191
|
+
}
|
1192
|
+
if (message_annotations == null ? void 0 : message_annotations.length) {
|
1193
|
+
const messagePrefixKeys = [
|
1194
|
+
"text",
|
1195
|
+
"function_call",
|
1196
|
+
"tool_calls"
|
1197
|
+
];
|
1198
|
+
messagePrefixKeys.forEach((key) => {
|
1199
|
+
if (prefixMap[key]) {
|
1200
|
+
prefixMap[key].annotations = [...message_annotations];
|
1201
|
+
}
|
1202
|
+
});
|
1203
|
+
}
|
1204
|
+
const merged = [functionCallMessage, toolCallMessage, responseMessage].filter(Boolean).map((message) => ({
|
1205
|
+
...assignAnnotationsToMessage(message, message_annotations)
|
1206
|
+
}));
|
1144
1207
|
update(merged, [...prefixMap["data"]]);
|
1145
1208
|
}
|
1146
1209
|
onFinish == null ? void 0 : onFinish(prefixMap);
|