ai 2.2.36 → 3.0.0
This diff shows the contents of publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
- package/dist/index.d.ts +4 -1
- package/dist/index.js +27 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +26 -7
- package/dist/index.mjs.map +1 -1
- package/package.json +21 -8
- package/rsc/dist/rsc-client.d.ts +1 -0
- package/rsc/dist/rsc-client.mjs +16 -0
- package/rsc/dist/rsc-client.mjs.map +1 -0
- package/rsc/dist/rsc-server.d.ts +143 -0
- package/rsc/dist/rsc-server.mjs +1112 -0
- package/rsc/dist/rsc-server.mjs.map +1 -0
- package/rsc/dist/rsc-shared.d.ts +46 -0
- package/rsc/dist/rsc-shared.mjs +223 -0
- package/rsc/dist/rsc-shared.mjs.map +1 -0
- package/rsc/dist/rsc-types.d.ts +6 -0
- package/rsc/dist/rsc-types.mjs +1 -0
- package/rsc/dist/rsc-types.mjs.map +1 -0
package/dist/index.mjs
CHANGED
```diff
@@ -780,12 +780,30 @@ function LangChainStream(callbacks) {
   };
 }
 
+// streams/mistral-stream.ts
+async function* streamable4(stream) {
+  var _a, _b;
+  for await (const chunk of stream) {
+    const content = (_b = (_a = chunk.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content;
+    if (content === void 0 || content === "") {
+      continue;
+    }
+    yield content;
+  }
+}
+function MistralStream(response, callbacks) {
+  const stream = readableFromAsyncIterable(streamable4(response));
+  return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
+}
+
 // streams/openai-stream.ts
 function parseOpenAIStream() {
   const extract = chunkToText();
   return (data) => extract(JSON.parse(data));
 }
-async function*
+async function* streamable5(stream) {
   const extract = chunkToText();
   for await (let chunk of stream) {
     if ("promptFilterResults" in chunk) {
```
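The new `streams/mistral-stream.ts` block mirrors the other adapters: `streamable4` yields `choices[0].delta.content` from each chunk and skips empty deltas, and `MistralStream` pipes that through the callbacks and stream-data transformers. Below is a minimal sketch of how it might be wired into a route handler; the `@mistralai/mistralai` import, its `chatStream` call, and the model name are assumptions about the upstream client, not part of this diff.

```ts
// Minimal sketch, assuming the official @mistralai/mistralai client.
// MistralStream and StreamingTextResponse come from this package.
import MistralClient from "@mistralai/mistralai";
import { MistralStream, StreamingTextResponse } from "ai";

const mistral = new MistralClient(process.env.MISTRAL_API_KEY ?? "");

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Assumed upstream API: chatStream returns an async iterable of chunks
  // shaped like { choices: [{ delta: { content } }] }, which streamable4 reads.
  const response = mistral.chatStream({
    model: "mistral-small", // illustrative model name
    messages,
  });

  // MistralStream applies the callbacks and stream-data transformers shown above.
  return new StreamingTextResponse(MistralStream(response));
}
```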
```diff
@@ -825,7 +843,7 @@ function chunkToText() {
   const trimStartOfStream = trimStartOfStreamHelper();
   let isFunctionStreamingIn;
   return (json) => {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
     if (isChatCompletionChunk(json)) {
       const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
       if ((_b = delta.function_call) == null ? void 0 : _b.name) {
@@ -841,12 +859,12 @@ function chunkToText() {
         }
       } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
         return cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments);
-      } else if ((_k = (_j = delta.tool_calls) == null ? void 0 : _j[0].function) == null ? void 0 :
-        return cleanupArguments((
-      } else if (isFunctionStreamingIn && (((
+      } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
+        return cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments);
+      } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
         isFunctionStreamingIn = false;
         return '"}}';
-      } else if (isFunctionStreamingIn && ((
+      } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
         isFunctionStreamingIn = false;
         return '"}}]}';
       }
```
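Both `chunkToText` hunks are the same fix in compiled form: one more temporary (`_r`) and an extra null guard so that reading the first element of `delta.tool_calls` is itself optional, where 2.2.36 dereferenced `_j[0].function` directly and would throw on an empty `tool_calls` array. A readable approximation of what the transpiled optional chaining corresponds to is sketched below; the chunk shape and the helper name are assumptions reconstructed from the compiled output, since the TypeScript source is not part of this diff.

```ts
// Readable approximation of the compiled branch above (assumed shapes).
type StreamChunk = {
  choices: Array<{
    delta?: {
      function_call?: { name?: string; arguments?: string };
      tool_calls?: Array<{ function?: { name?: string; arguments?: string } }>;
    };
    finish_reason?: string | null;
  }>;
};

function toolCallArguments(chunk: StreamChunk): string | undefined {
  const delta = chunk.choices[0]?.delta;
  // 2.2.36 compiled roughly to delta.tool_calls?.[0].function?.arguments,
  // which throws when tool_calls is an empty array; 3.0.0 also guards [0]:
  return delta?.tool_calls?.[0]?.function?.arguments;
}
```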
```diff
@@ -874,7 +892,7 @@ function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
   if (Symbol.asyncIterator in res) {
-    stream = readableFromAsyncIterable(
+    stream = readableFromAsyncIterable(streamable5(res)).pipeThrough(
       createCallbacksTransformer(
         (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
           ...cb,
```
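In `OpenAIStream`, the async-iterable branch now wraps the response in `streamable5` (the OpenAI chunk generator shown in the first hunk) before piping through the callbacks transformer, so call sites are unchanged. A sketch of a typical call site follows; the `openai` v4 client usage is an assumption about the upstream SDK, not part of this diff.

```ts
// Sketch of an unchanged call site, assuming the openai v4 client.
import OpenAI from "openai";
import { OpenAIStream, StreamingTextResponse } from "ai";

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

export async function POST(req: Request) {
  const { messages } = await req.json();

  const response = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    stream: true,
    messages,
  });

  // The streamed response is async-iterable, so OpenAIStream takes the
  // streamable5 path shown in the hunk above.
  return new StreamingTextResponse(OpenAIStream(response));
}
```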
```diff
@@ -1380,6 +1398,7 @@ export {
   HuggingFaceStream,
   InkeepStream,
   LangChainStream,
+  MistralStream,
   OpenAIStream,
   ReplicateStream,
   StreamingTextResponse,
```
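With the export added, the new adapter is importable from the package root alongside the existing ones:

```ts
// MistralStream is now a named export next to the other stream adapters.
import { MistralStream, OpenAIStream, StreamingTextResponse } from "ai";
```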