ai 2.2.25 → 2.2.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +8 -1
- package/dist/index.js +78 -66
- package/dist/index.mjs +78 -66
- package/package.json +14 -11
- package/react/dist/index.d.ts +32 -9
- package/react/dist/index.js +150 -125
- package/react/dist/index.mjs +150 -125
- package/solid/dist/index.d.ts +9 -1
- package/solid/dist/index.js +127 -90
- package/solid/dist/index.mjs +127 -90
- package/svelte/dist/index.d.ts +9 -1
- package/svelte/dist/index.js +125 -90
- package/svelte/dist/index.mjs +125 -90
- package/vue/dist/index.d.ts +14 -1
- package/vue/dist/index.js +358 -99
- package/vue/dist/index.mjs +358 -99
package/dist/index.d.ts
CHANGED
@@ -35,6 +35,7 @@ interface Function {
      */
     description?: string;
 }
+type IdGenerator = () => string;
 /**
  * Shared types between the API and UI packages.
  */
@@ -115,6 +116,11 @@ type UseChatOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * A way to provide a function that is going to be used for ids for messages.
+     * If not provided nanoid is used by default.
+     */
+    generateId?: IdGenerator;
     /**
      * The credentials mode to be used for the fetch request.
      * Possible values are: 'omit', 'same-origin', 'include'.
@@ -692,6 +698,7 @@ declare class experimental_StreamingReactResponse {
             data?: JSONValue[] | undefined;
         }) => UINode | Promise<UINode>;
         data?: experimental_StreamData;
+        generateId?: IdGenerator;
     });
 }
 
@@ -704,4 +711,4 @@ declare function experimental_AssistantResponse({ threadId, messageId }: {
     sendMessage: (message: AssistantMessage) => void;
 }) => Promise<void>): Response;
 
-export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, IdGenerator, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
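
Note: the new IdGenerator type and generateId option let callers control how message ids are minted; nanoid remains the default. A minimal usage sketch, assuming the option is passed through the useChat hook re-exported from ai/react (the counter-based generator below is purely illustrative, not part of the package):

import { useChat } from 'ai/react';

// Illustrative IdGenerator: any `() => string` satisfies the type; nanoid is used when omitted.
let counter = 0;
const generateId = () => `msg-${counter++}`;

export function Chat() {
  // Ids of newly appended messages now come from `generateId`; rendering is omitted.
  const { messages, input, handleInputChange, handleSubmit } = useChat({ generateId });
  return null;
}

Deterministic ids like this can be useful in tests or when ids must match a server-side scheme.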
package/dist/index.js
CHANGED
@@ -863,21 +863,22 @@ async function ReplicateStream(res, cb, options) {
   );
 }
 
-// shared/
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
+// shared/read-data-stream.ts
+var NEWLINE = "\n".charCodeAt(0);
+function concatChunks(chunks, totalLength) {
+  const concatenatedChunks = new Uint8Array(totalLength);
+  let offset = 0;
+  for (const chunk of chunks) {
+    concatenatedChunks.set(chunk, offset);
+    offset += chunk.length;
+  }
+  chunks.length = 0;
+  return concatenatedChunks;
+}
+async function* readDataStream(reader, {
+  isAborted
+} = {}) {
+  const decoder = new TextDecoder();
   const chunks = [];
   let totalLength = 0;
   while (true) {
@@ -892,61 +893,70 @@ async function parseComplexResponse({
     if (chunks.length === 0) {
       break;
     }
-
-    let offset = 0;
-    for (const chunk of chunks) {
-      concatenatedChunks.set(chunk, offset);
-      offset += chunk.length;
-    }
-    chunks.length = 0;
+    const concatenatedChunks = concatChunks(chunks, totalLength);
     totalLength = 0;
-    const
-
-
-      "Invalid response format. Complex mode was set but the response is a string. This should never happen."
-    );
+    const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
+    for (const streamPart of streamParts2) {
+      yield streamPart;
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if (isAborted == null ? void 0 : isAborted()) {
+      reader.cancel();
+      break;
+    }
+  }
+}
+
+// shared/parse-complex-response.ts
+async function parseComplexResponse({
+  reader,
+  abortControllerRef,
+  update,
+  onFinish,
+  generateId = nanoid,
+  getCurrentDate = () => /* @__PURE__ */ new Date()
+}) {
+  const createdAt = getCurrentDate();
+  const prefixMap = {
+    data: []
+  };
+  for await (const { type, value } of readDataStream(reader, {
+    isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
+  })) {
+    if (type === "text") {
+      if (prefixMap["text"]) {
+        prefixMap["text"] = {
+          ...prefixMap["text"],
+          content: (prefixMap["text"].content || "") + value
+        };
+      } else {
+        prefixMap["text"] = {
           id: generateId(),
           role: "assistant",
-          content:
-          function_call: value2.function_call,
-          name: value2.function_call.name,
+          content: value,
           createdAt
         };
-        functionCallMessage = prefixMap["function_call"];
-      }
-      if (type === "data") {
-        prefixMap["data"].push(...value2);
-      }
-      const responseMessage = prefixMap["text"];
-      const merged = [functionCallMessage, responseMessage].filter(
-        Boolean
-      );
-      update(merged, [...prefixMap["data"]]);
-      if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
-        reader.cancel();
-        break;
       }
     }
+    let functionCallMessage = null;
+    if (type === "function_call") {
+      prefixMap["function_call"] = {
+        id: generateId(),
+        role: "assistant",
+        content: "",
+        function_call: value.function_call,
+        name: value.function_call.name,
+        createdAt
+      };
+      functionCallMessage = prefixMap["function_call"];
+    }
+    if (type === "data") {
+      prefixMap["data"].push(...value);
+    }
+    const responseMessage = prefixMap["text"];
+    const merged = [functionCallMessage, responseMessage].filter(
+      Boolean
+    );
+    update(merged, [...prefixMap["data"]]);
   }
   onFinish == null ? void 0 : onFinish(prefixMap);
   return {
@@ -960,6 +970,7 @@ async function parseComplexResponse({
 // streams/streaming-react-response.ts
 var experimental_StreamingReactResponse = class {
   constructor(res, options) {
+    var _a;
     let resolveFunc = () => {
     };
     let next = new Promise((resolve) => {
@@ -973,8 +984,8 @@ var experimental_StreamingReactResponse = class {
     parseComplexResponse({
       reader: processedStream.getReader(),
       update: (merged, data) => {
-        var
-        const content2 = (_b = (
+        var _a2, _b, _c;
+        const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
         const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
         const payload = { ui, content: content2 };
         const resolvePrevious = resolveFunc;
@@ -987,6 +998,7 @@ var experimental_StreamingReactResponse = class {
         });
         lastPayload = payload;
       },
+      generateId: (_a = options.generateId) != null ? _a : nanoid,
       onFinish: () => {
         if (lastPayload !== void 0) {
           resolveFunc({
@@ -1002,12 +1014,12 @@ var experimental_StreamingReactResponse = class {
     const decode = createChunkDecoder();
     const reader = res.getReader();
     async function readChunk() {
-      var
+      var _a2;
       const { done, value } = await reader.read();
       if (!done) {
        content += decode(value);
      }
-      const ui = ((
+      const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
       const payload = {
         ui,
         content
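
Note: the refactor above extracts the chunk handling from parseComplexResponse into a readDataStream async generator that accepts an optional isAborted callback and cancels the reader once it reports an abort. A simplified, self-contained sketch of that abort-aware reading pattern (illustrative only, not the package's actual helper):

// Sketch of the control flow: buffer chunks, yield complete lines, and stop early when aborted.
async function* readLines(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  { isAborted }: { isAborted?: () => boolean } = {},
): AsyncGenerator<string> {
  const decoder = new TextDecoder();
  let buffered = '';
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffered += decoder.decode(value, { stream: true });
    const lines = buffered.split('\n');
    buffered = lines.pop() ?? ''; // keep the trailing partial line for the next chunk
    yield* lines.filter((line) => line !== '');
    if (isAborted?.()) {
      reader.cancel(); // release the underlying stream when the caller aborts
      break;
    }
  }
}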
package/dist/index.mjs
CHANGED
@@ -815,21 +815,22 @@ async function ReplicateStream(res, cb, options) {
   );
 }
 
-// shared/
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
+// shared/read-data-stream.ts
+var NEWLINE = "\n".charCodeAt(0);
+function concatChunks(chunks, totalLength) {
+  const concatenatedChunks = new Uint8Array(totalLength);
+  let offset = 0;
+  for (const chunk of chunks) {
+    concatenatedChunks.set(chunk, offset);
+    offset += chunk.length;
+  }
+  chunks.length = 0;
+  return concatenatedChunks;
+}
+async function* readDataStream(reader, {
+  isAborted
+} = {}) {
+  const decoder = new TextDecoder();
   const chunks = [];
   let totalLength = 0;
   while (true) {
@@ -844,61 +845,70 @@ async function parseComplexResponse({
     if (chunks.length === 0) {
      break;
    }
-
-    let offset = 0;
-    for (const chunk of chunks) {
-      concatenatedChunks.set(chunk, offset);
-      offset += chunk.length;
-    }
-    chunks.length = 0;
+    const concatenatedChunks = concatChunks(chunks, totalLength);
     totalLength = 0;
-    const
-
-
-      "Invalid response format. Complex mode was set but the response is a string. This should never happen."
-    );
+    const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
+    for (const streamPart of streamParts2) {
+      yield streamPart;
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if (isAborted == null ? void 0 : isAborted()) {
+      reader.cancel();
+      break;
+    }
+  }
+}
+
+// shared/parse-complex-response.ts
+async function parseComplexResponse({
+  reader,
+  abortControllerRef,
+  update,
+  onFinish,
+  generateId = nanoid,
+  getCurrentDate = () => /* @__PURE__ */ new Date()
+}) {
+  const createdAt = getCurrentDate();
+  const prefixMap = {
+    data: []
+  };
+  for await (const { type, value } of readDataStream(reader, {
+    isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
+  })) {
+    if (type === "text") {
+      if (prefixMap["text"]) {
+        prefixMap["text"] = {
+          ...prefixMap["text"],
+          content: (prefixMap["text"].content || "") + value
+        };
+      } else {
+        prefixMap["text"] = {
           id: generateId(),
           role: "assistant",
-          content:
-          function_call: value2.function_call,
-          name: value2.function_call.name,
+          content: value,
           createdAt
         };
-        functionCallMessage = prefixMap["function_call"];
-      }
-      if (type === "data") {
-        prefixMap["data"].push(...value2);
-      }
-      const responseMessage = prefixMap["text"];
-      const merged = [functionCallMessage, responseMessage].filter(
-        Boolean
-      );
-      update(merged, [...prefixMap["data"]]);
-      if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
-        reader.cancel();
-        break;
       }
     }
+    let functionCallMessage = null;
+    if (type === "function_call") {
+      prefixMap["function_call"] = {
+        id: generateId(),
+        role: "assistant",
+        content: "",
+        function_call: value.function_call,
+        name: value.function_call.name,
+        createdAt
+      };
+      functionCallMessage = prefixMap["function_call"];
+    }
+    if (type === "data") {
+      prefixMap["data"].push(...value);
+    }
+    const responseMessage = prefixMap["text"];
+    const merged = [functionCallMessage, responseMessage].filter(
+      Boolean
+    );
+    update(merged, [...prefixMap["data"]]);
   }
   onFinish == null ? void 0 : onFinish(prefixMap);
   return {
@@ -912,6 +922,7 @@ async function parseComplexResponse({
 // streams/streaming-react-response.ts
 var experimental_StreamingReactResponse = class {
   constructor(res, options) {
+    var _a;
     let resolveFunc = () => {
     };
     let next = new Promise((resolve) => {
@@ -925,8 +936,8 @@ var experimental_StreamingReactResponse = class {
     parseComplexResponse({
       reader: processedStream.getReader(),
       update: (merged, data) => {
-        var
-        const content2 = (_b = (
+        var _a2, _b, _c;
+        const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
         const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
         const payload = { ui, content: content2 };
         const resolvePrevious = resolveFunc;
@@ -939,6 +950,7 @@ var experimental_StreamingReactResponse = class {
         });
         lastPayload = payload;
       },
+      generateId: (_a = options.generateId) != null ? _a : nanoid,
       onFinish: () => {
         if (lastPayload !== void 0) {
           resolveFunc({
@@ -954,12 +966,12 @@ var experimental_StreamingReactResponse = class {
     const decode = createChunkDecoder();
     const reader = res.getReader();
     async function readChunk() {
-      var
+      var _a2;
       const { done, value } = await reader.read();
       if (!done) {
        content += decode(value);
      }
-      const ui = ((
+      const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
       const payload = {
         ui,
         content
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.2.25",
+  "version": "2.2.27",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -64,27 +64,30 @@
     "swrv": "1.0.4"
   },
   "devDependencies": {
+    "@anthropic-ai/sdk": "0.10.0",
     "@aws-sdk/client-bedrock-runtime": "3.451.0",
     "@edge-runtime/jest-environment": "1.1.0-beta.31",
     "@huggingface/inference": "2.6.4",
     "@testing-library/jest-dom": "^6.1.4",
     "@testing-library/react": "^14.0.0",
     "@testing-library/user-event": "^14.5.1",
+    "@testing-library/vue": "^8.0.1",
     "@types/jest": "29.2.0",
     "@types/node": "^17.0.12",
     "@types/react": "^18.2.8",
     "@types/react-dom": "^18.2.0",
+    "@vitejs/plugin-react": "4.2.0",
+    "@vitejs/plugin-vue": "4.5.0",
     "eslint": "^7.32.0",
-    "
-    "
-    "
-    "langchain": "0.0.172",
+    "jsdom": "^23.0.0",
+    "langchain": "0.0.196",
+    "msw": "2.0.9",
     "openai": "4.16.1",
-    "
+    "react-dom": "^18.2.0",
     "tsup": "^6.7.0",
     "typescript": "5.1.3",
-    "
-    "
+    "@vercel/ai-tsconfig": "0.0.0",
+    "eslint-config-vercel-ai": "0.0.0"
   },
   "peerDependencies": {
     "react": "^18.2.0",
@@ -135,8 +138,8 @@
     "type-check": "tsc --noEmit",
     "prettier-check": "prettier --check \"./**/*.ts*\"",
     "test": "pnpm test:node && pnpm test:edge && pnpm test:ui",
-    "test:edge": "
-    "test:node": "
-    "test:ui": "
+    "test:edge": "vitest --config vitest.edge.config.js --run",
+    "test:node": "vitest --config vitest.node.config.js --run",
+    "test:ui": "vitest --config vitest.ui.config.js --run"
   }
 }
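
Note: the test scripts now invoke vitest with three config files (vitest.edge.config.js, vitest.node.config.js, vitest.ui.config.js) that are not included in the published package. A hypothetical sketch of what the UI config could look like, inferred only from the jsdom and @vitejs/plugin-react devDependencies added above:

// Hypothetical sketch only; the real vitest.ui.config.js is not part of this diff.
import { defineConfig } from 'vitest/config';
import react from '@vitejs/plugin-react';

export default defineConfig({
  plugins: [react()],
  test: {
    environment: 'jsdom',               // matches the new jsdom devDependency
    include: ['**/*.ui.test.{ts,tsx}'], // illustrative glob, not the package's actual layout
  },
});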
package/react/dist/index.d.ts
CHANGED
@@ -33,6 +33,7 @@ interface Function {
      */
     description?: string;
 }
+type IdGenerator = () => string;
 /**
  * Shared types between the API and UI packages.
  */
@@ -113,6 +114,11 @@ type UseChatOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * A way to provide a function that is going to be used for ids for messages.
+     * If not provided nanoid is used by default.
+     */
+    generateId?: IdGenerator;
     /**
      * The credentials mode to be used for the fetch request.
      * Possible values are: 'omit', 'same-origin', 'include'.
@@ -240,6 +246,7 @@ declare class experimental_StreamingReactResponse {
             data?: JSONValue[] | undefined;
         }) => UINode | Promise<UINode>;
         data?: experimental_StreamData;
+        generateId?: IdGenerator;
     });
 }
 
@@ -289,7 +296,7 @@ type StreamingReactResponseAction = (payload: {
     messages: Message[];
     data?: Record<string, string>;
 }) => Promise<experimental_StreamingReactResponse>;
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: Omit<UseChatOptions, 'api'> & {
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, generateId, }?: Omit<UseChatOptions, 'api'> & {
     api?: string | StreamingReactResponseAction;
 }): UseChatHelpers;
 
@@ -334,24 +341,40 @@ type UseCompletionHelpers = {
     handleSubmit: (e: React.FormEvent<HTMLFormElement>) => void;
     /** Whether the API request is in progress */
     isLoading: boolean;
+    /** Additional data added on the server via StreamData */
+    data?: JSONValue[] | undefined;
 };
 declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
 type AssistantStatus = 'in_progress' | 'awaiting_message';
-
-
-    threadId?: string | undefined;
-}): {
+type UseAssistantHelpers = {
+    /** Current messages in the chat */
     messages: Message[];
+    /** Current thread ID */
     threadId: string | undefined;
+    /** The current value of the input */
     input: string;
-
-
+    /** An input/textarea-ready onChange handler to control the value of the input */
+    handleInputChange: (event: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
+    /** Form submission handler to automatically reset input and append a user message */
+    submitMessage: (event?: React.FormEvent<HTMLFormElement>, requestOptions?: {
+        data?: Record<string, string>;
+    }) => Promise<void>;
+    /** Current status of the assistant */
     status: AssistantStatus;
-    error
+    /** Current error, if any */
+    error: undefined | unknown;
+};
+type UseAssistantOptions = {
+    api: string;
+    threadId?: string | undefined;
+    credentials?: RequestCredentials;
+    headers?: Record<string, string> | Headers;
+    body?: object;
 };
+declare function experimental_useAssistant({ api, threadId: threadIdParam, credentials, headers, body, }: UseAssistantOptions): UseAssistantHelpers;
 
-export { AssistantStatus, CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
+export { AssistantStatus, CreateMessage, Message, UseAssistantHelpers, UseAssistantOptions, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
 import * as react_jsx_runtime from 'react/jsx-runtime';
 
 type Props = {