@reverbia/sdk 1.0.0-next.20251208112742 → 1.0.0-next.20251209090511
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/expo/index.cjs +1061 -107
- package/dist/expo/index.d.mts +150 -6
- package/dist/expo/index.d.ts +150 -6
- package/dist/expo/index.mjs +1059 -106
- package/dist/react/index.cjs +523 -47635
- package/dist/react/index.d.mts +42 -26
- package/dist/react/index.d.ts +42 -26
- package/dist/react/index.mjs +428 -166
- package/package.json +1 -1
- package/dist/react/chunk-BG7SZT33.mjs +0 -182
- package/dist/react/chunk-FBCDBTKJ.mjs +0 -55
- package/dist/react/chunk-LVWIZZZP.mjs +0 -6
- package/dist/react/chunk-Q6FVPTTV.mjs +0 -28
- package/dist/react/constants-WUGUGYE3.mjs +0 -7
- package/dist/react/generation-NG4QVPCR.mjs +0 -52
- package/dist/react/onnxruntime_binding-5QEF3SUC.node +0 -0
- package/dist/react/onnxruntime_binding-BKPKNEGC.node +0 -0
- package/dist/react/onnxruntime_binding-FMOXGIUT.node +0 -0
- package/dist/react/onnxruntime_binding-OI2KMXC5.node +0 -0
- package/dist/react/onnxruntime_binding-UX44MLAZ.node +0 -0
- package/dist/react/onnxruntime_binding-Y2W7N7WY.node +0 -0
- package/dist/react/selector-XMR5KL3E.mjs +0 -14
- package/dist/react/transformers.node-LUTOZWVQ.mjs +0 -46943
package/dist/react/index.mjs
CHANGED
@@ -1,46 +1,6 @@
-import {
-  DEFAULT_TOOL_SELECTOR_MODEL,
-  executeTool,
-  selectTool
-} from "./chunk-BG7SZT33.mjs";
-import "./chunk-Q6FVPTTV.mjs";
-import "./chunk-FBCDBTKJ.mjs";
-
 // src/react/useChat.ts
 import { useCallback, useEffect, useRef, useState } from "react";
 
-// src/lib/polyfills/textDecoderStream.ts
-var needsPolyfill = typeof globalThis.TextDecoderStream === "undefined";
-if (needsPolyfill && typeof globalThis.TransformStream !== "undefined") {
-  class TextDecoderStreamPolyfill {
-    constructor(label = "utf-8", options) {
-      this.decoder = new TextDecoder(label, options);
-      const decoder = this.decoder;
-      this.transform = new TransformStream({
-        transform(chunk, controller) {
-          const text = decoder.decode(chunk, { stream: true });
-          if (text) {
-            controller.enqueue(text);
-          }
-        },
-        flush(controller) {
-          const text = decoder.decode();
-          if (text) {
-            controller.enqueue(text);
-          }
-        }
-      });
-    }
-    get readable() {
-      return this.transform.readable;
-    }
-    get writable() {
-      return this.transform.writable;
-    }
-  }
-  globalThis.TextDecoderStream = TextDecoderStreamPolyfill;
-}
-
 // src/client/core/bodySerializer.gen.ts
 var jsonBodySerializer = {
   bodySerializer: (body) => JSON.stringify(
@@ -856,24 +816,373 @@ var createClientConfig = (config) => ({
 // src/client/client.gen.ts
 var client = createClient(createClientConfig(createConfig()));
 
-// src/
-var
-
-
-
-
-
-
-
-
-
-
-
-
+// src/lib/chat/useChat/utils.ts
+var VALIDATION_ERROR_MESSAGES = {
+  messages_required: "messages are required to call sendMessage.",
+  model_required: "model is required to call sendMessage.",
+  token_getter_required: "Token getter function is required.",
+  token_unavailable: "No access token available."
+};
+function validateMessages(messages) {
+  if (!messages?.length) {
+    return {
+      valid: false,
+      error: "messages_required",
+      message: VALIDATION_ERROR_MESSAGES.messages_required
+    };
+  }
+  return { valid: true };
+}
+function validateModel(model) {
+  if (!model) {
+    return {
+      valid: false,
+      error: "model_required",
+      message: VALIDATION_ERROR_MESSAGES.model_required
+    };
+  }
+  return { valid: true };
+}
+function validateTokenGetter(getToken) {
+  if (!getToken) {
+    return {
+      valid: false,
+      error: "token_getter_required",
+      message: VALIDATION_ERROR_MESSAGES.token_getter_required
+    };
+  }
+  return { valid: true };
+}
+function validateToken(token) {
+  if (!token) {
+    return {
+      valid: false,
+      error: "token_unavailable",
+      message: VALIDATION_ERROR_MESSAGES.token_unavailable
+    };
+  }
+  return { valid: true };
+}
+function createStreamAccumulator() {
+  return {
+    content: "",
+    completionId: "",
+    completionModel: "",
+    usage: {},
+    finishReason: void 0
   };
-
-
+}
+function processStreamingChunk(chunk, accumulator) {
+  if (chunk.id && !accumulator.completionId) {
+    accumulator.completionId = chunk.id;
+  }
+  if (chunk.model && !accumulator.completionModel) {
+    accumulator.completionModel = chunk.model;
+  }
+  if (chunk.usage) {
+    accumulator.usage = {
+      ...accumulator.usage,
+      ...chunk.usage
+    };
+  }
+  if (chunk.choices?.[0]) {
+    const choice = chunk.choices[0];
+    if (choice.delta?.content) {
+      accumulator.content += choice.delta.content;
+      return choice.delta.content;
+    }
+    if (choice.finish_reason) {
+      accumulator.finishReason = choice.finish_reason;
+    }
+  }
+  return null;
+}
+function buildCompletionResponse(accumulator) {
+  return {
+    id: accumulator.completionId,
+    model: accumulator.completionModel,
+    choices: [
+      {
+        index: 0,
+        message: {
+          role: "assistant",
+          content: [{ type: "text", text: accumulator.content }]
+        },
+        finish_reason: accumulator.finishReason
+      }
+    ],
+    usage: Object.keys(accumulator.usage).length > 0 ? accumulator.usage : void 0
+  };
+}
+function createErrorResult(message, onError) {
+  if (onError) {
+    onError(new Error(message));
+  }
+  return { data: null, error: message };
+}
+function handleError(err, onError) {
+  const errorMsg = err instanceof Error ? err.message : "Failed to send message.";
+  const errorObj = err instanceof Error ? err : new Error(errorMsg);
+  if (onError) {
+    onError(errorObj);
+  }
+  return { data: null, error: errorMsg };
+}
+function isAbortError(err) {
+  return err instanceof Error && err.name === "AbortError";
+}
+function isDoneMarker(chunk) {
+  if (typeof chunk === "string") {
+    const trimmed = chunk.trim();
+    return trimmed === "[DONE]" || trimmed.includes("[DONE]");
+  }
+  return false;
+}
+
+// src/lib/chat/constants.ts
+var DEFAULT_LOCAL_CHAT_MODEL = "onnx-community/Qwen2.5-0.5B-Instruct";
+
+// src/lib/chat/pipeline.ts
+var sharedPipeline = null;
+var currentModel = null;
+var currentDevice = null;
+async function getTextGenerationPipeline(options) {
+  const { model, device = "wasm", dtype = "q4" } = options;
+  if (sharedPipeline && currentModel === model && currentDevice === device) {
+    return sharedPipeline;
+  }
+  const { pipeline, env } = await import("@huggingface/transformers");
+  env.allowLocalModels = false;
+  if (env.backends?.onnx) {
+    env.backends.onnx.logLevel = "fatal";
+  }
+  console.log(`[Pipeline] Loading model: ${model} on ${device}...`);
+  sharedPipeline = await pipeline("text-generation", model, {
+    dtype,
+    device
+  });
+  currentModel = model;
+  currentDevice = device;
+  console.log(`[Pipeline] Model loaded: ${model}`);
+  return sharedPipeline;
+}
+
+// src/lib/chat/generation.ts
+async function generateLocalChatCompletion(messages, options = {}) {
+  const {
+    model = DEFAULT_LOCAL_CHAT_MODEL,
+    temperature = 0.7,
+    max_tokens = 1024,
+    top_p = 0.9,
+    onToken,
+    signal
+  } = options;
+  const { TextStreamer } = await import("@huggingface/transformers");
+  const chatPipeline = await getTextGenerationPipeline({
+    model,
+    device: "wasm",
+    dtype: "q4"
+  });
+  class CallbackStreamer extends TextStreamer {
+    constructor(tokenizer, cb) {
+      super(tokenizer, {
+        skip_prompt: true,
+        skip_special_tokens: true
+      });
+      this.cb = cb;
+    }
+    on_finalized_text(text) {
+      if (signal?.aborted) {
+        throw new Error("AbortError");
+      }
+      this.cb(text);
+    }
+  }
+  const streamer = onToken ? new CallbackStreamer(chatPipeline.tokenizer, onToken) : void 0;
+  const output = await chatPipeline(messages, {
+    max_new_tokens: max_tokens,
+    temperature,
+    top_p,
+    streamer,
+    return_full_text: false
+  });
+  return output;
+}
+
+// src/lib/tools/selector.ts
+var DEFAULT_TOOL_SELECTOR_MODEL = "Xenova/LaMini-GPT-124M";
+function buildToolSelectionPrompt(userMessage, tools) {
+  const toolList = tools.map((t) => `${t.name} (${t.description})`).join("\n");
+  return `Pick the best tool for the task. Reply with ONLY the tool name.
+
+Available tools:
+${toolList}
+none (no tool needed)
+
+Task: "${userMessage}"
+
+Best tool:`;
+}
+function buildParamExtractionPrompt(userMessage, paramName, paramDescription) {
+  const desc = paramDescription ? ` (${paramDescription})` : "";
+  return `Extract the value for "${paramName}"${desc} from the user message. Reply with ONLY the extracted value, nothing else.
+
+User message: "${userMessage}"
+
+Value for ${paramName}:`;
+}
+async function extractParams(userMessage, tool, options) {
+  const params = {};
+  if (!tool.parameters || tool.parameters.length === 0) return params;
+  const { model, device } = options;
+  try {
+    const pipeline = await getTextGenerationPipeline({
+      model,
+      device,
+      dtype: "q4"
+    });
+    for (const param of tool.parameters) {
+      const prompt = buildParamExtractionPrompt(
+        userMessage,
+        param.name,
+        param.description
+      );
+      const output = await pipeline(prompt, {
+        max_new_tokens: 32,
+        // Allow reasonable length for parameter values
+        temperature: 0,
+        do_sample: false,
+        return_full_text: false
+      });
+      const generatedText = output?.[0]?.generated_text || output?.generated_text || "";
+      const extractedValue = generatedText.trim().split("\n")[0].trim();
+      console.log(
+        `[Tool Selector] Extracted param "${param.name}":`,
+        extractedValue
+      );
+      params[param.name] = extractedValue || userMessage;
+    }
+  } catch (error) {
+    console.error("[Tool Selector] Error extracting params:", error);
+    for (const param of tool.parameters) {
+      params[param.name] = userMessage;
+    }
+  }
+  return params;
+}
+async function parseToolSelectionResponse(response, tools, userMessage, options) {
+  console.log("[Tool Selector] Raw response:", response);
+  const cleaned = response.toLowerCase().trim().split(/[\s\n,.]+/)[0].replace(/[^a-z0-9_-]/g, "");
+  console.log("[Tool Selector] Parsed tool name:", cleaned);
+  if (cleaned === "none" || cleaned === "null" || cleaned === "") {
+    console.log("[Tool Selector] No tool selected");
+    return { toolSelected: false };
+  }
+  const selectedTool = tools.find((t) => t.name.toLowerCase() === cleaned);
+  if (!selectedTool) {
+    const fuzzyTool = tools.find(
+      (t) => t.name.toLowerCase().includes(cleaned) || cleaned.includes(t.name.toLowerCase())
+    );
+    if (fuzzyTool) {
+      console.log(`[Tool Selector] Fuzzy matched tool: ${fuzzyTool.name}`);
+      const params2 = await extractParams(userMessage, fuzzyTool, options);
+      return {
+        toolSelected: true,
+        toolName: fuzzyTool.name,
+        parameters: params2,
+        confidence: 0.6
+      };
+    }
+    console.warn(`[Tool Selector] Unknown tool: ${cleaned}`);
+    return { toolSelected: false };
+  }
+  const params = await extractParams(userMessage, selectedTool, options);
+  console.log(`[Tool Selector] Selected tool: ${selectedTool.name}`, params);
+  return {
+    toolSelected: true,
+    toolName: selectedTool.name,
+    parameters: params,
+    confidence: 0.9
+  };
+}
+async function selectTool(userMessage, tools, options = {}) {
+  const {
+    model = DEFAULT_TOOL_SELECTOR_MODEL,
+    signal,
+    device = "wasm"
+  } = options;
+  if (!tools.length) {
+    return { toolSelected: false };
+  }
+  console.log(
+    `[Tool Selector] analyzing message: "${userMessage}" with model ${model}`
+  );
+  try {
+    const selectorPipeline = await getTextGenerationPipeline({
+      model,
+      device,
+      dtype: "q4"
+      // Aggressive quantization for speed
+    });
+    const prompt = buildToolSelectionPrompt(userMessage, tools);
+    const output = await selectorPipeline(prompt, {
+      max_new_tokens: 4,
+      // Just need the tool name
+      temperature: 0,
+      // Deterministic
+      do_sample: false,
+      return_full_text: false
+    });
+    if (signal?.aborted) {
+      return { toolSelected: false };
+    }
+    const generatedText = output?.[0]?.generated_text || output?.generated_text || "";
+    return await parseToolSelectionResponse(generatedText, tools, userMessage, {
+      model,
+      device
+    });
+  } catch (error) {
+    console.error("[Tool Selector] Error:", error);
+    return { toolSelected: false };
+  }
+}
+var preloadState = /* @__PURE__ */ new Map();
+async function preloadToolSelectorModel(options = {}) {
+  const { model = DEFAULT_TOOL_SELECTOR_MODEL, device = "wasm" } = options;
+  const existing = preloadState.get(model);
+  if (existing) {
+    return existing.promise;
+  }
+  console.log(`[Tool Selector] Preloading model: ${model}`);
+  const promise = getTextGenerationPipeline({
+    model,
+    device,
+    dtype: "q4"
+  }).then(() => {
+    console.log(`[Tool Selector] Model preloaded: ${model}`);
+  }).catch((error) => {
+    console.warn("[Tool Selector] Failed to preload model:", error);
+  });
+  preloadState.set(model, { promise, attempted: true });
+  return promise;
+}
+async function executeTool(tool, params) {
+  try {
+    console.log(
+      `[Tool Selector] Executing tool ${tool.name} with params:`,
+      params
+    );
+    const result = await tool.execute(params);
+    console.log(`[Tool Selector] Tool ${tool.name} execution result:`, result);
+    return { success: true, result };
+  } catch (error) {
+    const errorMessage = error instanceof Error ? error.message : "Tool execution failed";
+    console.error(`[Tool Selector] Tool ${tool.name} failed:`, errorMessage);
+    return { success: false, error: errorMessage };
+  }
+}
+
+// src/react/useChat.ts
 function useChat(options) {
   const {
     getToken,
@@ -889,7 +1198,6 @@ function useChat(options) {
   } = options || {};
   const [isLoading, setIsLoading] = useState(false);
   const [isSelectingTool, setIsSelectingTool] = useState(false);
-  const [webFeaturesLoaded, setWebFeaturesLoaded] = useState(false);
   const abortControllerRef = useRef(null);
   const stop = useCallback(() => {
     if (abortControllerRef.current) {
@@ -906,17 +1214,12 @@ function useChat(options) {
     };
   }, []);
   useEffect(() => {
-
-
-
-  }, []);
-  useEffect(() => {
-    if (!isReactNative && webFeaturesLoaded && webFeatures && tools && tools.length > 0) {
-      webFeatures.preloadToolSelectorModel({
-        model: toolSelectorModel || webFeatures.DEFAULT_TOOL_SELECTOR_MODEL
+    if (tools && tools.length > 0) {
+      preloadToolSelectorModel({
+        model: toolSelectorModel || DEFAULT_TOOL_SELECTOR_MODEL
       });
     }
-  }, [tools, toolSelectorModel
+  }, [tools, toolSelectorModel]);
   const sendMessage = useCallback(
     async ({
       messages,
@@ -924,10 +1227,9 @@ function useChat(options) {
       onData,
       runTools = true
     }) => {
-
-
-
-        return { data: null, error: errorMsg };
+      const messagesValidation = validateMessages(messages);
+      if (!messagesValidation.valid) {
+        return createErrorResult(messagesValidation.message, onError);
       }
       if (abortControllerRef.current) {
         abortControllerRef.current.abort();
@@ -937,27 +1239,23 @@ function useChat(options) {
       setIsLoading(true);
       let toolExecutionResult;
       let messagesWithToolContext = messages;
-      const
-      if (
+      const shouldRunTools = runTools && tools && tools.length > 0;
+      if (shouldRunTools) {
         const lastUserMessage = [...messages].reverse().find((m) => m.role === "user");
         if (lastUserMessage?.content) {
           setIsSelectingTool(true);
           const contentString = lastUserMessage.content?.map((part) => part.text || "").join("") || "";
           try {
-            const selectionResult = await
-
-
-
-                model: toolSelectorModel || webFeatures.DEFAULT_TOOL_SELECTOR_MODEL,
-                signal: abortController.signal
-              }
-            );
+            const selectionResult = await selectTool(contentString, tools, {
+              model: toolSelectorModel || DEFAULT_TOOL_SELECTOR_MODEL,
+              signal: abortController.signal
+            });
             if (selectionResult.toolSelected && selectionResult.toolName) {
               const selectedTool = tools.find(
                 (t) => t.name === selectionResult.toolName
               );
               if (selectedTool) {
-                const execResult = await
+                const execResult = await executeTool(
                   selectedTool,
                   selectionResult.parameters || {}
                 );
@@ -1005,31 +1303,38 @@ Please inform the user about this issue and try to help them alternatively.`
               }
             }
           } catch (err) {
+            if (isAbortError(err)) {
+              setIsLoading(false);
+              setIsSelectingTool(false);
+              return {
+                data: null,
+                error: "Request aborted",
+                toolExecution: toolExecutionResult
+              };
+            }
             console.warn("Tool selection error:", err);
           } finally {
             setIsSelectingTool(false);
           }
         }
       }
+      if (abortController.signal.aborted) {
+        setIsLoading(false);
+        return {
+          data: null,
+          error: "Request aborted",
+          toolExecution: toolExecutionResult
+        };
+      }
       try {
         if (chatProvider === "local") {
-          if (isReactNative || !webFeaturesLoaded || !webFeatures) {
-            const errorMsg = 'Local chat provider is not available in React Native. Use chatProvider: "api" instead.';
-            setIsLoading(false);
-            if (onError) onError(new Error(errorMsg));
-            return {
-              data: null,
-              error: errorMsg,
-              toolExecution: toolExecutionResult
-            };
-          }
           let accumulatedContent = "";
-          const usedModel = localModel ||
+          const usedModel = localModel || DEFAULT_LOCAL_CHAT_MODEL;
           const formattedMessages = messagesWithToolContext.map((m) => ({
             role: m.role || "user",
             content: m.content?.map((p) => p.text || "").join("") || ""
           }));
-          await
+          await generateLocalChatCompletion(formattedMessages, {
            model: usedModel,
            signal: abortController.signal,
            onToken: (token) => {
@@ -1068,32 +1373,34 @@ Please inform the user about this issue and try to help them alternatively.`
             toolExecution: toolExecutionResult
           };
         } else {
-
-
-
+          const modelValidation = validateModel(model);
+          if (!modelValidation.valid) {
+            setIsLoading(false);
+            if (onError) onError(new Error(modelValidation.message));
             return {
               data: null,
-              error:
+              error: modelValidation.message,
               toolExecution: toolExecutionResult
             };
           }
-
-
-
+          const tokenGetterValidation = validateTokenGetter(getToken);
+          if (!tokenGetterValidation.valid) {
+            setIsLoading(false);
+            if (onError) onError(new Error(tokenGetterValidation.message));
             return {
               data: null,
-              error:
+              error: tokenGetterValidation.message,
               toolExecution: toolExecutionResult
             };
           }
           const token = await getToken();
-
-
+          const tokenValidation = validateToken(token);
+          if (!tokenValidation.valid) {
             setIsLoading(false);
-            if (onError) onError(new Error(
+            if (onError) onError(new Error(tokenValidation.message));
             return {
               data: null,
-              error:
+              error: tokenValidation.message,
               toolExecution: toolExecutionResult
             };
           }
@@ -1111,62 +1418,23 @@ Please inform the user about this issue and try to help them alternatively.`
             },
             signal: abortController.signal
           });
-
-          let completionId = "";
-          let completionModel = "";
-          let accumulatedUsage = {};
-          let finishReason;
+          const accumulator = createStreamAccumulator();
           for await (const chunk of sseResult.stream) {
-            if (
+            if (isDoneMarker(chunk)) {
               continue;
             }
             if (chunk && typeof chunk === "object") {
-              const
-
-
-
-              if (
-
-
-              if (chunkData.usage) {
-                accumulatedUsage = {
-                  ...accumulatedUsage,
-                  ...chunkData.usage
-                };
-              }
-              if (chunkData.choices && Array.isArray(chunkData.choices) && chunkData.choices.length > 0) {
-                const choice = chunkData.choices[0];
-                if (choice.delta?.content) {
-                  const content = choice.delta.content;
-                  accumulatedContent += content;
-                  if (onData) {
-                    onData(content);
-                  }
-                  if (globalOnData) {
-                    globalOnData(content);
-                  }
-                }
-                if (choice.finish_reason) {
-                  finishReason = choice.finish_reason;
-                }
+              const contentDelta = processStreamingChunk(
+                chunk,
+                accumulator
+              );
+              if (contentDelta) {
+                if (onData) onData(contentDelta);
+                if (globalOnData) globalOnData(contentDelta);
               }
             }
           }
-          const completion =
-            id: completionId,
-            model: completionModel,
-            choices: [
-              {
-                index: 0,
-                message: {
-                  role: "assistant",
-                  content: [{ type: "text", text: accumulatedContent }]
-                },
-                finish_reason: finishReason
-              }
-            ],
-            usage: Object.keys(accumulatedUsage).length > 0 ? accumulatedUsage : void 0
-          };
+          const completion = buildCompletionResponse(accumulator);
           setIsLoading(false);
           if (onFinish) {
             onFinish(completion);
|
|
|
1178
1446
|
};
|
|
1179
1447
|
}
|
|
1180
1448
|
} catch (err) {
|
|
1181
|
-
if (err
|
|
1449
|
+
if (isAbortError(err)) {
|
|
1182
1450
|
setIsLoading(false);
|
|
1183
1451
|
return {
|
|
1184
1452
|
data: null,
|
|
@@ -1186,15 +1454,10 @@ Please inform the user about this issue and try to help them alternatively.`
            toolExecution: toolExecutionResult
          };
        }
-        const
-        const errorObj = err instanceof Error ? err : new Error(errorMsg);
+        const errorResult = handleError(err, onError);
        setIsLoading(false);
-        if (onError) {
-          onError(errorObj);
-        }
        return {
-
-          error: errorMsg,
+          ...errorResult,
          toolExecution: toolExecutionResult
        };
      } finally {
@@ -1213,8 +1476,7 @@ Please inform the user about this issue and try to help them alternatively.`
       localModel,
       tools,
       toolSelectorModel,
-      onToolExecution,
-      webFeaturesLoaded
+      onToolExecution
     ]
   );
   return {
@@ -1687,7 +1949,7 @@ var generateEmbeddingForText = async (text, options = {}) => {
   }
   try {
     if (!embeddingPipeline) {
-      const { pipeline } = await import("
+      const { pipeline } = await import("@huggingface/transformers");
       embeddingPipeline = await pipeline("feature-extraction", model);
     }
     const output = await embeddingPipeline(text, {