@lobehub/chat 1.11.2 → 1.11.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/docs/self-hosting/server-database/vercel.mdx +78 -75
- package/docs/self-hosting/start.zh-CN.mdx +2 -2
- package/docs/usage/providers/siliconcloud.mdx +4 -3
- package/docs/usage/providers/siliconcloud.zh-CN.mdx +0 -1
- package/locales/ar/components.json +60 -0
- package/locales/bg-BG/components.json +60 -0
- package/locales/de-DE/components.json +60 -0
- package/locales/en-US/components.json +60 -0
- package/locales/es-ES/components.json +60 -0
- package/locales/fr-FR/components.json +60 -0
- package/locales/it-IT/components.json +60 -0
- package/locales/ja-JP/components.json +60 -0
- package/locales/ko-KR/components.json +60 -0
- package/locales/nl-NL/components.json +60 -0
- package/locales/pl-PL/components.json +60 -0
- package/locales/pt-BR/components.json +60 -0
- package/locales/ru-RU/components.json +60 -0
- package/locales/tr-TR/components.json +60 -0
- package/locales/vi-VN/components.json +60 -0
- package/locales/zh-CN/components.json +60 -0
- package/locales/zh-TW/components.json +60 -0
- package/package.json +1 -1
- package/src/app/(main)/settings/_layout/Desktop/SideBar.tsx +3 -22
- package/src/components/FileIcon/config.ts +12 -0
- package/src/components/FileIcon/index.tsx +30 -0
- package/src/components/FunctionModal/createModalHooks.ts +46 -0
- package/src/components/FunctionModal/index.ts +1 -0
- package/src/components/FunctionModal/style.tsx +39 -0
- package/src/components/GoBack/index.tsx +46 -0
- package/src/components/PanelTitle/index.tsx +41 -0
- package/src/components/RepoIcon/index.tsx +22 -0
- package/src/components/SidebarHeader/index.tsx +4 -2
- package/src/components/TipGuide/index.tsx +150 -0
- package/src/database/client/models/message.ts +5 -0
- package/src/features/Conversation/Messages/Default.tsx +1 -2
- package/src/libs/agent-runtime/AgentRuntime.ts +9 -2
- package/src/libs/agent-runtime/BaseAI.ts +14 -1
- package/src/libs/agent-runtime/qwen/index.ts +3 -4
- package/src/libs/agent-runtime/types/embeddings.ts +43 -0
- package/src/libs/agent-runtime/types/index.ts +1 -0
- package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.ts +25 -2
- package/src/libs/langchain/file.ts +29 -0
- package/src/libs/langchain/index.ts +1 -0
- package/src/libs/langchain/loaders/code/__tests__/index.test.ts +34 -0
- package/src/libs/langchain/loaders/code/__tests__/long.json +250 -0
- package/src/libs/langchain/loaders/code/__tests__/long.txt +885 -0
- package/src/libs/langchain/loaders/code/index.ts +15 -0
- package/src/libs/langchain/loaders/config.ts +6 -0
- package/src/libs/langchain/loaders/docx/index.ts +13 -0
- package/src/libs/langchain/loaders/index.ts +98 -0
- package/src/libs/langchain/loaders/markdown/__tests__/demo.mdx +325 -0
- package/src/libs/langchain/loaders/markdown/__tests__/index.test.ts +13 -0
- package/src/libs/langchain/loaders/markdown/index.ts +9 -0
- package/src/libs/langchain/loaders/pdf/index.ts +7 -0
- package/src/libs/langchain/loaders/pptx/index.ts +7 -0
- package/src/libs/langchain/loaders/txt/__tests__/index.test.ts +54 -0
- package/src/libs/langchain/loaders/txt/__tests__/long.json +38 -0
- package/src/libs/langchain/loaders/txt/__tests__/pg24022.txt +83 -0
- package/src/libs/langchain/loaders/txt/index.ts +9 -0
- package/src/libs/langchain/types.ts +1 -0
- package/src/libs/unstructured/__tests__/fixtures/image-parse/fast-partition-none-output.json +3258 -0
- package/src/libs/unstructured/__tests__/fixtures/image-parse/fast-partition-none-raw.json +3255 -0
- package/src/libs/unstructured/__tests__/fixtures/table-parse/auto-partition-basic-output.json +347 -0
- package/src/libs/unstructured/__tests__/fixtures/table-parse/auto-partition-basic-raw.json +131 -0
- package/src/libs/unstructured/__tests__/fixtures/table-parse/auto-partition-raw.json +276 -0
- package/src/libs/unstructured/__tests__/fixtures/table-parse/fast-partition-basic-output.json +1865 -0
- package/src/libs/unstructured/__tests__/fixtures/table-parse/fast-partition-basic-raw.json +111 -0
- package/src/libs/unstructured/__tests__/fixtures/table-parse/fast-partition-raw.json +1892 -0
- package/src/libs/unstructured/__tests__/index.test.ts +165 -0
- package/src/libs/unstructured/index.ts +166 -0
- package/src/locales/default/components.ts +62 -0
- package/src/utils/colorUtils.ts +19 -0
- package/src/utils/sleep.ts +4 -0
|
@@ -0,0 +1,885 @@
|
|
|
1
|
+
import { copyToClipboard } from '@lobehub/ui';
|
|
2
|
+
import isEqual from 'fast-deep-equal';
|
|
3
|
+
import { produce } from 'immer';
|
|
4
|
+
import { template } from 'lodash-es';
|
|
5
|
+
import { SWRResponse, mutate } from 'swr';
|
|
6
|
+
import { StateCreator } from 'zustand/vanilla';
|
|
7
|
+
|
|
8
|
+
import { chainAnswerWithContext } from '@/chains/answerWithContext';
|
|
9
|
+
import { LOADING_FLAT } from '@/const/message';
|
|
10
|
+
import { TraceEventType, TraceNameMap } from '@/const/trace';
|
|
11
|
+
import { isServerMode } from '@/const/version';
|
|
12
|
+
import { useClientDataSWR } from '@/libs/swr';
|
|
13
|
+
import { chatService } from '@/services/chat';
|
|
14
|
+
import { messageService } from '@/services/message';
|
|
15
|
+
import { topicService } from '@/services/topic';
|
|
16
|
+
import { traceService } from '@/services/trace';
|
|
17
|
+
import { useAgentStore } from '@/store/agent';
|
|
18
|
+
import { agentSelectors } from '@/store/agent/selectors';
|
|
19
|
+
import { chatHelpers } from '@/store/chat/helpers';
|
|
20
|
+
import { messageMapKey } from '@/store/chat/slices/message/utils';
|
|
21
|
+
import { ChatStore } from '@/store/chat/store';
|
|
22
|
+
import { useSessionStore } from '@/store/session';
|
|
23
|
+
import { UploadFileItem } from '@/types/files/upload';
|
|
24
|
+
import {
|
|
25
|
+
ChatMessage,
|
|
26
|
+
ChatMessageError,
|
|
27
|
+
CreateMessageParams,
|
|
28
|
+
MessageToolCall,
|
|
29
|
+
} from '@/types/message';
|
|
30
|
+
import { TraceEventPayloads } from '@/types/trace';
|
|
31
|
+
import { setNamespace } from '@/utils/storeDebug';
|
|
32
|
+
import { nanoid } from '@/utils/uuid';
|
|
33
|
+
|
|
34
|
+
import type { ChatStoreState } from '../../initialState';
|
|
35
|
+
import { chatSelectors, topicSelectors } from '../../selectors';
|
|
36
|
+
import { preventLeavingFn, toggleBooleanList } from '../../utils';
|
|
37
|
+
import { ChatRAGAction, chatRag } from './actions/rag';
|
|
38
|
+
import { MessageDispatch, messagesReducer } from './reducer';
|
|
39
|
+
|
|
40
|
+
// Namespace tag used to label zustand devtools actions emitted from this slice.
const n = setNamespace('m');

// SWR cache-key prefix for the message-fetching hook (useFetchMessages / refreshMessages).
const SWR_USE_FETCH_MESSAGES = 'SWR_USE_FETCH_MESSAGES';
|
|
43
|
+
|
|
44
|
+
/** Parameters accepted by {@link ChatMessageAction.sendMessage}. */
export interface SendMessageParams {
  /** Raw user input that becomes the user message content. */
  message: string;
  /** Files attached to the message; only their ids are persisted on the message. */
  files?: UploadFileItem[];
  /** When true, only persist the user message and skip the AI-response flow. */
  onlyAddUserMessage?: boolean;
  /**
   * Marks the message as an onboarding "welcome question"; forwarded to the chat service.
   * https://github.com/lobehub/lobe-chat/pull/2086
   */
  isWelcomeQuestion?: boolean;
}
|
54
|
+
|
|
55
|
+
/** Options threaded through the internal AI-message processing pipeline. */
interface ProcessMessageParams {
  /** Existing observability trace id to continue, if any. */
  traceId?: string;
  /** Welcome-question flag, forwarded to the chat service. */
  isWelcomeQuestion?: boolean;
  /**
   * the RAG query content, should be embedding and used in the semantic search
   */
  ragQuery?: string;
}
|
|
63
|
+
|
|
64
|
+
export interface ChatMessageAction extends ChatRAGAction {
  // create
  /** Send a user message (optionally with files) and drive the full AI-response pipeline. */
  sendMessage: (params: SendMessageParams) => Promise<void>;
  /** Persist the current input-box content as an assistant-authored message. */
  addAIMessage: () => Promise<void>;
  /**
   * regenerate message
   * trace enabled
   * @param id
   */
  regenerateMessage: (id: string) => Promise<void>;

  // delete
  /**
   * clear message on the active session
   */
  clearMessage: () => Promise<void>;
  /** Delete a message, plus any tool messages spawned by its tool calls. */
  deleteMessage: (id: string) => Promise<void>;
  /** Delete a tool message and unlink it from its parent assistant message. */
  deleteToolMessage: (id: string) => Promise<void>;
  /** Delete a message and regenerate the answer in its place (traced). */
  delAndRegenerateMessage: (id: string) => Promise<void>;
  /** Remove every message across all sessions. */
  clearAllMessages: () => Promise<void>;
  // update
  /** Update the chat input-box content (store only, no persistence). */
  updateInputMessage: (message: string) => void;
  /** Edit a persisted message's content; the change is traced before the update. */
  modifyMessageContent: (id: string, content: string) => Promise<void>;
  // query
  /** SWR hook that loads the messages for a session/topic pair into the store map. */
  useFetchMessages: (sessionId: string, topicId?: string) => SWRResponse<ChatMessage[]>;
  /** Abort the in-flight AI generation, if any. */
  stopGenerateMessage: () => void;
  /** Copy a message's content to the clipboard; the action is traced. */
  copyMessage: (id: string, content: string) => Promise<void>;
  /** Revalidate the SWR message cache for the active session/topic. */
  refreshMessages: () => Promise<void>;
  /** Toggle a message in or out of edit mode. */
  toggleMessageEditing: (id: string, editing: boolean) => void;

  // ========= ↓ Internal Method ↓ ========== //
  // ========================================== //
  // ========================================== //

  /**
   * update message at the frontend point
   * this method will not update messages to database
   */
  internal_dispatchMessage: (payload: MessageDispatch) => void;
  /**
   * core process of the AI message (include preprocess and postprocess)
   */
  internal_coreProcessMessage: (
    messages: ChatMessage[],
    parentId: string,
    params?: ProcessMessageParams,
  ) => Promise<void>;
  /**
   * the method to fetch the AI message
   */
  internal_fetchAIChatMessage: (
    messages: ChatMessage[],
    assistantMessageId: string,
    params?: ProcessMessageParams,
  ) => Promise<{
    isFunctionCall: boolean;
    traceId?: string;
  }>;

  /**
   * update the message content with optimistic update
   * a method used by other action
   */
  internal_updateMessageContent: (
    id: string,
    content: string,
    toolCalls?: MessageToolCall[],
  ) => Promise<void>;
  /**
   * update the message error with optimistic update
   */
  internal_updateMessageError: (id: string, error: ChatMessageError | null) => Promise<void>;
  /**
   * create a message with optimistic update
   */
  internal_createMessage: (
    params: CreateMessageParams,
    context?: { tempMessageId?: string; skipRefresh?: boolean },
  ) => Promise<string>;
  /**
   * create a temp message for optimistic update
   * otherwise the message will be too slow to show
   */
  internal_createTmpMessage: (params: CreateMessageParams) => string;
  /**
   * delete the message content with optimistic update
   */
  internal_deleteMessage: (id: string) => Promise<void>;
  /** Re-run the AI generation for a message, optionally under an existing trace id. */
  internal_resendMessage: (id: string, traceId?: string) => Promise<void>;

  /** Fetch messages for the active session/topic directly (outside the SWR hook). */
  internal_fetchMessages: () => Promise<void>;
  /** Report a trace event for a message to the tracing backend. */
  internal_traceMessage: (id: string, payload: TraceEventPayloads) => Promise<void>;

  /**
   * method to toggle message create loading state
   * the AI message status is creating -> generating
   * other message role like user and tool , only this method need to be called
   */
  internal_toggleMessageLoading: (loading: boolean, id: string) => void;
  /**
   * method to toggle ai message generating loading
   */
  internal_toggleChatLoading: (
    loading: boolean,
    id?: string,
    action?: string,
  ) => AbortController | undefined;
  /**
   * method to toggle the tool calling loading state
   */
  internal_toggleToolCallingStreaming: (id: string, streaming: boolean[] | undefined) => void;
  /**
   * helper to toggle the loading state of the array,used by these three toggleXXXLoading
   */
  internal_toggleLoadingArrays: (
    key: keyof ChatStoreState,
    loading: boolean,
    id?: string,
    action?: string,
  ) => AbortController | undefined;
}
|
|
185
|
+
|
|
186
|
+
const getAgentConfig = () => agentSelectors.currentAgentConfig(useAgentStore.getState());
|
|
187
|
+
const getAgentChatConfig = () => agentSelectors.currentAgentChatConfig(useAgentStore.getState());
|
|
188
|
+
const hasEnabledKnowledge = () => agentSelectors.hasEnabledKnowledge(useAgentStore.getState());
|
|
189
|
+
|
|
190
|
+
export const chatMessage: StateCreator<
|
|
191
|
+
ChatStore,
|
|
192
|
+
[['zustand/devtools', never]],
|
|
193
|
+
[],
|
|
194
|
+
ChatMessageAction
|
|
195
|
+
> = (set, get, ...rest) => ({
|
|
196
|
+
...chatRag(set, get, ...rest),
|
|
197
|
+
|
|
198
|
+
deleteMessage: async (id) => {
|
|
199
|
+
const message = chatSelectors.getMessageById(id)(get());
|
|
200
|
+
if (!message) return;
|
|
201
|
+
|
|
202
|
+
let ids = [message.id];
|
|
203
|
+
|
|
204
|
+
// if the message is a tool calls, then delete all the related messages
|
|
205
|
+
if (message.tools) {
|
|
206
|
+
const toolMessageIds = message.tools.flatMap((tool) => {
|
|
207
|
+
const messages = chatSelectors
|
|
208
|
+
.currentChats(get())
|
|
209
|
+
.filter((m) => m.tool_call_id === tool.id);
|
|
210
|
+
|
|
211
|
+
return messages.map((m) => m.id);
|
|
212
|
+
});
|
|
213
|
+
ids = ids.concat(toolMessageIds);
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
get().internal_dispatchMessage({ type: 'deleteMessages', ids });
|
|
217
|
+
await messageService.removeMessages(ids);
|
|
218
|
+
await get().refreshMessages();
|
|
219
|
+
},
|
|
220
|
+
|
|
221
|
+
  deleteToolMessage: async (id) => {
    // Only messages with role `tool` may be removed through this path.
    const message = chatSelectors.getMessageById(id)(get());
    if (!message || message.role !== 'tool') return;

    // A tool message is paired with a tool-call entry on its parent assistant
    // message; that entry must be removed as well to keep the pair consistent.
    const removeToolInAssistantMessage = async () => {
      if (!message.parentId) return;
      await get().internal_removeToolToAssistantMessage(message.parentId, message.tool_call_id);
    };

    await Promise.all([
      // 1. remove tool message
      get().internal_deleteMessage(id),
      // 2. remove the tool item in the assistant tools
      removeToolInAssistantMessage(),
    ]);
  },
|
|
237
|
+
|
|
238
|
+
  delAndRegenerateMessage: async (id) => {
    // Reuse the original message's trace so the regeneration stays linked to it.
    const traceId = chatSelectors.getTraceIdByMessageId(id)(get());
    // NOTE(review): the two calls below are not awaited, so the resend and the
    // deletion run concurrently — presumably intentional for responsiveness;
    // confirm the ordering is safe (compare regenerateMessage, which awaits).
    get().internal_resendMessage(id, traceId);
    get().deleteMessage(id);

    // trace the delete and regenerate message
    get().internal_traceMessage(id, { eventType: TraceEventType.DeleteAndRegenerateMessage });
  },
|
|
246
|
+
regenerateMessage: async (id: string) => {
|
|
247
|
+
const traceId = chatSelectors.getTraceIdByMessageId(id)(get());
|
|
248
|
+
await get().internal_resendMessage(id, traceId);
|
|
249
|
+
|
|
250
|
+
// trace the delete and regenerate message
|
|
251
|
+
get().internal_traceMessage(id, { eventType: TraceEventType.RegenerateMessage });
|
|
252
|
+
},
|
|
253
|
+
  clearMessage: async () => {
    const { activeId, activeTopicId, refreshMessages, refreshTopic, switchTopic } = get();

    // Remove every message of the active session (scoped to the active topic when one is open).
    await messageService.removeMessagesByAssistant(activeId, activeTopicId);

    // A topic without messages is meaningless, so drop the topic itself too.
    if (activeTopicId) {
      await topicService.removeTopic(activeTopicId);
    }
    await refreshTopic();
    await refreshMessages();

    // after remove topic , go back to default topic
    switchTopic();
  },
|
|
267
|
+
clearAllMessages: async () => {
|
|
268
|
+
const { refreshMessages } = get();
|
|
269
|
+
await messageService.removeAllMessages();
|
|
270
|
+
await refreshMessages();
|
|
271
|
+
},
|
|
272
|
+
sendMessage: async ({ message, files, onlyAddUserMessage, isWelcomeQuestion }) => {
|
|
273
|
+
const { internal_coreProcessMessage, activeTopicId, activeId } = get();
|
|
274
|
+
if (!activeId) return;
|
|
275
|
+
|
|
276
|
+
const fileIdList = files?.map((f) => f.id);
|
|
277
|
+
|
|
278
|
+
const hasFile = !!fileIdList && fileIdList.length > 0;
|
|
279
|
+
|
|
280
|
+
// if message is empty or no files, then stop
|
|
281
|
+
if (!message && !hasFile) return;
|
|
282
|
+
|
|
283
|
+
set({ isCreatingMessage: true }, false, 'creatingMessage/start');
|
|
284
|
+
|
|
285
|
+
const newMessage: CreateMessageParams = {
|
|
286
|
+
content: message,
|
|
287
|
+
// if message has attached with files, then add files to message and the agent
|
|
288
|
+
files: fileIdList,
|
|
289
|
+
role: 'user',
|
|
290
|
+
sessionId: activeId,
|
|
291
|
+
// if there is activeTopicId,then add topicId to message
|
|
292
|
+
topicId: activeTopicId,
|
|
293
|
+
};
|
|
294
|
+
|
|
295
|
+
const agentConfig = getAgentChatConfig();
|
|
296
|
+
|
|
297
|
+
let tempMessageId: string | undefined = undefined;
|
|
298
|
+
let newTopicId: string | undefined = undefined;
|
|
299
|
+
|
|
300
|
+
// it should be the default topic, then
|
|
301
|
+
// if autoCreateTopic is enabled, check to whether we need to create a topic
|
|
302
|
+
if (!onlyAddUserMessage && !activeTopicId && agentConfig.enableAutoCreateTopic) {
|
|
303
|
+
// check activeTopic and then auto create topic
|
|
304
|
+
const chats = chatSelectors.currentChats(get());
|
|
305
|
+
|
|
306
|
+
// we will add two messages (user and assistant), so the finial length should +2
|
|
307
|
+
const featureLength = chats.length + 2;
|
|
308
|
+
|
|
309
|
+
// if there is no activeTopicId and the feature length is greater than the threshold
|
|
310
|
+
// then create a new topic and active it
|
|
311
|
+
if (!get().activeTopicId && featureLength >= agentConfig.autoCreateTopicThreshold) {
|
|
312
|
+
// we need to create a temp message for optimistic update
|
|
313
|
+
tempMessageId = get().internal_createTmpMessage(newMessage);
|
|
314
|
+
get().internal_toggleMessageLoading(true, tempMessageId);
|
|
315
|
+
|
|
316
|
+
const topicId = await get().createTopic();
|
|
317
|
+
|
|
318
|
+
if (topicId) {
|
|
319
|
+
newTopicId = topicId;
|
|
320
|
+
newMessage.topicId = topicId;
|
|
321
|
+
|
|
322
|
+
// we need to copy the messages to the new topic or the message will disappear
|
|
323
|
+
const mapKey = chatSelectors.currentChatKey(get());
|
|
324
|
+
const newMaps = {
|
|
325
|
+
...get().messagesMap,
|
|
326
|
+
[messageMapKey(activeId, topicId)]: get().messagesMap[mapKey],
|
|
327
|
+
};
|
|
328
|
+
set({ messagesMap: newMaps }, false, 'internal_copyMessages');
|
|
329
|
+
|
|
330
|
+
// get().internal_dispatchMessage({ type: 'deleteMessage', id: tempMessageId });
|
|
331
|
+
get().internal_toggleMessageLoading(false, tempMessageId);
|
|
332
|
+
|
|
333
|
+
// make the topic loading
|
|
334
|
+
get().internal_updateTopicLoading(topicId, true);
|
|
335
|
+
}
|
|
336
|
+
}
|
|
337
|
+
}
|
|
338
|
+
// update assistant update to make it rerank
|
|
339
|
+
useSessionStore.getState().triggerSessionUpdate(get().activeId);
|
|
340
|
+
|
|
341
|
+
const id = await get().internal_createMessage(newMessage, {
|
|
342
|
+
tempMessageId,
|
|
343
|
+
skipRefresh: !onlyAddUserMessage,
|
|
344
|
+
});
|
|
345
|
+
|
|
346
|
+
// switch to the new topic if create the new topic
|
|
347
|
+
if (!!newTopicId) {
|
|
348
|
+
await get().switchTopic(newTopicId, true);
|
|
349
|
+
await get().internal_fetchMessages();
|
|
350
|
+
|
|
351
|
+
// delete previous messages
|
|
352
|
+
// remove the temp message map
|
|
353
|
+
const newMaps = { ...get().messagesMap, [messageMapKey(activeId, null)]: [] };
|
|
354
|
+
set({ messagesMap: newMaps }, false, 'internal_copyMessages');
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
// if only add user message, then stop
|
|
358
|
+
if (onlyAddUserMessage) {
|
|
359
|
+
set({ isCreatingMessage: false }, false, 'creatingMessage/start');
|
|
360
|
+
return;
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
// Get the current messages to generate AI response
|
|
364
|
+
const messages = chatSelectors.currentChats(get());
|
|
365
|
+
const userFiles = chatSelectors.currentUserFiles(get()).map((f) => f.id);
|
|
366
|
+
|
|
367
|
+
await internal_coreProcessMessage(messages, id, {
|
|
368
|
+
isWelcomeQuestion,
|
|
369
|
+
// if there is relative files or enabled knowledge, try with ragQuery
|
|
370
|
+
ragQuery: hasEnabledKnowledge() || userFiles.length > 0 ? message : undefined,
|
|
371
|
+
});
|
|
372
|
+
|
|
373
|
+
set({ isCreatingMessage: false }, false, 'creatingMessage/stop');
|
|
374
|
+
|
|
375
|
+
const summaryTitle = async () => {
|
|
376
|
+
// if autoCreateTopic is false, then stop
|
|
377
|
+
if (!agentConfig.enableAutoCreateTopic) return;
|
|
378
|
+
|
|
379
|
+
// check activeTopic and then auto update topic title
|
|
380
|
+
if (newTopicId) {
|
|
381
|
+
const chats = chatSelectors.currentChats(get());
|
|
382
|
+
await get().summaryTopicTitle(newTopicId, chats);
|
|
383
|
+
return;
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
const topic = topicSelectors.currentActiveTopic(get());
|
|
387
|
+
|
|
388
|
+
if (topic && !topic.title) {
|
|
389
|
+
const chats = chatSelectors.currentChats(get());
|
|
390
|
+
await get().summaryTopicTitle(topic.id, chats);
|
|
391
|
+
}
|
|
392
|
+
};
|
|
393
|
+
|
|
394
|
+
// if there is relative files, then add files to agent
|
|
395
|
+
// only available in server mode
|
|
396
|
+
const addFilesToAgent = async () => {
|
|
397
|
+
if (userFiles.length === 0 || !isServerMode) return;
|
|
398
|
+
|
|
399
|
+
await useAgentStore.getState().addFilesToAgent(userFiles, false);
|
|
400
|
+
};
|
|
401
|
+
|
|
402
|
+
await Promise.all([summaryTitle(), addFilesToAgent()]);
|
|
403
|
+
},
|
|
404
|
+
addAIMessage: async () => {
|
|
405
|
+
const { internal_createMessage, updateInputMessage, activeTopicId, activeId, inputMessage } =
|
|
406
|
+
get();
|
|
407
|
+
if (!activeId) return;
|
|
408
|
+
|
|
409
|
+
await internal_createMessage({
|
|
410
|
+
content: inputMessage,
|
|
411
|
+
role: 'assistant',
|
|
412
|
+
sessionId: activeId,
|
|
413
|
+
// if there is activeTopicId,then add topicId to message
|
|
414
|
+
topicId: activeTopicId,
|
|
415
|
+
});
|
|
416
|
+
|
|
417
|
+
updateInputMessage('');
|
|
418
|
+
},
|
|
419
|
+
copyMessage: async (id, content) => {
|
|
420
|
+
await copyToClipboard(content);
|
|
421
|
+
|
|
422
|
+
get().internal_traceMessage(id, { eventType: TraceEventType.CopyMessage });
|
|
423
|
+
},
|
|
424
|
+
toggleMessageEditing: (id, editing) => {
|
|
425
|
+
set(
|
|
426
|
+
{ messageEditingIds: toggleBooleanList(get().messageEditingIds, id, editing) },
|
|
427
|
+
false,
|
|
428
|
+
'toggleMessageEditing',
|
|
429
|
+
);
|
|
430
|
+
},
|
|
431
|
+
stopGenerateMessage: () => {
|
|
432
|
+
const { abortController, internal_toggleChatLoading } = get();
|
|
433
|
+
if (!abortController) return;
|
|
434
|
+
|
|
435
|
+
abortController.abort('canceled');
|
|
436
|
+
|
|
437
|
+
internal_toggleChatLoading(false, undefined, n('stopGenerateMessage') as string);
|
|
438
|
+
},
|
|
439
|
+
|
|
440
|
+
updateInputMessage: (message) => {
|
|
441
|
+
if (isEqual(message, get().inputMessage)) return;
|
|
442
|
+
|
|
443
|
+
set({ inputMessage: message }, false, n('updateInputMessage', message));
|
|
444
|
+
},
|
|
445
|
+
  modifyMessageContent: async (id, content) => {
    // tracing the diff of update
    // the message content is about to change, so the trace must be sent BEFORE
    // the update, or the trace would capture the wrong (already-edited) data
    get().internal_traceMessage(id, {
      eventType: TraceEventType.ModifyMessage,
      nextContent: content,
    });

    // persist the new content with an optimistic UI update
    await get().internal_updateMessageContent(id, content);
  },
|
|
455
|
+
  useFetchMessages: (sessionId, activeTopicId) =>
    // SWR hook keyed by (prefix, session, topic); any change in the pair refetches.
    useClientDataSWR<ChatMessage[]>(
      [SWR_USE_FETCH_MESSAGES, sessionId, activeTopicId],
      async ([, sessionId, topicId]: [string, string, string | undefined]) =>
        messageService.getMessages(sessionId, topicId),
      {
        onSuccess: (messages, key) => {
          // Write the fetched list into the store map under its session/topic key.
          const nextMap = {
            ...get().messagesMap,
            [messageMapKey(sessionId, activeTopicId)]: messages,
          };
          // no need to update map if the messages have been init and the map is the same
          if (get().messagesInit && isEqual(nextMap, get().messagesMap)) return;

          set(
            { messagesInit: true, messagesMap: nextMap },
            false,
            n('useFetchMessages', { messages, queryKey: key }),
          );
        },
      },
    ),
|
|
477
|
+
refreshMessages: async () => {
|
|
478
|
+
await mutate([SWR_USE_FETCH_MESSAGES, get().activeId, get().activeTopicId]);
|
|
479
|
+
},
|
|
480
|
+
|
|
481
|
+
// the internal process method of the AI message
|
|
482
|
+
internal_coreProcessMessage: async (originalMessages, userMessageId, params) => {
|
|
483
|
+
const { internal_fetchAIChatMessage, triggerToolCalls, refreshMessages, activeTopicId } = get();
|
|
484
|
+
|
|
485
|
+
// create a new array to avoid the original messages array change
|
|
486
|
+
const messages = [...originalMessages];
|
|
487
|
+
|
|
488
|
+
const { model, provider } = getAgentConfig();
|
|
489
|
+
|
|
490
|
+
let fileChunkIds: string[] | undefined;
|
|
491
|
+
|
|
492
|
+
// go into RAG flow if there is ragQuery flag
|
|
493
|
+
if (params?.ragQuery) {
|
|
494
|
+
// 1. get the relative chunks from semantic search
|
|
495
|
+
const chunks = await get().internal_retrieveChunks(
|
|
496
|
+
userMessageId,
|
|
497
|
+
params?.ragQuery,
|
|
498
|
+
// should skip the last content
|
|
499
|
+
messages.map((m) => m.content).slice(0, messages.length - 1),
|
|
500
|
+
);
|
|
501
|
+
console.log('召回 chunks', chunks);
|
|
502
|
+
|
|
503
|
+
// 2. build the retrieve context messages
|
|
504
|
+
const retrieveContext = chainAnswerWithContext(
|
|
505
|
+
params?.ragQuery,
|
|
506
|
+
chunks.map((c) => c.text as string),
|
|
507
|
+
);
|
|
508
|
+
|
|
509
|
+
// 3. add the retrieve context messages to the messages history
|
|
510
|
+
if (retrieveContext.messages && retrieveContext.messages?.length > 0) {
|
|
511
|
+
// remove the last message due to the query is in the retrieveContext
|
|
512
|
+
messages.pop();
|
|
513
|
+
retrieveContext.messages?.forEach((m) => messages.push(m as ChatMessage));
|
|
514
|
+
}
|
|
515
|
+
|
|
516
|
+
fileChunkIds = chunks.map((c) => c.id);
|
|
517
|
+
}
|
|
518
|
+
|
|
519
|
+
// 2. Add an empty message to place the AI response
|
|
520
|
+
const assistantMessage: CreateMessageParams = {
|
|
521
|
+
role: 'assistant',
|
|
522
|
+
content: LOADING_FLAT,
|
|
523
|
+
fromModel: model,
|
|
524
|
+
fromProvider: provider,
|
|
525
|
+
|
|
526
|
+
parentId: userMessageId,
|
|
527
|
+
sessionId: get().activeId,
|
|
528
|
+
topicId: activeTopicId, // if there is activeTopicId,then add it to topicId
|
|
529
|
+
fileChunkIds,
|
|
530
|
+
ragQueryId: userMessageId,
|
|
531
|
+
};
|
|
532
|
+
|
|
533
|
+
const assistantId = await get().internal_createMessage(assistantMessage);
|
|
534
|
+
|
|
535
|
+
// 3. fetch the AI response
|
|
536
|
+
const { isFunctionCall } = await internal_fetchAIChatMessage(messages, assistantId, params);
|
|
537
|
+
|
|
538
|
+
// 4. if it's the function call message, trigger the function method
|
|
539
|
+
if (isFunctionCall) {
|
|
540
|
+
await refreshMessages();
|
|
541
|
+
await triggerToolCalls(assistantId);
|
|
542
|
+
}
|
|
543
|
+
},
|
|
544
|
+
internal_dispatchMessage: (payload) => {
|
|
545
|
+
const { activeId } = get();
|
|
546
|
+
|
|
547
|
+
if (!activeId) return;
|
|
548
|
+
|
|
549
|
+
const messages = messagesReducer(chatSelectors.currentChats(get()), payload);
|
|
550
|
+
|
|
551
|
+
const nextMap = { ...get().messagesMap, [chatSelectors.currentChatKey(get())]: messages };
|
|
552
|
+
|
|
553
|
+
if (isEqual(nextMap, get().messagesMap)) return;
|
|
554
|
+
|
|
555
|
+
set({ messagesMap: nextMap }, false, { type: `dispatchMessage/${payload.type}`, payload });
|
|
556
|
+
},
|
|
557
|
+
  /**
   * Fetch the AI response for the prepared message list, streaming partial
   * text/tool-call chunks into the placeholder assistant message.
   * Returns whether the answer was a function call, plus the trace id if any.
   */
  internal_fetchAIChatMessage: async (messages, assistantId, params) => {
    const {
      internal_toggleChatLoading,
      refreshMessages,
      internal_updateMessageContent,
      internal_dispatchMessage,
      internal_toggleToolCallingStreaming,
    } = get();

    // mark the assistant message as generating; keep the controller to allow aborting
    const abortController = internal_toggleChatLoading(
      true,
      assistantId,
      n('generateMessage(start)', { assistantId, messages }) as string,
    );

    const agentConfig = getAgentConfig();
    const chatConfig = agentConfig.chatConfig;

    // lodash template with {{ }} interpolation, used for the user input template
    const compiler = template(chatConfig.inputTemplate, { interpolate: /{{([\S\s]+?)}}/g });

    // ================================== //
    //   messages uniformly preprocess    //
    // ================================== //

    // 1. slice messages with config
    let preprocessMsgs = chatHelpers.getSlicedMessagesWithConfig(messages, chatConfig);

    // 2. replace inputMessage template
    preprocessMsgs = !chatConfig.inputTemplate
      ? preprocessMsgs
      : preprocessMsgs.map((m) => {
          if (m.role === 'user') {
            try {
              return { ...m, content: compiler({ text: m.content }) };
            } catch (error) {
              // a broken template must not break sending; fall back to the raw content
              console.error(error);

              return m;
            }
          }

          return m;
        });

    // 3. add systemRole
    if (agentConfig.systemRole) {
      preprocessMsgs.unshift({ content: agentConfig.systemRole, role: 'system' } as ChatMessage);
    }

    // 4. handle max_tokens
    agentConfig.params.max_tokens = chatConfig.enableMaxTokens
      ? agentConfig.params.max_tokens
      : undefined;

    // 5. handle config for the vision model
    // Due to the gpt-4-vision-preview model's default max_tokens is very small
    // we need to set the max_tokens a larger one.
    if (agentConfig.model === 'gpt-4-vision-preview') {
      /* eslint-disable unicorn/no-lonely-if */
      if (!agentConfig.params.max_tokens)
        // refs: https://github.com/lobehub/lobe-chat/issues/837
        agentConfig.params.max_tokens = 2048;
    }

    let isFunctionCall = false;
    let msgTraceId: string | undefined;
    let output = '';

    await chatService.createAssistantMessageStream({
      abortController,
      params: {
        messages: preprocessMsgs,
        model: agentConfig.model,
        provider: agentConfig.provider,
        ...agentConfig.params,
        plugins: agentConfig.plugins,
      },
      trace: {
        traceId: params?.traceId,
        sessionId: get().activeId,
        topicId: get().activeTopicId,
        traceName: TraceNameMap.Conversation,
      },
      isWelcomeQuestion: params?.isWelcomeQuestion,
      onErrorHandle: async (error) => {
        // persist the error on the assistant message and re-sync the list
        await messageService.updateMessageError(assistantId, error);
        await refreshMessages();
      },
      onFinish: async (content, { traceId, observationId, toolCalls }) => {
        // if there is traceId, update it
        if (traceId) {
          msgTraceId = traceId;
          await messageService.updateMessage(assistantId, {
            traceId,
            observationId: observationId ?? undefined,
          });
        }

        if (toolCalls && toolCalls.length > 0) {
          // streaming is over; clear the tool-call animation state
          internal_toggleToolCallingStreaming(assistantId, undefined);
        }

        // update the content after fetch result
        await internal_updateMessageContent(assistantId, content, toolCalls);
      },
      onMessageHandle: async (chunk) => {
        switch (chunk.type) {
          case 'text': {
            // accumulate streamed text and push it into the placeholder message
            output += chunk.text;
            internal_dispatchMessage({
              id: assistantId,
              type: 'updateMessage',
              value: { content: output },
            });
            break;
          }

          // is this message is just a tool call
          case 'tool_calls': {
            internal_toggleToolCallingStreaming(assistantId, chunk.isAnimationActives);
            internal_dispatchMessage({
              id: assistantId,
              type: 'updateMessage',
              value: { tools: get().internal_transformToolCalls(chunk.tool_calls) },
            });
            isFunctionCall = true;
          }
        }
      },
    });

    internal_toggleChatLoading(false, assistantId, n('generateMessage(end)') as string);

    return {
      isFunctionCall,
      traceId: msgTraceId,
    };
  },
|
|
695
|
+
|
|
696
|
+
internal_resendMessage: async (messageId, traceId) => {
  // 1. Rebuild the relevant history up to (and including) the anchor message.
  const allChats = chatSelectors.currentChats(get());

  const anchorIndex = allChats.findIndex((item) => item.id === messageId);
  if (anchorIndex < 0) return;

  const anchor = allChats[anchorIndex];

  let history: ChatMessage[] = [];

  if (anchor.role === 'tool' || anchor.role === 'user') {
    history = allChats.slice(0, anchorIndex + 1);
  } else if (anchor.role === 'assistant') {
    // The message was produced by the AI, so locate its originating user message.
    const parentIndex = allChats.findIndex((item) => item.id === anchor.parentId);
    // Without a parentId, fall back to the same slicing as user/tool messages.
    history = allChats.slice(0, parentIndex < 0 ? anchorIndex + 1 : parentIndex + 1);
  }

  if (history.length <= 0) return;

  const { internal_coreProcessMessage } = get();

  const lastUserMessage = history.findLast((item) => item.role === 'user');

  if (!lastUserMessage) return;

  await internal_coreProcessMessage(history, lastUserMessage.id, {
    ragQuery: anchor.content,
    traceId,
  });
},
|
|
736
|
+
|
|
737
|
+
internal_updateMessageError: async (id, error) => {
  const { internal_dispatchMessage, refreshMessages } = get();

  // Optimistically surface the error in the UI, then persist and refresh.
  internal_dispatchMessage({ id, type: 'updateMessage', value: { error } });
  await messageService.updateMessage(id, { error });
  await refreshMessages();
},
|
|
742
|
+
internal_updateMessageContent: async (id, content, toolCalls) => {
  const { internal_dispatchMessage, refreshMessages, internal_transformToolCalls } = get();

  // The async persistence + refresh round-trip takes about 100ms, so patch the
  // message optimistically on the frontend first to avoid a visible flicker.
  // refs: https://medium.com/@kyledeguzmanx/what-are-optimistic-updates-483662c3e171
  const optimisticValue = toolCalls
    ? { tools: internal_transformToolCalls(toolCalls) }
    : { content };

  internal_dispatchMessage({ id, type: 'updateMessage', value: optimisticValue });

  await messageService.updateMessage(id, {
    content,
    tools: toolCalls ? internal_transformToolCalls(toolCalls) : undefined,
  });
  await refreshMessages();
},
|
|
764
|
+
|
|
765
|
+
internal_createMessage: async (message, context) => {
  const { internal_createTmpMessage, refreshMessages, internal_toggleMessageLoading } = get();

  // Reuse a caller-provided placeholder id, or create an optimistic temporary
  // message so the UI is not blocked waiting for the service call.
  let placeholderId = context?.tempMessageId;
  if (!placeholderId) {
    placeholderId = internal_createTmpMessage(message);

    internal_toggleMessageLoading(true, placeholderId);
  }

  const id = await messageService.createMessage(message);
  if (!context?.skipRefresh) {
    await refreshMessages();
  }

  internal_toggleMessageLoading(false, placeholderId);
  return id;
},
|
|
783
|
+
|
|
784
|
+
internal_fetchMessages: async () => {
  const messages = await messageService.getMessages(get().activeId, get().activeTopicId);
  const updatedMap = { ...get().messagesMap, [chatSelectors.currentChatKey(get())]: messages };

  // Skip the store write when messages are already initialized and unchanged.
  if (get().messagesInit && isEqual(updatedMap, get().messagesMap)) return;

  set(
    { messagesInit: true, messagesMap: updatedMap },
    false,
    n('internal_fetchMessages', { messages }),
  );
},
|
|
796
|
+
internal_createTmpMessage: (message) => {
  // Optimistic update: show the message immediately under a temporary id
  // instead of waiting for the backend to assign a real one.
  const temporaryId = 'tmp_' + nanoid();
  get().internal_dispatchMessage({ id: temporaryId, type: 'createMessage', value: message });

  return temporaryId;
},
|
|
805
|
+
internal_deleteMessage: async (id: string) => {
  const { internal_dispatchMessage, refreshMessages } = get();

  // Remove from the UI first, then persist the deletion.
  internal_dispatchMessage({ id, type: 'deleteMessage' });
  await messageService.removeMessage(id);
  await refreshMessages();
},
|
|
810
|
+
internal_traceMessage: async (id, payload) => {
  // Report the update diff of an assistant message to the tracing backend.
  const message = chatSelectors.getMessageById(id)(get());
  if (!message) return;

  const traceId = message?.traceId;
  const observationId = message?.observationId;

  if (traceId && message?.role === 'assistant') {
    // Tracing is best-effort: swallow failures so they never affect the chat
    // flow. NOTE: a bare `.catch()` with no handler does NOT suppress the
    // rejection (the default onRejected rethrows), so an explicit no-op
    // handler is required to avoid an unhandled-rejection error.
    traceService
      .traceEvent({ traceId, observationId, content: message.content, ...payload })
      .catch(() => {});
  }
},
|
|
824
|
+
|
|
825
|
+
// ----- Loading ------- //
|
|
826
|
+
internal_toggleMessageLoading: (loading, id) => {
  // Add or remove the id from the loading list depending on `loading`.
  const messageLoadingIds = toggleBooleanList(get().messageLoadingIds, id, loading);

  set({ messageLoadingIds }, false, 'internal_toggleMessageLoading');
},
|
|
835
|
+
// Thin wrapper: chat loading state lives in the shared `chatLoadingIds` array.
internal_toggleChatLoading: (loading, id, action) =>
  get().internal_toggleLoadingArrays('chatLoadingIds', loading, id, action),
|
|
838
|
+
internal_toggleToolCallingStreaming: (id, streaming) => {
  // A truthy value marks the message as streaming tool calls; a falsy one
  // removes its entry from the map entirely.
  const nextStreamIds = produce(get().toolCallingStreamIds, (draft) => {
    if (streaming) {
      draft[id] = streaming;
    } else {
      delete draft[id];
    }
  });

  set({ toolCallingStreamIds: nextStreamIds }, false, 'toggleToolCallingStreaming');
},
|
|
854
|
+
internal_toggleLoadingArrays: (key, loading, id, action) => {
  if (loading) {
    // Warn before the tab unloads while a request is still in flight.
    window.addEventListener('beforeunload', preventLeavingFn);

    const controller = new AbortController();
    set(
      {
        abortController: controller,
        [key]: toggleBooleanList(get()[key] as string[], id!, loading),
      },
      false,
      action,
    );

    return controller;
  }

  // Turning loading off: clear everything when no id is given, otherwise
  // remove just that id from the list.
  if (!id) {
    set({ abortController: undefined, [key]: [] }, false, action);
  } else {
    set(
      {
        abortController: undefined,
        [key]: toggleBooleanList(get()[key] as string[], id, loading),
      },
      false,
      action,
    );
  }

  window.removeEventListener('beforeunload', preventLeavingFn);
},
|
|
885
|
+
});
|