@lobehub/chat 1.22.3 → 1.22.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/package.json +1 -1
- package/src/config/modelProviders/github.ts +108 -120
- package/src/config/modelProviders/huggingface.ts +9 -9
- package/src/features/ChatInput/useSend.ts +1 -1
- package/src/store/chat/initialState.ts +3 -0
- package/src/store/chat/slices/aiChat/action.test.ts +946 -0
- package/src/store/chat/slices/aiChat/action.ts +536 -0
- package/src/store/chat/slices/aiChat/initialState.ts +27 -0
- package/src/store/chat/slices/enchance/action.test.ts +1 -1
- package/src/store/chat/slices/message/action.test.ts +49 -900
- package/src/store/chat/slices/message/action.ts +8 -506
- package/src/store/chat/slices/message/initialState.ts +1 -22
- package/src/store/chat/slices/message/selectors.test.ts +1 -1
- package/src/store/chat/slices/message/selectors.ts +1 -1
- package/src/store/chat/slices/plugin/action.test.ts +1 -1
- package/src/store/chat/slices/share/action.test.ts +1 -1
- package/src/store/chat/slices/topic/action.test.ts +1 -1
- package/src/store/chat/store.ts +3 -0
- /package/src/store/chat/slices/{message → aiChat}/actions/rag.ts +0 -0
- /package/src/store/chat/{slices/message/utils.ts → utils/messageMapKey.ts} +0 -0
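The two entries at the bottom of this list are pure renames: the RAG actions move from the message slice into the new aiChat slice, and the map-key helper is promoted to a shared util at src/store/chat/utils/messageMapKey.ts. For orientation, here is a minimal sketch of what such a helper typically looks like (hypothetical code, inferred only from the call sites messageMapKey(activeId, topicId) and messageMapKey(activeId, null) visible in the diff below; the published implementation may differ):

// Hypothetical sketch only; the real helper lives in package/src/store/chat/utils/messageMapKey.ts.
// messagesMap entries are keyed per session/topic pair, e.g. 'inbox_null' for the default topic.
export const messageMapKey = (sessionId: string, topicId?: string | null): string =>
  `${sessionId}_${topicId ?? null}`;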
@@ -2,79 +2,38 @@
 // Disable the auto sort key eslint rule to make the code more logic and readable
 import { copyToClipboard } from '@lobehub/ui';
 import isEqual from 'fast-deep-equal';
-import { produce } from 'immer';
-import { template } from 'lodash-es';
 import { SWRResponse, mutate } from 'swr';
 import { StateCreator } from 'zustand/vanilla';
 
-import {
-import { LOADING_FLAT, MESSAGE_CANCEL_FLAT } from '@/const/message';
-import { TraceEventType, TraceNameMap } from '@/const/trace';
-import { isServerMode } from '@/const/version';
+import { TraceEventType } from '@/const/trace';
 import { useClientDataSWR } from '@/libs/swr';
-import { chatService } from '@/services/chat';
 import { messageService } from '@/services/message';
 import { topicService } from '@/services/topic';
 import { traceService } from '@/services/trace';
-import { useAgentStore } from '@/store/agent';
-import { agentSelectors } from '@/store/agent/selectors';
-import { chatHelpers } from '@/store/chat/helpers';
-import { messageMapKey } from '@/store/chat/slices/message/utils';
 import { ChatStore } from '@/store/chat/store';
-import {
-import { UploadFileItem } from '@/types/files/upload';
+import { messageMapKey } from '@/store/chat/utils/messageMapKey';
 import {
   ChatMessage,
   ChatMessageError,
   CreateMessageParams,
   MessageToolCall,
 } from '@/types/message';
-import { MessageSemanticSearchChunk } from '@/types/rag';
 import { TraceEventPayloads } from '@/types/trace';
 import { setNamespace } from '@/utils/storeDebug';
 import { nanoid } from '@/utils/uuid';
 
 import type { ChatStoreState } from '../../initialState';
-import { chatSelectors
+import { chatSelectors } from '../../selectors';
 import { preventLeavingFn, toggleBooleanList } from '../../utils';
-import { ChatRAGAction, chatRag } from './actions/rag';
 import { MessageDispatch, messagesReducer } from './reducer';
 
 const n = setNamespace('m');
 
 const SWR_USE_FETCH_MESSAGES = 'SWR_USE_FETCH_MESSAGES';
 
-export interface
-  message: string;
-  files?: UploadFileItem[];
-  onlyAddUserMessage?: boolean;
-  /**
-   *
-   * https://github.com/lobehub/lobe-chat/pull/2086
-   */
-  isWelcomeQuestion?: boolean;
-}
-
-interface ProcessMessageParams {
-  traceId?: string;
-  isWelcomeQuestion?: boolean;
-  /**
-   * the RAG query content, should be embedding and used in the semantic search
-   */
-  ragQuery?: string;
-}
-
-export interface ChatMessageAction extends ChatRAGAction {
+export interface ChatMessageAction {
   // create
-  sendMessage: (params: SendMessageParams) => Promise<void>;
   addAIMessage: () => Promise<void>;
-  /**
-   * regenerate message
-   * trace enabled
-   * @param id
-   */
-  regenerateMessage: (id: string) => Promise<void>;
-
   // delete
   /**
    * clear message on the active session
@@ -82,46 +41,25 @@ export interface ChatMessageAction extends ChatRAGAction {
   clearMessage: () => Promise<void>;
   deleteMessage: (id: string) => Promise<void>;
   deleteToolMessage: (id: string) => Promise<void>;
-  delAndRegenerateMessage: (id: string) => Promise<void>;
   clearAllMessages: () => Promise<void>;
   // update
   updateInputMessage: (message: string) => void;
   modifyMessageContent: (id: string, content: string) => Promise<void>;
+  toggleMessageEditing: (id: string, editing: boolean) => void;
   // query
   useFetchMessages: (sessionId: string, topicId?: string) => SWRResponse<ChatMessage[]>;
-  stopGenerateMessage: () => void;
   copyMessage: (id: string, content: string) => Promise<void>;
   refreshMessages: () => Promise<void>;
-  toggleMessageEditing: (id: string, editing: boolean) => void;
 
   // ========= ↓ Internal Method ↓ ========== //
   // ========================================== //
   // ========================================== //
 
   /**
-   * update message at the frontend
+   * update message at the frontend
    * this method will not update messages to database
    */
   internal_dispatchMessage: (payload: MessageDispatch) => void;
-  /**
-   * core process of the AI message (include preprocess and postprocess)
-   */
-  internal_coreProcessMessage: (
-    messages: ChatMessage[],
-    parentId: string,
-    params?: ProcessMessageParams,
-  ) => Promise<void>;
-  /**
-   * the method to fetch the AI message
-   */
-  internal_fetchAIChatMessage: (
-    messages: ChatMessage[],
-    assistantMessageId: string,
-    params?: ProcessMessageParams,
-  ) => Promise<{
-    isFunctionCall: boolean;
-    traceId?: string;
-  }>;
 
   /**
    * update the message content with optimistic update
@@ -152,7 +90,6 @@ export interface ChatMessageAction extends ChatRAGAction {
    * delete the message content with optimistic update
    */
   internal_deleteMessage: (id: string) => Promise<void>;
-  internal_resendMessage: (id: string, traceId?: string) => Promise<void>;
 
   internal_fetchMessages: () => Promise<void>;
   internal_traceMessage: (id: string, payload: TraceEventPayloads) => Promise<void>;
@@ -163,18 +100,7 @@ export interface ChatMessageAction extends ChatRAGAction {
    * other message role like user and tool , only this method need to be called
    */
   internal_toggleMessageLoading: (loading: boolean, id: string) => void;
-  /**
-   * method to toggle ai message generating loading
-   */
-  internal_toggleChatLoading: (
-    loading: boolean,
-    id?: string,
-    action?: string,
-  ) => AbortController | undefined;
-  /**
-   * method to toggle the tool calling loading state
-   */
-  internal_toggleToolCallingStreaming: (id: string, streaming: boolean[] | undefined) => void;
+
   /**
    * helper to toggle the loading state of the array,used by these three toggleXXXLoading
    */
@@ -186,18 +112,12 @@ export interface ChatMessageAction extends ChatRAGAction {
   ) => AbortController | undefined;
 }
 
-const getAgentConfig = () => agentSelectors.currentAgentConfig(useAgentStore.getState());
-const getAgentChatConfig = () => agentSelectors.currentAgentChatConfig(useAgentStore.getState());
-const getAgentKnowledge = () => agentSelectors.currentEnabledKnowledge(useAgentStore.getState());
-
 export const chatMessage: StateCreator<
   ChatStore,
   [['zustand/devtools', never]],
   [],
   ChatMessageAction
-> = (set, get
-  ...chatRag(set, get, ...rest),
-
+> = (set, get) => ({
   deleteMessage: async (id) => {
     const message = chatSelectors.getMessageById(id)(get());
     if (!message) return;
@@ -238,21 +158,6 @@ export const chatMessage: StateCreator<
     ]);
   },
 
-  delAndRegenerateMessage: async (id) => {
-    const traceId = chatSelectors.getTraceIdByMessageId(id)(get());
-    get().internal_resendMessage(id, traceId);
-    get().deleteMessage(id);
-
-    // trace the delete and regenerate message
-    get().internal_traceMessage(id, { eventType: TraceEventType.DeleteAndRegenerateMessage });
-  },
-  regenerateMessage: async (id: string) => {
-    const traceId = chatSelectors.getTraceIdByMessageId(id)(get());
-    await get().internal_resendMessage(id, traceId);
-
-    // trace the delete and regenerate message
-    get().internal_traceMessage(id, { eventType: TraceEventType.RegenerateMessage });
-  },
   clearMessage: async () => {
     const { activeId, activeTopicId, refreshMessages, refreshTopic, switchTopic } = get();
 
@@ -272,137 +177,6 @@ export const chatMessage: StateCreator<
     await messageService.removeAllMessages();
     await refreshMessages();
   },
-  sendMessage: async ({ message, files, onlyAddUserMessage, isWelcomeQuestion }) => {
-    const { internal_coreProcessMessage, activeTopicId, activeId } = get();
-    if (!activeId) return;
-
-    const fileIdList = files?.map((f) => f.id);
-
-    const hasFile = !!fileIdList && fileIdList.length > 0;
-
-    // if message is empty or no files, then stop
-    if (!message && !hasFile) return;
-
-    set({ isCreatingMessage: true }, false, 'creatingMessage/start');
-
-    const newMessage: CreateMessageParams = {
-      content: message,
-      // if message has attached with files, then add files to message and the agent
-      files: fileIdList,
-      role: 'user',
-      sessionId: activeId,
-      // if there is activeTopicId,then add topicId to message
-      topicId: activeTopicId,
-    };
-
-    const agentConfig = getAgentChatConfig();
-
-    let tempMessageId: string | undefined = undefined;
-    let newTopicId: string | undefined = undefined;
-
-    // it should be the default topic, then
-    // if autoCreateTopic is enabled, check to whether we need to create a topic
-    if (!onlyAddUserMessage && !activeTopicId && agentConfig.enableAutoCreateTopic) {
-      // check activeTopic and then auto create topic
-      const chats = chatSelectors.currentChats(get());
-
-      // we will add two messages (user and assistant), so the finial length should +2
-      const featureLength = chats.length + 2;
-
-      // if there is no activeTopicId and the feature length is greater than the threshold
-      // then create a new topic and active it
-      if (!get().activeTopicId && featureLength >= agentConfig.autoCreateTopicThreshold) {
-        // we need to create a temp message for optimistic update
-        tempMessageId = get().internal_createTmpMessage(newMessage);
-        get().internal_toggleMessageLoading(true, tempMessageId);
-
-        const topicId = await get().createTopic();
-
-        if (topicId) {
-          newTopicId = topicId;
-          newMessage.topicId = topicId;
-
-          // we need to copy the messages to the new topic or the message will disappear
-          const mapKey = chatSelectors.currentChatKey(get());
-          const newMaps = {
-            ...get().messagesMap,
-            [messageMapKey(activeId, topicId)]: get().messagesMap[mapKey],
-          };
-          set({ messagesMap: newMaps }, false, 'internal_copyMessages');
-
-          // get().internal_dispatchMessage({ type: 'deleteMessage', id: tempMessageId });
-          get().internal_toggleMessageLoading(false, tempMessageId);
-
-          // make the topic loading
-          get().internal_updateTopicLoading(topicId, true);
-        }
-      }
-    }
-    // update assistant update to make it rerank
-    useSessionStore.getState().triggerSessionUpdate(get().activeId);
-
-    const id = await get().internal_createMessage(newMessage, {
-      tempMessageId,
-      skipRefresh: !onlyAddUserMessage,
-    });
-
-    // switch to the new topic if create the new topic
-    if (!!newTopicId) {
-      await get().switchTopic(newTopicId, true);
-      await get().internal_fetchMessages();
-
-      // delete previous messages
-      // remove the temp message map
-      const newMaps = { ...get().messagesMap, [messageMapKey(activeId, null)]: [] };
-      set({ messagesMap: newMaps }, false, 'internal_copyMessages');
-    }
-
-    // if only add user message, then stop
-    if (onlyAddUserMessage) {
-      set({ isCreatingMessage: false }, false, 'creatingMessage/start');
-      return;
-    }
-
-    // Get the current messages to generate AI response
-    const messages = chatSelectors.currentChats(get());
-    const userFiles = chatSelectors.currentUserFiles(get()).map((f) => f.id);
-
-    await internal_coreProcessMessage(messages, id, {
-      isWelcomeQuestion,
-      ragQuery: get().internal_shouldUseRAG() ? message : undefined,
-    });
-
-    set({ isCreatingMessage: false }, false, 'creatingMessage/stop');
-
-    const summaryTitle = async () => {
-      // if autoCreateTopic is false, then stop
-      if (!agentConfig.enableAutoCreateTopic) return;
-
-      // check activeTopic and then auto update topic title
-      if (newTopicId) {
-        const chats = chatSelectors.currentChats(get());
-        await get().summaryTopicTitle(newTopicId, chats);
-        return;
-      }
-
-      const topic = topicSelectors.currentActiveTopic(get());
-
-      if (topic && !topic.title) {
-        const chats = chatSelectors.currentChats(get());
-        await get().summaryTopicTitle(topic.id, chats);
-      }
-    };
-
-    // if there is relative files, then add files to agent
-    // only available in server mode
-    const addFilesToAgent = async () => {
-      if (userFiles.length === 0 || !isServerMode) return;
-
-      await useAgentStore.getState().addFilesToAgent(userFiles, false);
-    };
-
-    await Promise.all([summaryTitle(), addFilesToAgent()]);
-  },
   addAIMessage: async () => {
     const { internal_createMessage, updateInputMessage, activeTopicId, activeId, inputMessage } =
       get();
@@ -430,14 +204,6 @@ export const chatMessage: StateCreator<
       'toggleMessageEditing',
     );
   },
-  stopGenerateMessage: () => {
-    const { abortController, internal_toggleChatLoading } = get();
-    if (!abortController) return;
-
-    abortController.abort(MESSAGE_CANCEL_FLAT);
-
-    internal_toggleChatLoading(false, undefined, n('stopGenerateMessage') as string);
-  },
 
   updateInputMessage: (message) => {
     if (isEqual(message, get().inputMessage)) return;
@@ -481,72 +247,6 @@ export const chatMessage: StateCreator<
   },
 
   // the internal process method of the AI message
-  internal_coreProcessMessage: async (originalMessages, userMessageId, params) => {
-    const { internal_fetchAIChatMessage, triggerToolCalls, refreshMessages, activeTopicId } = get();
-
-    // create a new array to avoid the original messages array change
-    const messages = [...originalMessages];
-
-    const { model, provider } = getAgentConfig();
-
-    let fileChunks: MessageSemanticSearchChunk[] | undefined;
-    let ragQueryId;
-    // go into RAG flow if there is ragQuery flag
-    if (params?.ragQuery) {
-      // 1. get the relative chunks from semantic search
-      const { chunks, queryId } = await get().internal_retrieveChunks(
-        userMessageId,
-        params?.ragQuery,
-        // should skip the last content
-        messages.map((m) => m.content).slice(0, messages.length - 1),
-      );
-
-      ragQueryId = queryId;
-
-      console.log('召回 chunks', chunks);
-
-      // 2. build the retrieve context messages
-      const retrieveContext = chainAnswerWithContext({
-        context: chunks.map((c) => c.text as string),
-        question: params?.ragQuery,
-        knowledge: getAgentKnowledge().map((knowledge) => knowledge.name),
-      });
-
-      // 3. add the retrieve context messages to the messages history
-      if (retrieveContext.messages && retrieveContext.messages?.length > 0) {
-        // remove the last message due to the query is in the retrieveContext
-        messages.pop();
-        retrieveContext.messages?.forEach((m) => messages.push(m as ChatMessage));
-      }
-
-      fileChunks = chunks.map((c) => ({ id: c.id, similarity: c.similarity }));
-    }
-
-    // 2. Add an empty message to place the AI response
-    const assistantMessage: CreateMessageParams = {
-      role: 'assistant',
-      content: LOADING_FLAT,
-      fromModel: model,
-      fromProvider: provider,
-
-      parentId: userMessageId,
-      sessionId: get().activeId,
-      topicId: activeTopicId, // if there is activeTopicId,then add it to topicId
-      fileChunks,
-      ragQueryId,
-    };
-
-    const assistantId = await get().internal_createMessage(assistantMessage);
-
-    // 3. fetch the AI response
-    const { isFunctionCall } = await internal_fetchAIChatMessage(messages, assistantId, params);
-
-    // 4. if it's the function call message, trigger the function method
-    if (isFunctionCall) {
-      await refreshMessages();
-      await triggerToolCalls(assistantId);
-    }
-  },
   internal_dispatchMessage: (payload) => {
     const { activeId } = get();
 
@@ -560,185 +260,6 @@ export const chatMessage: StateCreator<
 
     set({ messagesMap: nextMap }, false, { type: `dispatchMessage/${payload.type}`, payload });
   },
-  internal_fetchAIChatMessage: async (messages, assistantId, params) => {
-    const {
-      internal_toggleChatLoading,
-      refreshMessages,
-      internal_updateMessageContent,
-      internal_dispatchMessage,
-      internal_toggleToolCallingStreaming,
-    } = get();
-
-    const abortController = internal_toggleChatLoading(
-      true,
-      assistantId,
-      n('generateMessage(start)', { assistantId, messages }) as string,
-    );
-
-    const agentConfig = getAgentConfig();
-    const chatConfig = agentConfig.chatConfig;
-
-    const compiler = template(chatConfig.inputTemplate, { interpolate: /{{([\S\s]+?)}}/g });
-
-    // ================================== //
-    // messages uniformly preprocess //
-    // ================================== //
-
-    // 1. slice messages with config
-    let preprocessMsgs = chatHelpers.getSlicedMessagesWithConfig(messages, chatConfig);
-
-    // 2. replace inputMessage template
-    preprocessMsgs = !chatConfig.inputTemplate
-      ? preprocessMsgs
-      : preprocessMsgs.map((m) => {
-          if (m.role === 'user') {
-            try {
-              return { ...m, content: compiler({ text: m.content }) };
-            } catch (error) {
-              console.error(error);
-
-              return m;
-            }
-          }
-
-          return m;
-        });
-
-    // 3. add systemRole
-    if (agentConfig.systemRole) {
-      preprocessMsgs.unshift({ content: agentConfig.systemRole, role: 'system' } as ChatMessage);
-    }
-
-    // 4. handle max_tokens
-    agentConfig.params.max_tokens = chatConfig.enableMaxTokens
-      ? agentConfig.params.max_tokens
-      : undefined;
-
-    // 5. handle config for the vision model
-    // Due to the gpt-4-vision-preview model's default max_tokens is very small
-    // we need to set the max_tokens a larger one.
-    if (agentConfig.model === 'gpt-4-vision-preview') {
-      /* eslint-disable unicorn/no-lonely-if */
-      if (!agentConfig.params.max_tokens)
-        // refs: https://github.com/lobehub/lobe-chat/issues/837
-        agentConfig.params.max_tokens = 2048;
-    }
-
-    let isFunctionCall = false;
-    let msgTraceId: string | undefined;
-    let output = '';
-
-    await chatService.createAssistantMessageStream({
-      abortController,
-      params: {
-        messages: preprocessMsgs,
-        model: agentConfig.model,
-        provider: agentConfig.provider,
-        ...agentConfig.params,
-        plugins: agentConfig.plugins,
-      },
-      trace: {
-        traceId: params?.traceId,
-        sessionId: get().activeId,
-        topicId: get().activeTopicId,
-        traceName: TraceNameMap.Conversation,
-      },
-      isWelcomeQuestion: params?.isWelcomeQuestion,
-      onErrorHandle: async (error) => {
-        await messageService.updateMessageError(assistantId, error);
-        await refreshMessages();
-      },
-      onFinish: async (content, { traceId, observationId, toolCalls }) => {
-        // if there is traceId, update it
-        if (traceId) {
-          msgTraceId = traceId;
-          await messageService.updateMessage(assistantId, {
-            traceId,
-            observationId: observationId ?? undefined,
-          });
-        }
-
-        if (toolCalls && toolCalls.length > 0) {
-          internal_toggleToolCallingStreaming(assistantId, undefined);
-        }
-
-        // update the content after fetch result
-        await internal_updateMessageContent(assistantId, content, toolCalls);
-      },
-      onMessageHandle: async (chunk) => {
-        switch (chunk.type) {
-          case 'text': {
-            output += chunk.text;
-            internal_dispatchMessage({
-              id: assistantId,
-              type: 'updateMessage',
-              value: { content: output },
-            });
-            break;
-          }
-
-          // is this message is just a tool call
-          case 'tool_calls': {
-            internal_toggleToolCallingStreaming(assistantId, chunk.isAnimationActives);
-            internal_dispatchMessage({
-              id: assistantId,
-              type: 'updateMessage',
-              value: { tools: get().internal_transformToolCalls(chunk.tool_calls) },
-            });
-            isFunctionCall = true;
-          }
-        }
-      },
-    });
-
-    internal_toggleChatLoading(false, assistantId, n('generateMessage(end)') as string);
-
-    return {
-      isFunctionCall,
-      traceId: msgTraceId,
-    };
-  },
-
-  internal_resendMessage: async (messageId, traceId) => {
-    // 1. Construct all the relevant history messages
-    const chats = chatSelectors.currentChats(get());
-
-    const currentIndex = chats.findIndex((c) => c.id === messageId);
-    if (currentIndex < 0) return;
-
-    const currentMessage = chats[currentIndex];
-
-    let contextMessages: ChatMessage[] = [];
-
-    switch (currentMessage.role) {
-      case 'tool':
-      case 'user': {
-        contextMessages = chats.slice(0, currentIndex + 1);
-        break;
-      }
-      case 'assistant': {
-        // the message was sent by the AI, so we need to find its user message
-        const userId = currentMessage.parentId;
-        const userIndex = chats.findIndex((c) => c.id === userId);
-        // if the message has no parentId, treat it the same as the user/function case
-        contextMessages = chats.slice(0, userIndex < 0 ? currentIndex + 1 : userIndex + 1);
-        break;
-      }
-    }
-
-    if (contextMessages.length <= 0) return;
-
-    const { internal_coreProcessMessage } = get();
-
-    const latestMsg = contextMessages.findLast((s) => s.role === 'user');
-
-    if (!latestMsg) return;
-
-    await internal_coreProcessMessage(contextMessages, latestMsg.id, {
-      traceId,
-      ragQuery: get().internal_shouldUseRAG() ? currentMessage.content : undefined,
-    });
-  },
 
   internal_updateMessageError: async (id, error) => {
     get().internal_dispatchMessage({ id, type: 'updateMessage', value: { error } });
@@ -838,25 +359,6 @@ export const chatMessage: StateCreator<
       'internal_toggleMessageLoading',
     );
   },
-  internal_toggleChatLoading: (loading, id, action) => {
-    return get().internal_toggleLoadingArrays('chatLoadingIds', loading, id, action);
-  },
-  internal_toggleToolCallingStreaming: (id, streaming) => {
-    set(
-      {
-        toolCallingStreamIds: produce(get().toolCallingStreamIds, (draft) => {
-          if (!!streaming) {
-            draft[id] = streaming;
-          } else {
-            delete draft[id];
-          }
-        }),
-      },
-
-      false,
-      'toggleToolCallingStreaming',
-    );
-  },
   internal_toggleLoadingArrays: (key, loading, id, action) => {
     if (loading) {
       window.addEventListener('beforeunload', preventLeavingFn);
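Taken together, the hunks above reduce the message slice to message CRUD, editing and loading bookkeeping: sendMessage, regenerateMessage, delAndRegenerateMessage, stopGenerateMessage, internal_coreProcessMessage, internal_fetchAIChatMessage, internal_resendMessage and the chat-loading / tool-call-streaming toggles are removed, and chatMessage no longer spreads chatRag. Judging from the file list, those responsibilities presumably re-land in the new slices/aiChat action slice (+536 lines). Below is a hedged sketch of how such a slice is commonly folded back into the zustand ChatStore; the export names chatAiChat and ChatAIChatAction are assumptions, not taken from the package:

// Hypothetical composition sketch; the real wiring is in package/src/store/chat/store.ts
// (which only grows by 3 lines in this release) and may differ.
import { StateCreator } from 'zustand/vanilla';

import { ChatStore } from '@/store/chat/store';

// Assumed module paths and export names, for illustration only.
import { ChatAIChatAction, chatAiChat } from './slices/aiChat/action';
import { ChatMessageAction, chatMessage } from './slices/message/action';

export type ChatStoreAction = ChatAIChatAction & ChatMessageAction;

export const chatStoreAction: StateCreator<
  ChatStore,
  [['zustand/devtools', never]],
  [],
  ChatStoreAction
> = (...params) => ({
  // the trimmed message slice keeps CRUD, editing and loading helpers
  ...chatMessage(...params),
  // the new aiChat slice carries sendMessage, stopGenerateMessage, RAG retrieval, etc.
  ...chatAiChat(...params),
});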
@@ -1,18 +1,12 @@
 import { ChatMessage } from '@/types/message';
 
 export interface ChatMessageState {
-  abortController?: AbortController;
   /**
    * @title the currently active session
    * @description the session currently being edited or viewed
    */
   activeId: string;
-  /**
-   * is the AI message is generating
-   */
-  chatLoadingIds: string[];
-  inputFiles: File[];
-  inputMessage: string;
+
   isCreatingMessage: boolean;
   /**
    * is the message is editing
@@ -22,33 +16,18 @@ export interface ChatMessageState {
    * is the message is creating or updating in the service
    */
   messageLoadingIds: string[];
-  /**
-   * is the message is in RAG flow
-   */
-  messageRAGLoadingIds: string[];
   /**
    * whether messages have fetched
    */
   messagesInit: boolean;
   messagesMap: Record<string, ChatMessage[]>;
-  pluginApiLoadingIds: string[];
-  /**
-   * the tool calling stream ids
-   */
-  toolCallingStreamIds: Record<string, boolean[]>;
 }
 
 export const initialMessageState: ChatMessageState = {
   activeId: 'inbox',
-  chatLoadingIds: [],
-  inputFiles: [],
-  inputMessage: '',
   isCreatingMessage: false,
   messageEditingIds: [],
   messageLoadingIds: [],
-  messageRAGLoadingIds: [],
   messagesInit: false,
   messagesMap: {},
-  pluginApiLoadingIds: [],
-  toolCallingStreamIds: {},
 };
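The fields trimmed out of ChatMessageState above (abortController, chatLoadingIds, inputFiles, inputMessage, messageRAGLoadingIds, pluginApiLoadingIds, toolCallingStreamIds) are not simply dropped: the file list shows a new slices/aiChat/initialState.ts (+27 lines) arriving in the same release. A speculative sketch of the state that slice would need, inferred only from what is removed here and not copied from the package:

// Hypothetical sketch; the real file is package/src/store/chat/slices/aiChat/initialState.ts and may differ.
export interface ChatAIChatState {
  abortController?: AbortController;
  /** ids of AI messages currently being generated */
  chatLoadingIds: string[];
  inputFiles: File[];
  inputMessage: string;
  /** ids of messages currently going through the RAG flow */
  messageRAGLoadingIds: string[];
  pluginApiLoadingIds: string[];
  /** per-message tool-call streaming flags */
  toolCallingStreamIds: Record<string, boolean[]>;
}

export const initialAiChatState: ChatAIChatState = {
  chatLoadingIds: [],
  inputFiles: [],
  inputMessage: '',
  messageRAGLoadingIds: [],
  pluginApiLoadingIds: [],
  toolCallingStreamIds: {},
};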
@@ -6,7 +6,7 @@ import { INBOX_SESSION_ID } from '@/const/session';
 import { useAgentStore } from '@/store/agent';
 import { ChatStore } from '@/store/chat';
 import { initialState } from '@/store/chat/initialState';
-import { messageMapKey } from '@/store/chat/
+import { messageMapKey } from '@/store/chat/utils/messageMapKey';
 import { createServerConfigStore } from '@/store/serverConfig/store';
 import { LobeAgentConfig } from '@/types/agent';
 import { ChatMessage } from '@/types/message';