@lobehub/chat 1.121.0 → 1.122.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/CHANGELOG.md +51 -0
  2. package/apps/desktop/package.json +1 -0
  3. package/apps/desktop/src/main/modules/networkProxy/dispatcher.ts +24 -2
  4. package/changelog/v1.json +18 -0
  5. package/next.config.ts +1 -0
  6. package/package.json +1 -1
  7. package/packages/const/src/index.ts +1 -0
  8. package/packages/const/src/session.ts +4 -11
  9. package/packages/database/src/models/__tests__/message.test.ts +41 -0
  10. package/packages/database/src/models/message.ts +21 -13
  11. package/packages/database/src/models/topic.ts +4 -9
  12. package/packages/types/src/aiChat.ts +55 -0
  13. package/packages/types/src/index.ts +4 -0
  14. package/packages/types/src/message/base.ts +17 -4
  15. package/packages/types/src/message/chat.ts +1 -15
  16. package/packages/types/src/message/index.ts +1 -0
  17. package/packages/types/src/message/rag.ts +21 -0
  18. package/packages/utils/src/index.ts +1 -0
  19. package/packages/utils/src/object.test.ts +11 -0
  20. package/src/app/[variants]/(main)/chat/(workspace)/@topic/features/TopicListContent/ByTimeMode/index.tsx +3 -3
  21. package/src/server/routers/lambda/__tests__/message.test.ts +30 -0
  22. package/src/server/routers/lambda/aiChat.test.ts +107 -0
  23. package/src/server/routers/lambda/aiChat.ts +80 -0
  24. package/src/server/routers/lambda/index.ts +2 -0
  25. package/src/server/routers/lambda/message.ts +7 -0
  26. package/src/server/services/aiChat/index.test.ts +57 -0
  27. package/src/server/services/aiChat/index.ts +36 -0
  28. package/src/services/aiChat.ts +12 -0
  29. package/src/services/message/_deprecated.ts +4 -0
  30. package/src/services/message/client.ts +5 -0
  31. package/src/services/message/server.ts +4 -0
  32. package/src/services/message/type.ts +2 -0
  33. package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +11 -1
  34. package/src/store/chat/slices/aiChat/actions/generateAIChatV2.ts +410 -0
  35. package/src/store/chat/slices/aiChat/actions/index.ts +7 -1
  36. package/src/store/chat/slices/message/action.ts +38 -1
  37. package/src/store/chat/slices/message/reducer.ts +11 -0
  38. package/src/store/chat/slices/topic/reducer.ts +14 -1

package/src/server/routers/lambda/aiChat.ts
@@ -0,0 +1,80 @@
+ import { AiSendMessageServerSchema, SendMessageServerResponse } from '@lobechat/types';
+
+ import { LOADING_FLAT } from '@/const/message';
+ import { MessageModel } from '@/database/models/message';
+ import { TopicModel } from '@/database/models/topic';
+ import { authedProcedure, router } from '@/libs/trpc/lambda';
+ import { serverDatabase } from '@/libs/trpc/lambda/middleware';
+ import { AiChatService } from '@/server/services/aiChat';
+ import { FileService } from '@/server/services/file';
+
+ const aiChatProcedure = authedProcedure.use(serverDatabase).use(async (opts) => {
+   const { ctx } = opts;
+
+   return opts.next({
+     ctx: {
+       aiChatService: new AiChatService(ctx.serverDB, ctx.userId),
+       fileService: new FileService(ctx.serverDB, ctx.userId),
+       messageModel: new MessageModel(ctx.serverDB, ctx.userId),
+       topicModel: new TopicModel(ctx.serverDB, ctx.userId),
+     },
+   });
+ });
+
+ export const aiChatRouter = router({
+   sendMessageInServer: aiChatProcedure
+     .input(AiSendMessageServerSchema)
+     .mutation(async ({ input, ctx }) => {
+       let messageId: string;
+       let topicId = input.topicId!;
+
+       let isCreatNewTopic = false;
+
+       // create topic if there should be a new topic
+       if (input.newTopic) {
+         const topicItem = await ctx.topicModel.create({
+           messages: input.newTopic.topicMessageIds,
+           sessionId: input.sessionId,
+           title: input.newTopic.title,
+         });
+         topicId = topicItem.id;
+         isCreatNewTopic = true;
+       }
+
+       // create user message
+       const userMessageItem = await ctx.messageModel.create({
+         content: input.newUserMessage.content,
+         files: input.newUserMessage.files,
+         role: 'user',
+         sessionId: input.sessionId!,
+         topicId,
+       });
+
+       messageId = userMessageItem.id;
+       // create assistant message
+       const assistantMessageItem = await ctx.messageModel.create({
+         content: LOADING_FLAT,
+         fromModel: input.newAssistantMessage.model,
+         parentId: messageId,
+         role: 'assistant',
+         sessionId: input.sessionId!,
+         topicId,
+       });
+
+       // retrieve latest messages and topic with
+       const { messages, topics } = await ctx.aiChatService.getMessagesAndTopics({
+         includeTopic: isCreatNewTopic,
+         sessionId: input.sessionId,
+         topicId,
+       });
+
+       return {
+         assistantMessageId: assistantMessageItem.id,
+         isCreatNewTopic,
+         messages,
+         topicId,
+         topics,
+         userMessageId: messageId,
+       } as SendMessageServerResponse;
+     }),
+ });
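
The new sendMessageInServer mutation creates the user message, the assistant placeholder message, and (optionally) a new topic in a single round trip, then returns the refreshed message and topic lists. A minimal sketch of a call site follows; the field names are taken from the router body above, while the authoritative input shape is AiSendMessageServerSchema in @lobechat/types, which is not shown in this diff.

// Hypothetical call site for the new mutation; values are placeholders for illustration.
import { lambdaClient } from '@/libs/trpc/client';

const send = async () => {
  const result = await lambdaClient.aiChat.sendMessageInServer.mutate({
    newAssistantMessage: { model: 'gpt-4o-mini' }, // any valid model id
    newUserMessage: { content: 'Hello', files: [] },
    sessionId: 'session-id',
    topicId: 'topic-id',
    // or, instead of topicId, ask the server to create a topic:
    // newTopic: { title: 'New topic', topicMessageIds: [] },
  });

  // result.userMessageId / result.assistantMessageId identify the two new rows;
  // result.messages (and result.topics when a topic was created) carry the
  // refreshed lists typed as SendMessageServerResponse.
  return result;
};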

package/src/server/routers/lambda/index.ts
@@ -4,6 +4,7 @@
  import { publicProcedure, router } from '@/libs/trpc/lambda';

  import { agentRouter } from './agent';
+ import { aiChatRouter } from './aiChat';
  import { aiModelRouter } from './aiModel';
  import { aiProviderRouter } from './aiProvider';
  import { apiKeyRouter } from './apiKey';
@@ -30,6 +31,7 @@ import { userRouter } from './user';

  export const lambdaRouter = router({
    agent: agentRouter,
+   aiChat: aiChatRouter,
    aiModel: aiModelRouter,
    aiProvider: aiProviderRouter,
    apiKey: apiKeyRouter,

package/src/server/routers/lambda/message.ts
@@ -7,6 +7,7 @@ import { authedProcedure, publicProcedure, router } from '@/libs/trpc/lambda';
  import { serverDatabase } from '@/libs/trpc/lambda/middleware';
  import { FileService } from '@/server/services/file';
  import { ChatMessage } from '@/types/message';
+ import { UpdateMessageRAGParamsSchema } from '@/types/message/rag';
  import { BatchTaskResult } from '@/types/service';

  type ChatMessageList = ChatMessage[];
@@ -174,6 +175,12 @@ export const messageRouter = router({
        return ctx.messageModel.updateMessagePlugin(input.id, input.value);
      }),

+   updateMessageRAG: messageProcedure
+     .input(UpdateMessageRAGParamsSchema)
+     .mutation(async ({ input, ctx }) => {
+       await ctx.messageModel.updateMessageRAG(input.id, input.value);
+     }),
+
    updatePluginError: messageProcedure
      .input(
        z.object({

package/src/server/services/aiChat/index.test.ts
@@ -0,0 +1,57 @@
+ import { describe, expect, it, vi } from 'vitest';
+
+ import { MessageModel } from '@/database/models/message';
+ import { TopicModel } from '@/database/models/topic';
+ import { LobeChatDatabase } from '@/database/type';
+ import { FileService } from '@/server/services/file';
+
+ import { AiChatService } from '.';
+
+ vi.mock('@/database/models/message');
+ vi.mock('@/database/models/topic');
+ vi.mock('@/server/services/file');
+
+ describe('AiChatService', () => {
+   it('getMessagesAndTopics should fetch messages and topics concurrently', async () => {
+     const serverDB = {} as unknown as LobeChatDatabase;
+
+     const mockQueryMessages = vi.fn().mockResolvedValue([{ id: 'm1' }]);
+     const mockQueryTopics = vi.fn().mockResolvedValue([{ id: 't1' }]);
+
+     vi.mocked(MessageModel).mockImplementation(() => ({ query: mockQueryMessages }) as any);
+     vi.mocked(TopicModel).mockImplementation(() => ({ query: mockQueryTopics }) as any);
+     vi.mocked(FileService).mockImplementation(
+       () => ({ getFullFileUrl: vi.fn().mockResolvedValue('url') }) as any,
+     );
+
+     const service = new AiChatService(serverDB, 'u1');
+
+     const res = await service.getMessagesAndTopics({ includeTopic: true, sessionId: 's1' });
+
+     expect(mockQueryMessages).toHaveBeenCalledWith(
+       { includeTopic: true, sessionId: 's1' },
+       expect.objectContaining({ postProcessUrl: expect.any(Function) }),
+     );
+     expect(mockQueryTopics).toHaveBeenCalledWith({ sessionId: 's1' });
+     expect(res.messages).toEqual([{ id: 'm1' }]);
+     expect(res.topics).toEqual([{ id: 't1' }]);
+   });
+
+   it('getMessagesAndTopics should not query topics when includeTopic is false', async () => {
+     const serverDB = {} as unknown as LobeChatDatabase;
+
+     const mockQueryMessages = vi.fn().mockResolvedValue([]);
+     vi.mocked(MessageModel).mockImplementation(() => ({ query: mockQueryMessages }) as any);
+     vi.mocked(TopicModel).mockImplementation(() => ({ query: vi.fn() }) as any);
+     vi.mocked(FileService).mockImplementation(
+       () => ({ getFullFileUrl: vi.fn().mockResolvedValue('url') }) as any,
+     );
+
+     const service = new AiChatService(serverDB, 'u1');
+
+     const res = await service.getMessagesAndTopics({ includeTopic: false, topicId: 't1' });
+
+     expect(mockQueryMessages).toHaveBeenCalled();
+     expect(res.topics).toBeUndefined();
+   });
+ });

package/src/server/services/aiChat/index.ts
@@ -0,0 +1,36 @@
+ import { MessageModel } from '@/database/models/message';
+ import { TopicModel } from '@/database/models/topic';
+ import { LobeChatDatabase } from '@/database/type';
+ import { FileService } from '@/server/services/file';
+
+ export class AiChatService {
+   private userId: string;
+   private messageModel: MessageModel;
+   private fileService: FileService;
+   private topicModel: TopicModel;
+
+   constructor(serverDB: LobeChatDatabase, userId: string) {
+     this.userId = userId;
+
+     this.messageModel = new MessageModel(serverDB, userId);
+     this.topicModel = new TopicModel(serverDB, userId);
+     this.fileService = new FileService(serverDB, userId);
+   }
+
+   async getMessagesAndTopics(params: {
+     current?: number;
+     includeTopic?: boolean;
+     pageSize?: number;
+     sessionId?: string;
+     topicId?: string;
+   }) {
+     const [messages, topics] = await Promise.all([
+       this.messageModel.query(params, {
+         postProcessUrl: (path) => this.fileService.getFullFileUrl(path),
+       }),
+       params.includeTopic ? this.topicModel.query({ sessionId: params.sessionId }) : undefined,
+     ]);
+
+     return { messages, topics };
+   }
+ }
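
getMessagesAndTopics runs the message query and the topic query concurrently via Promise.all, and skips the topic query entirely when includeTopic is false (that slot simply resolves to undefined). A short sketch of the two call shapes, assuming serverDB and the ids are supplied by the caller; the first shape matches the sendMessageInServer mutation above, the second matches the service tests earlier in this diff.

// Sketch only: demonstrates the two ways the service is queried in this release.
import { LobeChatDatabase } from '@/database/type';
import { AiChatService } from '@/server/services/aiChat';

const demo = async (serverDB: LobeChatDatabase, userId: string, sessionId: string, topicId: string) => {
  const aiChat = new AiChatService(serverDB, userId);

  // A new topic was just created: refresh the messages and the topic list together.
  const withTopics = await aiChat.getMessagesAndTopics({ includeTopic: true, sessionId, topicId });

  // Existing topic: only messages are re-queried; topics comes back undefined.
  const messagesOnly = await aiChat.getMessagesAndTopics({ includeTopic: false, topicId });

  return { withTopics, messagesOnly };
};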

package/src/services/aiChat.ts
@@ -0,0 +1,12 @@
+ import { SendMessageServerParams } from '@lobechat/types';
+ import { cleanObject } from '@lobechat/utils';
+
+ import { lambdaClient } from '@/libs/trpc/client';
+
+ class AiChatService {
+   sendMessageInServer = async (params: SendMessageServerParams) => {
+     return lambdaClient.aiChat.sendMessageInServer.mutate(cleanObject(params));
+   };
+ }
+
+ export const aiChatService = new AiChatService();
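
cleanObject presumably strips empty fields from the params before the mutation is sent; its implementation lives in @lobechat/utils and only its test file (packages/utils/src/object.test.ts) appears in this release, so the semantics sketched below are an assumption.

// Assumed behaviour (not shown in this diff): undefined members are dropped so
// the tRPC payload stays minimal.
import { cleanObject } from '@lobechat/utils';

const payload = cleanObject({ sessionId: 's1', topicId: undefined, newTopic: undefined });
// expected result: { sessionId: 's1' }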

package/src/services/message/_deprecated.ts
@@ -143,4 +143,8 @@ export class ClientService implements IMessageService {
    async updateMessagePluginError() {
      throw new Error('Method not implemented.');
    }
+
+   async updateMessageRAG(): Promise<void> {
+     throw new Error('Method not implemented.');
+   }
  }

package/src/services/message/client.ts
@@ -96,6 +96,11 @@ export class ClientService extends BaseClientService implements IMessageService
      return this.messageModel.updateMessagePlugin(id, { error: value });
    };

+   updateMessageRAG: IMessageService['updateMessageRAG'] = async (id, value) => {
+     console.log(id, value);
+     throw new Error('not implemented');
+   };
+
    updateMessagePluginArguments: IMessageService['updateMessagePluginArguments'] = async (
      id,
      value,

package/src/services/message/server.ts
@@ -84,6 +84,10 @@ export class ServerService implements IMessageService {
      return lambdaClient.message.updatePluginError.mutate({ id, value: error as any });
    };

+   updateMessageRAG: IMessageService['updateMessageRAG'] = async (id, data) => {
+     return lambdaClient.message.updateMessageRAG.mutate({ id, value: data });
+   };
+
    removeMessage: IMessageService['removeMessage'] = async (id) => {
      return lambdaClient.message.removeMessage.mutate({ id });
    };

package/src/services/message/type.ts
@@ -11,6 +11,7 @@ import {
    ModelRankItem,
    UpdateMessageParams,
  } from '@/types/message';
+ import { UpdateMessageRAGParams } from '@/types/message/rag';

  /* eslint-disable typescript-sort-keys/interface */

@@ -39,6 +40,7 @@
    updateMessageTranslate(id: string, translate: Partial<ChatTranslate> | false): Promise<any>;
    updateMessagePluginState(id: string, value: Record<string, any>): Promise<any>;
    updateMessagePluginError(id: string, value: ChatMessagePluginError | null): Promise<any>;
+   updateMessageRAG(id: string, value: UpdateMessageRAGParams): Promise<void>;
    updateMessagePluginArguments(id: string, value: string | Record<string, any>): Promise<any>;
    removeMessage(id: string): Promise<any>;
    removeMessages(ids: string[]): Promise<any>;

package/src/store/chat/slices/aiChat/actions/generateAIChat.ts
@@ -152,7 +152,13 @@ export const generateAIChat: StateCreator<
    },

    sendMessage: async ({ message, files, onlyAddUserMessage, isWelcomeQuestion }) => {
-     const { internal_coreProcessMessage, activeTopicId, activeId, activeThreadId } = get();
+     const {
+       internal_coreProcessMessage,
+       activeTopicId,
+       activeId,
+       activeThreadId,
+       sendMessageInServer,
+     } = get();
      if (!activeId) return;

      const fileIdList = files?.map((f) => f.id);
@@ -162,6 +168,10 @@
      // if message is empty or no files, then stop
      if (!message && !hasFile) return;

+     // router to server mode send message
+     if (isServerMode)
+       return sendMessageInServer({ message, files, onlyAddUserMessage, isWelcomeQuestion });
+
      set({ isCreatingMessage: true }, false, n('creatingMessage/start'));

      const newMessage: CreateMessageParams = {