@lobehub/lobehub 2.0.0-next.35 → 2.0.0-next.36
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/next.config.ts +5 -6
- package/package.json +2 -2
- package/packages/agent-runtime/src/core/__tests__/runtime.test.ts +112 -77
- package/packages/agent-runtime/src/core/runtime.ts +63 -18
- package/packages/agent-runtime/src/types/generalAgent.ts +55 -0
- package/packages/agent-runtime/src/types/index.ts +1 -0
- package/packages/agent-runtime/src/types/instruction.ts +10 -3
- package/packages/const/src/user.ts +0 -1
- package/packages/context-engine/src/processors/GroupMessageFlatten.ts +8 -6
- package/packages/context-engine/src/processors/__tests__/GroupMessageFlatten.test.ts +12 -12
- package/packages/conversation-flow/src/__tests__/fixtures/inputs/branch/assistant-group-branches.json +249 -0
- package/packages/conversation-flow/src/__tests__/fixtures/inputs/branch/index.ts +4 -0
- package/packages/conversation-flow/src/__tests__/fixtures/inputs/branch/multi-assistant-group.json +260 -0
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/branch/active-index-1.json +4 -0
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/branch/assistant-group-branches.json +481 -0
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/branch/conversation.json +5 -1
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/branch/index.ts +4 -0
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/branch/multi-assistant-group.json +407 -0
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/branch/nested.json +18 -2
- package/packages/conversation-flow/src/__tests__/fixtures/outputs/complex-scenario.json +25 -3
- package/packages/conversation-flow/src/__tests__/parse.test.ts +12 -0
- package/packages/conversation-flow/src/index.ts +1 -1
- package/packages/conversation-flow/src/transformation/FlatListBuilder.ts +112 -34
- package/packages/conversation-flow/src/types/flatMessageList.ts +0 -12
- package/packages/conversation-flow/src/{types.ts → types/index.ts} +3 -14
- package/packages/database/src/models/message.ts +18 -19
- package/packages/types/src/aiChat.ts +2 -0
- package/packages/types/src/importer.ts +2 -2
- package/packages/types/src/message/ui/chat.ts +17 -1
- package/packages/types/src/message/ui/extra.ts +2 -2
- package/packages/types/src/message/ui/params.ts +2 -2
- package/packages/types/src/user/preference.ts +0 -4
- package/packages/utils/src/tokenizer/index.ts +3 -11
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatInput/Desktop/MessageFromUrl.tsx +3 -3
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatInput/V1Mobile/index.tsx +1 -1
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatInput/V1Mobile/useSend.ts +3 -3
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatInput/useSend.ts +6 -6
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatList/Content.tsx +5 -3
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatList/WelcomeChatItem/AgentWelcome/OpeningQuestions.tsx +2 -2
- package/src/app/[variants]/(main)/chat/components/conversation/features/ChatList/WelcomeChatItem/GroupWelcome/GroupUsageSuggest.tsx +2 -2
- package/src/app/[variants]/(main)/labs/page.tsx +0 -9
- package/src/features/ChatInput/ActionBar/STT/browser.tsx +3 -3
- package/src/features/ChatInput/ActionBar/STT/openai.tsx +3 -3
- package/src/features/Conversation/Error/AccessCodeForm.tsx +1 -1
- package/src/features/Conversation/Error/ChatInvalidApiKey.tsx +1 -1
- package/src/features/Conversation/Error/ClerkLogin/index.tsx +1 -1
- package/src/features/Conversation/Error/OAuthForm.tsx +1 -1
- package/src/features/Conversation/Error/index.tsx +0 -5
- package/src/features/Conversation/Messages/Assistant/Actions/index.tsx +13 -10
- package/src/features/Conversation/Messages/Assistant/Extra/index.test.tsx +3 -8
- package/src/features/Conversation/Messages/Assistant/Extra/index.tsx +2 -6
- package/src/features/Conversation/Messages/Assistant/MessageContent.tsx +7 -9
- package/src/features/Conversation/Messages/Assistant/Tool/Inspector/PluginResult.tsx +2 -2
- package/src/features/Conversation/Messages/Assistant/Tool/Inspector/PluginState.tsx +2 -2
- package/src/features/Conversation/Messages/Assistant/Tool/Render/PluginSettings.tsx +4 -1
- package/src/features/Conversation/Messages/Assistant/Tool/Render/index.tsx +2 -3
- package/src/features/Conversation/Messages/Assistant/index.tsx +57 -60
- package/src/features/Conversation/Messages/Default.tsx +1 -0
- package/src/features/Conversation/Messages/Group/Actions/WithContentId.tsx +38 -10
- package/src/features/Conversation/Messages/Group/Actions/index.tsx +1 -1
- package/src/features/Conversation/Messages/Group/ContentBlock.tsx +1 -3
- package/src/features/Conversation/Messages/Group/GroupChildren.tsx +12 -12
- package/src/features/Conversation/Messages/Group/MessageContent.tsx +7 -1
- package/src/features/Conversation/Messages/Group/Tool/Render/PluginSettings.tsx +1 -1
- package/src/features/Conversation/Messages/Group/index.tsx +2 -1
- package/src/features/Conversation/Messages/Supervisor/index.tsx +2 -2
- package/src/features/Conversation/Messages/User/{Actions.tsx → Actions/ActionsBar.tsx} +26 -25
- package/src/features/Conversation/Messages/User/Actions/MessageBranch.tsx +107 -0
- package/src/features/Conversation/Messages/User/Actions/index.tsx +42 -0
- package/src/features/Conversation/Messages/User/index.tsx +43 -44
- package/src/features/Conversation/Messages/index.tsx +3 -3
- package/src/features/Conversation/components/AutoScroll.tsx +3 -3
- package/src/features/Conversation/components/Extras/Usage/UsageDetail/AnimatedNumber.tsx +55 -0
- package/src/features/Conversation/components/Extras/Usage/UsageDetail/index.tsx +5 -2
- package/src/features/Conversation/components/VirtualizedList/index.tsx +29 -20
- package/src/features/Conversation/hooks/useChatListActionsBar.tsx +8 -10
- package/src/features/Portal/Thread/Chat/ChatInput/useSend.ts +3 -3
- package/src/hooks/useHotkeys/chatScope.ts +15 -7
- package/src/server/routers/lambda/__tests__/aiChat.test.ts +1 -1
- package/src/server/routers/lambda/__tests__/integration/message.integration.test.ts +0 -26
- package/src/server/routers/lambda/aiChat.ts +3 -2
- package/src/server/routers/lambda/message.ts +8 -16
- package/src/server/services/message/__tests__/index.test.ts +29 -39
- package/src/server/services/message/index.ts +41 -36
- package/src/services/electron/desktopNotification.ts +6 -6
- package/src/services/electron/file.ts +6 -6
- package/src/services/file/ClientS3/index.ts +8 -8
- package/src/services/message/__tests__/metadata-race-condition.test.ts +157 -0
- package/src/services/message/index.ts +21 -15
- package/src/services/upload.ts +11 -11
- package/src/services/utils/abortableRequest.test.ts +161 -0
- package/src/services/utils/abortableRequest.ts +67 -0
- package/src/store/chat/agents/GeneralChatAgent.ts +137 -0
- package/src/store/chat/agents/createAgentExecutors.ts +395 -0
- package/src/store/chat/helpers.test.ts +0 -99
- package/src/store/chat/helpers.ts +0 -11
- package/src/store/chat/slices/aiChat/actions/__tests__/conversationControl.test.ts +332 -0
- package/src/store/chat/slices/aiChat/actions/__tests__/conversationLifecycle.test.ts +257 -0
- package/src/store/chat/slices/aiChat/actions/__tests__/helpers.ts +11 -2
- package/src/store/chat/slices/aiChat/actions/__tests__/rag.test.ts +6 -6
- package/src/store/chat/slices/aiChat/actions/__tests__/streamingExecutor.test.ts +391 -0
- package/src/store/chat/slices/aiChat/actions/__tests__/streamingStates.test.ts +179 -0
- package/src/store/chat/slices/aiChat/actions/conversationControl.ts +157 -0
- package/src/store/chat/slices/aiChat/actions/conversationLifecycle.ts +329 -0
- package/src/store/chat/slices/aiChat/actions/generateAIGroupChat.ts +14 -14
- package/src/store/chat/slices/aiChat/actions/index.ts +12 -6
- package/src/store/chat/slices/aiChat/actions/rag.ts +9 -6
- package/src/store/chat/slices/aiChat/actions/streamingExecutor.ts +604 -0
- package/src/store/chat/slices/aiChat/actions/streamingStates.ts +84 -0
- package/src/store/chat/slices/builtinTool/actions/__tests__/localSystem.test.ts +4 -4
- package/src/store/chat/slices/builtinTool/actions/__tests__/search.test.ts +11 -11
- package/src/store/chat/slices/builtinTool/actions/interpreter.ts +8 -8
- package/src/store/chat/slices/builtinTool/actions/localSystem.ts +2 -2
- package/src/store/chat/slices/builtinTool/actions/search.ts +8 -8
- package/src/store/chat/slices/message/action.test.ts +79 -68
- package/src/store/chat/slices/message/actions/index.ts +39 -0
- package/src/store/chat/slices/message/actions/internals.ts +77 -0
- package/src/store/chat/slices/message/actions/optimisticUpdate.ts +260 -0
- package/src/store/chat/slices/message/actions/publicApi.ts +224 -0
- package/src/store/chat/slices/message/actions/query.ts +120 -0
- package/src/store/chat/slices/message/actions/runtimeState.ts +108 -0
- package/src/store/chat/slices/message/initialState.ts +13 -0
- package/src/store/chat/slices/message/reducer.test.ts +48 -370
- package/src/store/chat/slices/message/reducer.ts +17 -81
- package/src/store/chat/slices/message/selectors/chat.test.ts +13 -50
- package/src/store/chat/slices/message/selectors/chat.ts +78 -242
- package/src/store/chat/slices/message/selectors/dbMessage.ts +140 -0
- package/src/store/chat/slices/message/selectors/displayMessage.ts +301 -0
- package/src/store/chat/slices/message/selectors/messageState.ts +5 -2
- package/src/store/chat/slices/plugin/action.test.ts +62 -64
- package/src/store/chat/slices/plugin/action.ts +34 -28
- package/src/store/chat/slices/thread/action.test.ts +28 -31
- package/src/store/chat/slices/thread/action.ts +13 -10
- package/src/store/chat/slices/thread/selectors/index.ts +8 -6
- package/src/store/chat/slices/topic/reducer.ts +11 -3
- package/src/store/chat/store.ts +1 -1
- package/src/store/user/slices/preference/selectors/labPrefer.ts +0 -3
- package/packages/database/src/models/__tests__/message.grouping.test.ts +0 -812
- package/packages/database/src/utils/__tests__/groupMessages.test.ts +0 -1132
- package/packages/database/src/utils/groupMessages.ts +0 -361
- package/packages/utils/src/tokenizer/client.ts +0 -35
- package/packages/utils/src/tokenizer/estimated.ts +0 -4
- package/packages/utils/src/tokenizer/server.ts +0 -11
- package/packages/utils/src/tokenizer/tokenizer.worker.ts +0 -12
- package/src/app/(backend)/webapi/tokenizer/index.test.ts +0 -32
- package/src/app/(backend)/webapi/tokenizer/route.ts +0 -8
- package/src/features/Conversation/Error/InvalidAccessCode.tsx +0 -79
- package/src/store/chat/slices/aiChat/actions/__tests__/generateAIChat.test.ts +0 -975
- package/src/store/chat/slices/aiChat/actions/__tests__/generateAIChatV2.test.ts +0 -1050
- package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +0 -720
- package/src/store/chat/slices/aiChat/actions/generateAIChatV2.ts +0 -849
- package/src/store/chat/slices/message/action.ts +0 -629
|
@@ -1,720 +0,0 @@
|
|
|
1
|
-
/* eslint-disable sort-keys-fix/sort-keys-fix, typescript-sort-keys/interface */
|
|
2
|
-
// Disable the auto sort key eslint rule to make the code more logic and readable
|
|
3
|
-
import { LOADING_FLAT, MESSAGE_CANCEL_FLAT, isDesktop } from '@lobechat/const';
|
|
4
|
-
import { knowledgeBaseQAPrompts } from '@lobechat/prompts';
|
|
5
|
-
import {
|
|
6
|
-
ChatImageItem,
|
|
7
|
-
CreateMessageParams,
|
|
8
|
-
MessageSemanticSearchChunk,
|
|
9
|
-
TraceEventType,
|
|
10
|
-
TraceNameMap,
|
|
11
|
-
UIChatMessage,
|
|
12
|
-
} from '@lobechat/types';
|
|
13
|
-
import isEqual from 'fast-deep-equal';
|
|
14
|
-
import { t } from 'i18next';
|
|
15
|
-
import { produce } from 'immer';
|
|
16
|
-
import { throttle } from 'lodash-es';
|
|
17
|
-
import { StateCreator } from 'zustand/vanilla';
|
|
18
|
-
|
|
19
|
-
import { chatService } from '@/services/chat';
|
|
20
|
-
import { messageService } from '@/services/message';
|
|
21
|
-
import { agentChatConfigSelectors, agentSelectors } from '@/store/agent/selectors';
|
|
22
|
-
import { getAgentStoreState } from '@/store/agent/store';
|
|
23
|
-
import { aiModelSelectors, aiProviderSelectors } from '@/store/aiInfra';
|
|
24
|
-
import { getAiInfraStoreState } from '@/store/aiInfra/store';
|
|
25
|
-
import { ChatStore } from '@/store/chat/store';
|
|
26
|
-
import { getFileStoreState } from '@/store/file/store';
|
|
27
|
-
import { WebBrowsingManifest } from '@/tools/web-browsing';
|
|
28
|
-
import { Action, setNamespace } from '@/utils/storeDebug';
|
|
29
|
-
|
|
30
|
-
import { chatSelectors, messageStateSelectors, topicSelectors } from '../../../selectors';
|
|
31
|
-
|
|
32
|
-
const n = setNamespace('ai');
|
|
33
|
-
|
|
34
|
-
interface ProcessMessageParams {
|
|
35
|
-
traceId?: string;
|
|
36
|
-
isWelcomeQuestion?: boolean;
|
|
37
|
-
inSearchWorkflow?: boolean;
|
|
38
|
-
/**
|
|
39
|
-
* the RAG query content, should be embedding and used in the semantic search
|
|
40
|
-
*/
|
|
41
|
-
ragQuery?: string;
|
|
42
|
-
threadId?: string;
|
|
43
|
-
inPortalThread?: boolean;
|
|
44
|
-
|
|
45
|
-
groupId?: string;
|
|
46
|
-
agentId?: string;
|
|
47
|
-
agentConfig?: any; // Agent configuration for group chat agents
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
export interface AIGenerateAction {
|
|
51
|
-
/**
|
|
52
|
-
* Regenerates a specific message in the chat
|
|
53
|
-
*/
|
|
54
|
-
regenerateMessage: (id: string) => Promise<void>;
|
|
55
|
-
/**
|
|
56
|
-
* Deletes an existing message and generates a new one in its place
|
|
57
|
-
*/
|
|
58
|
-
delAndRegenerateMessage: (id: string) => Promise<void>;
|
|
59
|
-
/**
|
|
60
|
-
* Interrupts the ongoing ai message generation process
|
|
61
|
-
*/
|
|
62
|
-
stopGenerateMessage: () => void;
|
|
63
|
-
|
|
64
|
-
// ========= ↓ Internal Method ↓ ========== //
|
|
65
|
-
// ========================================== //
|
|
66
|
-
// ========================================== //
|
|
67
|
-
|
|
68
|
-
/**
|
|
69
|
-
* Executes the core processing logic for AI messages
|
|
70
|
-
* including preprocessing and postprocessing steps
|
|
71
|
-
*/
|
|
72
|
-
internal_coreProcessMessage: (
|
|
73
|
-
messages: UIChatMessage[],
|
|
74
|
-
parentId: string,
|
|
75
|
-
params?: ProcessMessageParams,
|
|
76
|
-
) => Promise<void>;
|
|
77
|
-
/**
|
|
78
|
-
* Retrieves an AI-generated chat message from the backend service
|
|
79
|
-
*/
|
|
80
|
-
internal_fetchAIChatMessage: (input: {
|
|
81
|
-
messages: UIChatMessage[];
|
|
82
|
-
messageId: string;
|
|
83
|
-
params?: ProcessMessageParams;
|
|
84
|
-
model: string;
|
|
85
|
-
provider: string;
|
|
86
|
-
}) => Promise<{
|
|
87
|
-
isFunctionCall: boolean;
|
|
88
|
-
content: string;
|
|
89
|
-
traceId?: string;
|
|
90
|
-
}>;
|
|
91
|
-
/**
|
|
92
|
-
* Resends a specific message, optionally using a trace ID for tracking
|
|
93
|
-
*/
|
|
94
|
-
internal_resendMessage: (
|
|
95
|
-
id: string,
|
|
96
|
-
params?: {
|
|
97
|
-
traceId?: string;
|
|
98
|
-
messages?: UIChatMessage[];
|
|
99
|
-
threadId?: string;
|
|
100
|
-
inPortalThread?: boolean;
|
|
101
|
-
},
|
|
102
|
-
) => Promise<void>;
|
|
103
|
-
/**
|
|
104
|
-
* Toggles the loading state for AI message generation, managing the UI feedback
|
|
105
|
-
*/
|
|
106
|
-
internal_toggleChatLoading: (
|
|
107
|
-
loading: boolean,
|
|
108
|
-
id?: string,
|
|
109
|
-
action?: Action,
|
|
110
|
-
) => AbortController | undefined;
|
|
111
|
-
internal_toggleMessageInToolsCalling: (
|
|
112
|
-
loading: boolean,
|
|
113
|
-
id?: string,
|
|
114
|
-
action?: Action,
|
|
115
|
-
) => AbortController | undefined;
|
|
116
|
-
/**
|
|
117
|
-
* Controls the streaming state of tool calling processes, updating the UI accordingly
|
|
118
|
-
*/
|
|
119
|
-
internal_toggleToolCallingStreaming: (id: string, streaming: boolean[] | undefined) => void;
|
|
120
|
-
/**
|
|
121
|
-
* Toggles the loading state for AI message reasoning, managing the UI feedback
|
|
122
|
-
*/
|
|
123
|
-
internal_toggleChatReasoning: (
|
|
124
|
-
loading: boolean,
|
|
125
|
-
id?: string,
|
|
126
|
-
action?: string,
|
|
127
|
-
) => AbortController | undefined;
|
|
128
|
-
|
|
129
|
-
internal_toggleSearchWorkflow: (loading: boolean, id?: string) => void;
|
|
130
|
-
}
|
|
131
|
-
|
|
132
|
-
export const generateAIChat: StateCreator<
|
|
133
|
-
ChatStore,
|
|
134
|
-
[['zustand/devtools', never]],
|
|
135
|
-
[],
|
|
136
|
-
AIGenerateAction
|
|
137
|
-
> = (set, get) => ({
|
|
138
|
-
delAndRegenerateMessage: async (id) => {
|
|
139
|
-
const traceId = chatSelectors.getTraceIdByMessageId(id)(get());
|
|
140
|
-
get().internal_resendMessage(id, { traceId });
|
|
141
|
-
get().deleteMessage(id);
|
|
142
|
-
|
|
143
|
-
// trace the delete and regenerate message
|
|
144
|
-
get().internal_traceMessage(id, { eventType: TraceEventType.DeleteAndRegenerateMessage });
|
|
145
|
-
},
|
|
146
|
-
regenerateMessage: async (id) => {
|
|
147
|
-
const traceId = chatSelectors.getTraceIdByMessageId(id)(get());
|
|
148
|
-
await get().internal_resendMessage(id, { traceId });
|
|
149
|
-
|
|
150
|
-
// trace the delete and regenerate message
|
|
151
|
-
get().internal_traceMessage(id, { eventType: TraceEventType.RegenerateMessage });
|
|
152
|
-
},
|
|
153
|
-
|
|
154
|
-
stopGenerateMessage: () => {
|
|
155
|
-
const { chatLoadingIdsAbortController, internal_toggleChatLoading } = get();
|
|
156
|
-
|
|
157
|
-
if (!chatLoadingIdsAbortController) return;
|
|
158
|
-
|
|
159
|
-
chatLoadingIdsAbortController.abort(MESSAGE_CANCEL_FLAT);
|
|
160
|
-
|
|
161
|
-
internal_toggleChatLoading(false, undefined, n('stopGenerateMessage') as string);
|
|
162
|
-
},
|
|
163
|
-
|
|
164
|
-
// the internal process method of the AI message
|
|
165
|
-
internal_coreProcessMessage: async (originalMessages, userMessageId, params) => {
|
|
166
|
-
const { internal_fetchAIChatMessage, triggerToolCalls, refreshMessages, activeTopicId } = get();
|
|
167
|
-
|
|
168
|
-
// create a new array to avoid the original messages array change
|
|
169
|
-
const messages = [...originalMessages];
|
|
170
|
-
|
|
171
|
-
const agentStoreState = getAgentStoreState();
|
|
172
|
-
const { model, provider, chatConfig } = agentSelectors.currentAgentConfig(agentStoreState);
|
|
173
|
-
|
|
174
|
-
let fileChunks: MessageSemanticSearchChunk[] | undefined;
|
|
175
|
-
let ragQueryId;
|
|
176
|
-
|
|
177
|
-
// go into RAG flow if there is ragQuery flag
|
|
178
|
-
if (params?.ragQuery) {
|
|
179
|
-
// 1. get the relative chunks from semantic search
|
|
180
|
-
const { chunks, queryId, rewriteQuery } = await get().internal_retrieveChunks(
|
|
181
|
-
userMessageId,
|
|
182
|
-
params?.ragQuery,
|
|
183
|
-
// should skip the last content
|
|
184
|
-
messages.map((m) => m.content).slice(0, messages.length - 1),
|
|
185
|
-
);
|
|
186
|
-
|
|
187
|
-
ragQueryId = queryId;
|
|
188
|
-
|
|
189
|
-
const lastMsg = messages.pop() as UIChatMessage;
|
|
190
|
-
|
|
191
|
-
// 2. build the retrieve context messages
|
|
192
|
-
const knowledgeBaseQAContext = knowledgeBaseQAPrompts({
|
|
193
|
-
chunks,
|
|
194
|
-
userQuery: lastMsg.content,
|
|
195
|
-
rewriteQuery,
|
|
196
|
-
knowledge: agentSelectors.currentEnabledKnowledge(agentStoreState),
|
|
197
|
-
});
|
|
198
|
-
|
|
199
|
-
// 3. add the retrieve context messages to the messages history
|
|
200
|
-
messages.push({
|
|
201
|
-
...lastMsg,
|
|
202
|
-
content: (lastMsg.content + '\n\n' + knowledgeBaseQAContext).trim(),
|
|
203
|
-
});
|
|
204
|
-
|
|
205
|
-
fileChunks = chunks.map((c) => ({ id: c.id, similarity: c.similarity }));
|
|
206
|
-
}
|
|
207
|
-
|
|
208
|
-
// 2. Add an empty message to place the AI response
|
|
209
|
-
const assistantMessage: CreateMessageParams = {
|
|
210
|
-
role: 'assistant',
|
|
211
|
-
content: LOADING_FLAT,
|
|
212
|
-
fromModel: model,
|
|
213
|
-
fromProvider: provider,
|
|
214
|
-
|
|
215
|
-
parentId: userMessageId,
|
|
216
|
-
sessionId: get().activeId,
|
|
217
|
-
topicId: activeTopicId, // if there is activeTopicId,then add it to topicId
|
|
218
|
-
threadId: params?.threadId,
|
|
219
|
-
fileChunks,
|
|
220
|
-
ragQueryId,
|
|
221
|
-
};
|
|
222
|
-
|
|
223
|
-
const result = await get().internal_createMessage(assistantMessage);
|
|
224
|
-
|
|
225
|
-
if (!result) return;
|
|
226
|
-
const assistantId = result.id;
|
|
227
|
-
|
|
228
|
-
// 3. place a search with the search working model if this model is not support tool use
|
|
229
|
-
const aiInfraStoreState = getAiInfraStoreState();
|
|
230
|
-
const isModelSupportToolUse = aiModelSelectors.isModelSupportToolUse(
|
|
231
|
-
model,
|
|
232
|
-
provider!,
|
|
233
|
-
)(aiInfraStoreState);
|
|
234
|
-
const isProviderHasBuiltinSearch = aiProviderSelectors.isProviderHasBuiltinSearch(provider!)(
|
|
235
|
-
aiInfraStoreState,
|
|
236
|
-
);
|
|
237
|
-
const isModelHasBuiltinSearch = aiModelSelectors.isModelHasBuiltinSearch(
|
|
238
|
-
model,
|
|
239
|
-
provider!,
|
|
240
|
-
)(aiInfraStoreState);
|
|
241
|
-
const isModelBuiltinSearchInternal = aiModelSelectors.isModelBuiltinSearchInternal(
|
|
242
|
-
model,
|
|
243
|
-
provider!,
|
|
244
|
-
)(aiInfraStoreState);
|
|
245
|
-
const useModelBuiltinSearch = agentChatConfigSelectors.useModelBuiltinSearch(agentStoreState);
|
|
246
|
-
const useModelSearch =
|
|
247
|
-
((isProviderHasBuiltinSearch || isModelHasBuiltinSearch) && useModelBuiltinSearch) ||
|
|
248
|
-
isModelBuiltinSearchInternal;
|
|
249
|
-
const isAgentEnableSearch = agentChatConfigSelectors.isAgentEnableSearch(agentStoreState);
|
|
250
|
-
|
|
251
|
-
if (isAgentEnableSearch && !useModelSearch && !isModelSupportToolUse) {
|
|
252
|
-
const { model, provider } = agentChatConfigSelectors.searchFCModel(agentStoreState);
|
|
253
|
-
|
|
254
|
-
let isToolsCalling = false;
|
|
255
|
-
let isError = false;
|
|
256
|
-
|
|
257
|
-
const abortController = get().internal_toggleChatLoading(
|
|
258
|
-
true,
|
|
259
|
-
assistantId,
|
|
260
|
-
n('generateMessage(start)', { messageId: assistantId, messages }),
|
|
261
|
-
);
|
|
262
|
-
|
|
263
|
-
get().internal_toggleSearchWorkflow(true, assistantId);
|
|
264
|
-
await chatService.fetchPresetTaskResult({
|
|
265
|
-
params: { messages, model, provider, plugins: [WebBrowsingManifest.identifier] },
|
|
266
|
-
onFinish: async (_, { toolCalls, usage }) => {
|
|
267
|
-
if (toolCalls && toolCalls.length > 0) {
|
|
268
|
-
get().internal_toggleToolCallingStreaming(assistantId, undefined);
|
|
269
|
-
// update tools calling
|
|
270
|
-
await get().internal_updateMessageContent(assistantId, '', {
|
|
271
|
-
toolCalls,
|
|
272
|
-
metadata: usage,
|
|
273
|
-
model,
|
|
274
|
-
provider,
|
|
275
|
-
});
|
|
276
|
-
}
|
|
277
|
-
},
|
|
278
|
-
trace: {
|
|
279
|
-
traceId: params?.traceId,
|
|
280
|
-
sessionId: get().activeId,
|
|
281
|
-
topicId: get().activeTopicId,
|
|
282
|
-
traceName: TraceNameMap.SearchIntentRecognition,
|
|
283
|
-
},
|
|
284
|
-
abortController,
|
|
285
|
-
onMessageHandle: async (chunk) => {
|
|
286
|
-
if (chunk.type === 'tool_calls') {
|
|
287
|
-
get().internal_toggleSearchWorkflow(false, assistantId);
|
|
288
|
-
get().internal_toggleToolCallingStreaming(assistantId, chunk.isAnimationActives);
|
|
289
|
-
get().internal_dispatchMessage({
|
|
290
|
-
id: assistantId,
|
|
291
|
-
type: 'updateMessage',
|
|
292
|
-
value: { tools: get().internal_transformToolCalls(chunk.tool_calls) },
|
|
293
|
-
});
|
|
294
|
-
isToolsCalling = true;
|
|
295
|
-
}
|
|
296
|
-
|
|
297
|
-
if (chunk.type === 'text') {
|
|
298
|
-
abortController!.abort('not fc');
|
|
299
|
-
}
|
|
300
|
-
},
|
|
301
|
-
onErrorHandle: async (error) => {
|
|
302
|
-
isError = true;
|
|
303
|
-
await messageService.updateMessageError(assistantId, error);
|
|
304
|
-
await refreshMessages();
|
|
305
|
-
},
|
|
306
|
-
});
|
|
307
|
-
|
|
308
|
-
get().internal_toggleChatLoading(
|
|
309
|
-
false,
|
|
310
|
-
assistantId,
|
|
311
|
-
n('generateMessage(start)', { messageId: assistantId, messages }),
|
|
312
|
-
);
|
|
313
|
-
get().internal_toggleSearchWorkflow(false, assistantId);
|
|
314
|
-
|
|
315
|
-
// if there is error, then stop
|
|
316
|
-
if (isError) return;
|
|
317
|
-
|
|
318
|
-
// if it's the function call message, trigger the function method
|
|
319
|
-
if (isToolsCalling) {
|
|
320
|
-
get().internal_toggleMessageInToolsCalling(true, assistantId);
|
|
321
|
-
await refreshMessages();
|
|
322
|
-
await triggerToolCalls(assistantId, {
|
|
323
|
-
threadId: params?.threadId,
|
|
324
|
-
inPortalThread: params?.inPortalThread,
|
|
325
|
-
});
|
|
326
|
-
|
|
327
|
-
// then story the workflow
|
|
328
|
-
return;
|
|
329
|
-
}
|
|
330
|
-
}
|
|
331
|
-
|
|
332
|
-
// 4. fetch the AI response
|
|
333
|
-
const { isFunctionCall, content } = await internal_fetchAIChatMessage({
|
|
334
|
-
messages,
|
|
335
|
-
messageId: assistantId,
|
|
336
|
-
params,
|
|
337
|
-
model,
|
|
338
|
-
provider: provider!,
|
|
339
|
-
});
|
|
340
|
-
|
|
341
|
-
// 5. if it's the function call message, trigger the function method
|
|
342
|
-
if (isFunctionCall) {
|
|
343
|
-
get().internal_toggleMessageInToolsCalling(true, assistantId);
|
|
344
|
-
await refreshMessages();
|
|
345
|
-
await triggerToolCalls(assistantId, {
|
|
346
|
-
threadId: params?.threadId,
|
|
347
|
-
inPortalThread: params?.inPortalThread,
|
|
348
|
-
});
|
|
349
|
-
} else {
|
|
350
|
-
// 显示桌面通知(仅在桌面端且窗口隐藏时)
|
|
351
|
-
if (isDesktop) {
|
|
352
|
-
try {
|
|
353
|
-
// 动态导入桌面通知服务,避免在非桌面端环境中导入
|
|
354
|
-
const { desktopNotificationService } = await import(
|
|
355
|
-
'@/services/electron/desktopNotification'
|
|
356
|
-
);
|
|
357
|
-
|
|
358
|
-
await desktopNotificationService.showNotification({
|
|
359
|
-
body: content,
|
|
360
|
-
title: t('notification.finishChatGeneration', { ns: 'electron' }),
|
|
361
|
-
});
|
|
362
|
-
} catch (error) {
|
|
363
|
-
// 静默处理错误,不影响正常流程
|
|
364
|
-
console.error('Desktop notification error:', error);
|
|
365
|
-
}
|
|
366
|
-
}
|
|
367
|
-
}
|
|
368
|
-
|
|
369
|
-
// 6. summary history if context messages is larger than historyCount
|
|
370
|
-
const historyCount = agentChatConfigSelectors.historyCount(agentStoreState);
|
|
371
|
-
|
|
372
|
-
if (
|
|
373
|
-
agentChatConfigSelectors.enableHistoryCount(agentStoreState) &&
|
|
374
|
-
chatConfig.enableCompressHistory &&
|
|
375
|
-
originalMessages.length > historyCount
|
|
376
|
-
) {
|
|
377
|
-
// after generation: [u1,a1,u2,a2,u3,a3]
|
|
378
|
-
// but the `originalMessages` is still: [u1,a1,u2,a2,u3]
|
|
379
|
-
// So if historyCount=2, we need to summary [u1,a1,u2,a2]
|
|
380
|
-
// because user find UI is [u1,a1,u2,a2 | u3,a3]
|
|
381
|
-
const historyMessages = originalMessages.slice(0, -historyCount + 1);
|
|
382
|
-
|
|
383
|
-
await get().internal_summaryHistory(historyMessages);
|
|
384
|
-
}
|
|
385
|
-
},
|
|
386
|
-
internal_fetchAIChatMessage: async ({ messages, messageId, params, provider, model }) => {
|
|
387
|
-
const {
|
|
388
|
-
internal_toggleChatLoading,
|
|
389
|
-
refreshMessages,
|
|
390
|
-
internal_updateMessageContent,
|
|
391
|
-
internal_dispatchMessage,
|
|
392
|
-
internal_toggleToolCallingStreaming,
|
|
393
|
-
internal_toggleChatReasoning,
|
|
394
|
-
} = get();
|
|
395
|
-
|
|
396
|
-
const abortController = internal_toggleChatLoading(
|
|
397
|
-
true,
|
|
398
|
-
messageId,
|
|
399
|
-
n('generateMessage(start)', { messageId, messages }),
|
|
400
|
-
);
|
|
401
|
-
|
|
402
|
-
const agentConfig =
|
|
403
|
-
params?.agentConfig || agentSelectors.currentAgentConfig(getAgentStoreState());
|
|
404
|
-
const chatConfig = agentChatConfigSelectors.currentChatConfig(getAgentStoreState());
|
|
405
|
-
|
|
406
|
-
// ================================== //
|
|
407
|
-
// messages uniformly preprocess //
|
|
408
|
-
// ================================== //
|
|
409
|
-
// 4. handle max_tokens
|
|
410
|
-
agentConfig.params.max_tokens = chatConfig.enableMaxTokens
|
|
411
|
-
? agentConfig.params.max_tokens
|
|
412
|
-
: undefined;
|
|
413
|
-
|
|
414
|
-
// 5. handle reasoning_effort
|
|
415
|
-
agentConfig.params.reasoning_effort = chatConfig.enableReasoningEffort
|
|
416
|
-
? agentConfig.params.reasoning_effort
|
|
417
|
-
: undefined;
|
|
418
|
-
|
|
419
|
-
let isFunctionCall = false;
|
|
420
|
-
let msgTraceId: string | undefined;
|
|
421
|
-
let output = '';
|
|
422
|
-
let thinking = '';
|
|
423
|
-
let thinkingStartAt: number;
|
|
424
|
-
let duration: number;
|
|
425
|
-
// to upload image
|
|
426
|
-
const uploadTasks: Map<string, Promise<{ id?: string; url?: string }>> = new Map();
|
|
427
|
-
|
|
428
|
-
// Throttle tool_calls updates to prevent excessive re-renders (max once per 300ms)
|
|
429
|
-
const throttledUpdateToolCalls = throttle(
|
|
430
|
-
(toolCalls: any[]) => {
|
|
431
|
-
internal_dispatchMessage({
|
|
432
|
-
id: messageId,
|
|
433
|
-
type: 'updateMessage',
|
|
434
|
-
value: { tools: get().internal_transformToolCalls(toolCalls) },
|
|
435
|
-
});
|
|
436
|
-
},
|
|
437
|
-
300,
|
|
438
|
-
{ leading: true, trailing: true },
|
|
439
|
-
);
|
|
440
|
-
|
|
441
|
-
const historySummary = chatConfig.enableCompressHistory
|
|
442
|
-
? topicSelectors.currentActiveTopicSummary(get())
|
|
443
|
-
: undefined;
|
|
444
|
-
await chatService.createAssistantMessageStream({
|
|
445
|
-
abortController,
|
|
446
|
-
params: {
|
|
447
|
-
messages,
|
|
448
|
-
model,
|
|
449
|
-
provider,
|
|
450
|
-
...agentConfig.params,
|
|
451
|
-
plugins: agentConfig.plugins,
|
|
452
|
-
},
|
|
453
|
-
historySummary: historySummary?.content,
|
|
454
|
-
trace: {
|
|
455
|
-
traceId: params?.traceId,
|
|
456
|
-
sessionId: get().activeId,
|
|
457
|
-
topicId: get().activeTopicId,
|
|
458
|
-
traceName: TraceNameMap.Conversation,
|
|
459
|
-
},
|
|
460
|
-
onErrorHandle: async (error) => {
|
|
461
|
-
await messageService.updateMessageError(messageId, error);
|
|
462
|
-
await refreshMessages();
|
|
463
|
-
},
|
|
464
|
-
onFinish: async (
|
|
465
|
-
content,
|
|
466
|
-
{ traceId, observationId, toolCalls, reasoning, grounding, usage, speed },
|
|
467
|
-
) => {
|
|
468
|
-
// if there is traceId, update it
|
|
469
|
-
if (traceId) {
|
|
470
|
-
msgTraceId = traceId;
|
|
471
|
-
messageService.updateMessage(messageId, {
|
|
472
|
-
traceId,
|
|
473
|
-
observationId: observationId ?? undefined,
|
|
474
|
-
});
|
|
475
|
-
}
|
|
476
|
-
|
|
477
|
-
// 等待所有图片上传完成
|
|
478
|
-
let finalImages: ChatImageItem[] = [];
|
|
479
|
-
|
|
480
|
-
if (uploadTasks.size > 0) {
|
|
481
|
-
try {
|
|
482
|
-
// 等待所有上传任务完成
|
|
483
|
-
const uploadResults = await Promise.all(uploadTasks.values());
|
|
484
|
-
|
|
485
|
-
// 使用上传后的 S3 URL 替换原始图像数据
|
|
486
|
-
finalImages = uploadResults.filter((i) => !!i.url) as ChatImageItem[];
|
|
487
|
-
} catch (error) {
|
|
488
|
-
console.error('Error waiting for image uploads:', error);
|
|
489
|
-
}
|
|
490
|
-
}
|
|
491
|
-
|
|
492
|
-
let parsedToolCalls = toolCalls;
|
|
493
|
-
if (parsedToolCalls && parsedToolCalls.length > 0) {
|
|
494
|
-
// Flush any pending throttled updates before finalizing
|
|
495
|
-
throttledUpdateToolCalls.flush();
|
|
496
|
-
internal_toggleToolCallingStreaming(messageId, undefined);
|
|
497
|
-
parsedToolCalls = parsedToolCalls.map((item) => ({
|
|
498
|
-
...item,
|
|
499
|
-
function: {
|
|
500
|
-
...item.function,
|
|
501
|
-
arguments: !!item.function.arguments ? item.function.arguments : '{}',
|
|
502
|
-
},
|
|
503
|
-
}));
|
|
504
|
-
isFunctionCall = true;
|
|
505
|
-
}
|
|
506
|
-
|
|
507
|
-
internal_toggleChatReasoning(false, messageId, n('toggleChatReasoning/false') as string);
|
|
508
|
-
|
|
509
|
-
// update the content after fetch result
|
|
510
|
-
await internal_updateMessageContent(messageId, content, {
|
|
511
|
-
toolCalls: parsedToolCalls,
|
|
512
|
-
reasoning: !!reasoning ? { ...reasoning, duration } : undefined,
|
|
513
|
-
search: !!grounding?.citations ? grounding : undefined,
|
|
514
|
-
imageList: finalImages.length > 0 ? finalImages : undefined,
|
|
515
|
-
metadata: speed ? { ...usage, ...speed } : usage,
|
|
516
|
-
});
|
|
517
|
-
},
|
|
518
|
-
onMessageHandle: async (chunk) => {
|
|
519
|
-
switch (chunk.type) {
|
|
520
|
-
case 'grounding': {
|
|
521
|
-
// if there is no citations, then stop
|
|
522
|
-
if (
|
|
523
|
-
!chunk.grounding ||
|
|
524
|
-
!chunk.grounding.citations ||
|
|
525
|
-
chunk.grounding.citations.length <= 0
|
|
526
|
-
)
|
|
527
|
-
return;
|
|
528
|
-
|
|
529
|
-
internal_dispatchMessage({
|
|
530
|
-
id: messageId,
|
|
531
|
-
type: 'updateMessage',
|
|
532
|
-
value: {
|
|
533
|
-
search: {
|
|
534
|
-
citations: chunk.grounding.citations,
|
|
535
|
-
searchQueries: chunk.grounding.searchQueries,
|
|
536
|
-
},
|
|
537
|
-
},
|
|
538
|
-
});
|
|
539
|
-
break;
|
|
540
|
-
}
|
|
541
|
-
|
|
542
|
-
case 'base64_image': {
|
|
543
|
-
internal_dispatchMessage({
|
|
544
|
-
id: messageId,
|
|
545
|
-
type: 'updateMessage',
|
|
546
|
-
value: {
|
|
547
|
-
imageList: chunk.images.map((i) => ({ id: i.id, url: i.data, alt: i.id })),
|
|
548
|
-
},
|
|
549
|
-
});
|
|
550
|
-
const image = chunk.image;
|
|
551
|
-
|
|
552
|
-
const task = getFileStoreState()
|
|
553
|
-
.uploadBase64FileWithProgress(image.data)
|
|
554
|
-
.then((value) => ({
|
|
555
|
-
id: value?.id,
|
|
556
|
-
url: value?.url,
|
|
557
|
-
alt: value?.filename || value?.id,
|
|
558
|
-
}));
|
|
559
|
-
|
|
560
|
-
uploadTasks.set(image.id, task);
|
|
561
|
-
|
|
562
|
-
break;
|
|
563
|
-
}
|
|
564
|
-
|
|
565
|
-
case 'text': {
|
|
566
|
-
output += chunk.text;
|
|
567
|
-
|
|
568
|
-
// if there is no duration, it means the end of reasoning
|
|
569
|
-
if (!duration) {
|
|
570
|
-
duration = Date.now() - thinkingStartAt;
|
|
571
|
-
|
|
572
|
-
const isInChatReasoning =
|
|
573
|
-
messageStateSelectors.isMessageInChatReasoning(messageId)(get());
|
|
574
|
-
if (isInChatReasoning) {
|
|
575
|
-
internal_toggleChatReasoning(
|
|
576
|
-
false,
|
|
577
|
-
messageId,
|
|
578
|
-
n('toggleChatReasoning/false') as string,
|
|
579
|
-
);
|
|
580
|
-
}
|
|
581
|
-
}
|
|
582
|
-
|
|
583
|
-
internal_dispatchMessage({
|
|
584
|
-
id: messageId,
|
|
585
|
-
type: 'updateMessage',
|
|
586
|
-
value: {
|
|
587
|
-
content: output,
|
|
588
|
-
reasoning: !!thinking ? { content: thinking, duration } : undefined,
|
|
589
|
-
},
|
|
590
|
-
});
|
|
591
|
-
break;
|
|
592
|
-
}
|
|
593
|
-
|
|
594
|
-
case 'reasoning': {
|
|
595
|
-
// if there is no thinkingStartAt, it means the start of reasoning
|
|
596
|
-
if (!thinkingStartAt) {
|
|
597
|
-
thinkingStartAt = Date.now();
|
|
598
|
-
internal_toggleChatReasoning(
|
|
599
|
-
true,
|
|
600
|
-
messageId,
|
|
601
|
-
n('toggleChatReasoning/true') as string,
|
|
602
|
-
);
|
|
603
|
-
}
|
|
604
|
-
|
|
605
|
-
thinking += chunk.text;
|
|
606
|
-
|
|
607
|
-
internal_dispatchMessage({
|
|
608
|
-
id: messageId,
|
|
609
|
-
type: 'updateMessage',
|
|
610
|
-
value: { reasoning: { content: thinking } },
|
|
611
|
-
});
|
|
612
|
-
break;
|
|
613
|
-
}
|
|
614
|
-
|
|
615
|
-
// is this message is just a tool call
|
|
616
|
-
case 'tool_calls': {
|
|
617
|
-
internal_toggleToolCallingStreaming(messageId, chunk.isAnimationActives);
|
|
618
|
-
throttledUpdateToolCalls(chunk.tool_calls);
|
|
619
|
-
isFunctionCall = true;
|
|
620
|
-
const isInChatReasoning =
|
|
621
|
-
messageStateSelectors.isMessageInChatReasoning(messageId)(get());
|
|
622
|
-
if (isInChatReasoning) {
|
|
623
|
-
internal_toggleChatReasoning(
|
|
624
|
-
false,
|
|
625
|
-
messageId,
|
|
626
|
-
n('toggleChatReasoning/false') as string,
|
|
627
|
-
);
|
|
628
|
-
}
|
|
629
|
-
}
|
|
630
|
-
}
|
|
631
|
-
},
|
|
632
|
-
});
|
|
633
|
-
|
|
634
|
-
internal_toggleChatLoading(false, messageId, n('generateMessage(end)') as string);
|
|
635
|
-
|
|
636
|
-
return { isFunctionCall, traceId: msgTraceId, content: output };
|
|
637
|
-
},
|
|
638
|
-
|
|
639
|
-
internal_resendMessage: async (
  messageId,
  { traceId, messages: outChats, threadId: outThreadId, inPortalThread } = {},
) => {
  // Rebuild the relevant history that should be replayed for this message.
  const history = outChats ?? chatSelectors.mainAIChats(get());

  const targetIndex = history.findIndex((item) => item.id === messageId);
  if (targetIndex < 0) return;

  const target = history[targetIndex];

  // Decide how much history to include, based on who authored the message.
  let contextMessages: UIChatMessage[] = [];

  if (target.role === 'tool' || target.role === 'user') {
    contextMessages = history.slice(0, targetIndex + 1);
  } else if (target.role === 'assistant') {
    // The message was produced by the AI, so locate its originating user message.
    const parentIndex = history.findIndex((item) => item.id === target.parentId);
    // Without a resolvable parentId, fall back to the same behavior as user/tool.
    const sliceEnd = parentIndex < 0 ? targetIndex + 1 : parentIndex + 1;
    contextMessages = history.slice(0, sliceEnd);
  }

  if (contextMessages.length <= 0) return;

  const lastUserMsg = contextMessages.findLast((item) => item.role === 'user');
  if (!lastUserMsg) return;

  const { internal_coreProcessMessage, activeThreadId } = get();

  await internal_coreProcessMessage(contextMessages, lastUserMsg.id, {
    inPortalThread,
    ragQuery: get().internal_shouldUseRAG() ? lastUserMsg.content : undefined,
    threadId: outThreadId ?? activeThreadId,
    traceId,
  });
},
|
|
686
|
-
|
|
687
|
-
// ----- Loading ------- //
|
|
688
|
-
// Track per-message chat loading state via the shared loading-array toggle.
internal_toggleChatLoading: (loading, id, action) =>
  get().internal_toggleLoadingArrays('chatLoadingIds', loading, id, action),
|
|
691
|
-
// Track which messages are currently executing tool calls.
internal_toggleMessageInToolsCalling: (loading, id) =>
  get().internal_toggleLoadingArrays('messageInToolsCallingIds', loading, id),
|
|
694
|
-
// Track per-message reasoning (chain-of-thought streaming) state.
internal_toggleChatReasoning: (loading, id, action) =>
  get().internal_toggleLoadingArrays('reasoningLoadingIds', loading, id, action),
|
|
697
|
-
internal_toggleToolCallingStreaming: (id, streaming) => {
  const current = get().toolCallingStreamIds;

  // A truthy `streaming` value records the entry; a falsy one clears it.
  const updated = produce(current, (draft) => {
    if (streaming) {
      draft[id] = streaming;
    } else {
      delete draft[id];
    }
  });

  // Skip the store write entirely when nothing actually changed.
  if (isEqual(current, updated)) return;

  const actionLabel = `toggleToolCallingStreaming/${streaming ? 'start' : 'end'}`;

  set({ toolCallingStreamIds: updated }, false, actionLabel);
},
|
|
716
|
-
|
|
717
|
-
// Track which messages are running the search workflow.
internal_toggleSearchWorkflow: (loading, id) =>
  get().internal_toggleLoadingArrays('searchWorkflowLoadingIds', loading, id),
|
|
720
|
-
});
|