@lobehub/chat 1.33.1 → 1.33.2

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,31 @@
 
  # Changelog
 
+ ### [Version 1.33.2](https://github.com/lobehub/lobe-chat/compare/v1.33.1...v1.33.2)
+
+ <sup>Released on **2024-11-25**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Fix multi-turns tools calling.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Fix multi-turns tools calling, closes [#4789](https://github.com/lobehub/lobe-chat/issues/4789) ([9d8845f](https://github.com/lobehub/lobe-chat/commit/9d8845f))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ### [Version 1.33.1](https://github.com/lobehub/lobe-chat/compare/v1.33.0...v1.33.1)
 
  <sup>Released on **2024-11-25**</sup>
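For context on the bug being fixed: with OpenAI-style tool calling, a follow-up turn only works if the request history still contains the assistant message that issued the tool calls and the matching `tool` result messages keyed by call id. If those entries are filtered out before the next request, the provider cannot resolve the call ids and multi-turn tool calling breaks. A minimal sketch of the message sequence involved, using generic OpenAI-style shapes rather than lobe-chat's own types:

```ts
// Sketch of the history a second tool-calling turn depends on.
// Shapes are simplified OpenAI-style structures, not lobe-chat's types.
interface ToolCall {
  id: string;
  type: 'function';
  function: { name: string; arguments: string };
}

type ChatMessage =
  | { role: 'system' | 'user'; content: string }
  | { role: 'assistant'; content: string | null; tool_calls?: ToolCall[] }
  | { role: 'tool'; tool_call_id: string; content: string };

const history: ChatMessage[] = [
  { role: 'user', content: 'What is the weather in Berlin?' },
  // Turn 1: the assistant answers with a tool call instead of text.
  {
    role: 'assistant',
    content: null,
    tool_calls: [
      {
        id: 'call_1',
        type: 'function',
        function: { name: 'getWeather', arguments: '{"city":"Berlin"}' },
      },
    ],
  },
  // The tool result must remain in the history, keyed by the call id,
  // so the next request can continue the conversation or call again.
  { role: 'tool', tool_call_id: 'call_1', content: '{"tempC": 4}' },
];
```

Dropping the `tool` entries (or the assistant's `tool_calls` message) from such a history is exactly the failure mode a display-only selector can introduce, which is what the code change further below addresses.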
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.33.1",
+   "version": "1.33.2",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
@@ -207,7 +207,7 @@ export const generateAIChat: StateCreator<
    }
 
    // Get the current messages to generate AI response
-   const messages = chatSelectors.mainDisplayChats(get());
+   const messages = chatSelectors.activeBaseChats(get());
    const userFiles = chatSelectors.currentUserFiles(get()).map((f) => f.id);
 
    await internal_coreProcessMessage(messages, id, {
@@ -484,7 +484,7 @@ export const generateAIChat: StateCreator<
 
   internal_resendMessage: async (messageId, traceId) => {
     // 1. Build all of the relevant message history
-    const chats = chatSelectors.mainDisplayChats(get());
+    const chats = chatSelectors.activeBaseChats(get());
 
     const currentIndex = chats.findIndex((c) => c.id === messageId);
     if (currentIndex < 0) return;
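The two hunks above (their file header is not shown in this listing, but they come from the chat store's AI-chat actions) swap the selector used to assemble the message history sent to the model. Judging from the selector names and the test added below, `mainDisplayChats` is a display-oriented selector that drops `tool` messages, while `activeBaseChats` returns the full message list for the active conversation, tool results included. A hypothetical sketch of that split, with illustrative types and store shape rather than lobe-chat's actual ones:

```ts
// Hypothetical sketch of the two selectors; the names mirror the diff, but
// the store shape and filtering rule are assumptions based on the test below.
interface ChatMessage {
  id: string;
  role: 'user' | 'assistant' | 'tool';
  content: string;
}

interface ChatStoreState {
  activeId: string;
  messagesMap: Record<string, ChatMessage[]>;
}

// Full history for the active conversation: user, assistant, AND tool
// messages. This is what a multi-turn tool-calling request needs.
const activeBaseChats = (s: ChatStoreState): ChatMessage[] =>
  s.messagesMap[s.activeId] ?? [];

// Display-oriented list: tool results are rendered inside their assistant
// message in the UI, so standalone tool messages are filtered out.
const mainDisplayChats = (s: ChatStoreState): ChatMessage[] =>
  activeBaseChats(s).filter((m) => m.role !== 'tool');
```

Under that reading, feeding `mainDisplayChats` into `internal_coreProcessMessage` silently removed the tool results from the request, which is consistent with the "Fix multi-turns tools calling" changelog entry.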
@@ -232,6 +232,19 @@ describe('chatSelectors', () => {
     });
   });
 
+  describe('mainDisplayChats', () => {
+    it('should return existing messages except tool message', () => {
+      const state = merge(initialStore, {
+        messagesMap: {
+          [messageMapKey('someActiveId')]: mockMessages,
+        },
+        activeId: 'someActiveId',
+      });
+      const chats = chatSelectors.mainDisplayChats(state);
+      expect(chats).toEqual(mockedChats.slice(0, 2));
+    });
+  });
+
   describe('chatsMessageString', () => {
     it('should concatenate the contents of all messages returned by currentChatsWithHistoryConfig', () => {
       // Prepare a state with a few messages
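The new test pins down the filtering behaviour of `mainDisplayChats`: the expectation `mockedChats.slice(0, 2)` keeps only the first two entries, which suggests the fixture ends with a tool message that the display selector drops. An illustrative fixture (not the repository's actual `mockMessages`) showing the same outcome:

```ts
// Illustrative fixture: three messages where the third is a tool message,
// so a display selector that drops tool messages returns only the first two.
const messages = [
  { id: 'msg-1', role: 'user', content: 'Hi' },
  { id: 'msg-2', role: 'assistant', content: 'Hello, how can I help?' },
  { id: 'msg-3', role: 'tool', content: '{"result": 42}' },
];

const displayed = messages.filter((m) => m.role !== 'tool');
console.log(displayed); // first two messages only, matching slice(0, 2)
```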