@lobehub/chat 1.33.0 → 1.33.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,56 @@
 
  # Changelog
 
+ ### [Version 1.33.2](https://github.com/lobehub/lobe-chat/compare/v1.33.1...v1.33.2)
+
+ <sup>Released on **2024-11-25**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Fix multi-turns tools calling.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Fix multi-turns tools calling, closes [#4789](https://github.com/lobehub/lobe-chat/issues/4789) ([9d8845f](https://github.com/lobehub/lobe-chat/commit/9d8845f))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
+ ### [Version 1.33.1](https://github.com/lobehub/lobe-chat/compare/v1.33.0...v1.33.1)
+
+ <sup>Released on **2024-11-25**</sup>
+
+ #### 💄 Styles
+
+ - **misc**: Add gemini-exp-1121 model.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### Styles
+
+ - **misc**: Add gemini-exp-1121 model, closes [#4783](https://github.com/lobehub/lobe-chat/issues/4783) ([3b2cd88](https://github.com/lobehub/lobe-chat/commit/3b2cd88))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ## [Version 1.33.0](https://github.com/lobehub/lobe-chat/compare/v1.32.9...v1.33.0)
 
  <sup>Released on **2024-11-25**</sup>
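Note: the 1.33.2 fix targets multi-turn tool calling, i.e. conversations in which an earlier assistant tool call and its tool result have to be sent back to the model on the next request so it can keep reasoning over them. As a rough illustration of what such a history looks like (generic OpenAI-style message shapes for illustration only, not lobe-chat's internal types):

```ts
// Illustration only: a generic OpenAI-compatible history covering one finished
// tool round plus a follow-up user turn. Field names follow the common
// chat-completions convention; they are not lobe-chat's internal message type.
type ToolCall = {
  id: string;
  type: 'function';
  function: { name: string; arguments: string };
};

type ChatMessage =
  | { role: 'system' | 'user'; content: string }
  | { role: 'assistant'; content: string | null; tool_calls?: ToolCall[] }
  | { role: 'tool'; tool_call_id: string; content: string };

const history: ChatMessage[] = [
  { role: 'user', content: 'What is the weather in Berlin?' },
  {
    role: 'assistant',
    content: null,
    tool_calls: [
      {
        id: 'call_1',
        type: 'function',
        function: { name: 'getWeather', arguments: '{"city":"Berlin"}' },
      },
    ],
  },
  { role: 'tool', tool_call_id: 'call_1', content: '{"temp":4,"unit":"C"}' },
  { role: 'assistant', content: 'It is about 4 °C in Berlin right now.' },
  // Dropping the tool round above when building the next request strips the
  // context the model needs to answer the follow-up.
  { role: 'user', content: 'And tomorrow?' },
];

console.log(history.length); // 5 messages go into the next completion request
```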
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.33.0",
+   "version": "1.33.2",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
@@ -3,6 +3,23 @@ import { ModelProviderCard } from '@/types/llm';
  // ref: https://ai.google.dev/gemini-api/docs/models/gemini
  const Google: ModelProviderCard = {
    chatModels: [
+     {
+       description:
+         'Gemini Exp 1121 is the latest experimental multimodal AI model from Google, with fast processing and support for text, image, and video inputs, suited to efficient scaling across a wide range of tasks.',
+       displayName: 'Gemini Experimental 1121',
+       enabled: true,
+       functionCall: true,
+       id: 'gemini-exp-1121',
+       maxOutput: 8192,
+       pricing: {
+         cachedInput: 0,
+         input: 0,
+         output: 0,
+       },
+       releasedAt: '2024-11-21',
+       tokens: 32_767 + 8192,
+       vision: true,
+     },
      {
        description:
          'Gemini Exp 1114 is the latest experimental multimodal AI model from Google, with fast processing and support for text, image, and video inputs, suited to efficient scaling across a wide range of tasks.'
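The added entry follows the same `ModelProviderCard` chat-model shape as the surrounding Gemini entries: `tokens: 32_767 + 8192` encodes a 32,767-token input window plus the 8,192-token `maxOutput` (40,959 tokens in total), and every `pricing` field is zeroed. A minimal sketch of how such an entry might be consumed, using a locally defined reduced type and a hypothetical helper rather than the real `@/types/llm` exports:

```ts
// Sketch with assumed names: a reduced model shape mirroring only the fields
// visible in the diff, not the actual ModelProviderCard type from '@/types/llm'.
interface ChatModelSketch {
  displayName: string;
  enabled: boolean;
  functionCall?: boolean;
  id: string;
  maxOutput: number;
  tokens: number; // total window = input context + max output
  vision?: boolean;
}

const geminiExp1121: ChatModelSketch = {
  displayName: 'Gemini Experimental 1121',
  enabled: true,
  functionCall: true,
  id: 'gemini-exp-1121',
  maxOutput: 8192,
  tokens: 32_767 + 8192, // 40,959 in total
  vision: true,
};

// Hypothetical helper: how much of the window remains for the prompt itself.
const inputBudget = (model: ChatModelSketch) => model.tokens - model.maxOutput;

console.log(inputBudget(geminiExp1121)); // 32767
```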
@@ -207,7 +207,7 @@ export const generateAIChat: StateCreator<
  }
 
  // Get the current messages to generate AI response
- const messages = chatSelectors.mainDisplayChats(get());
+ const messages = chatSelectors.activeBaseChats(get());
  const userFiles = chatSelectors.currentUserFiles(get()).map((f) => f.id);
 
  await internal_coreProcessMessage(messages, id, {
@@ -484,7 +484,7 @@ export const generateAIChat: StateCreator<
 
  internal_resendMessage: async (messageId, traceId) => {
    // 1. Build all of the relevant message history
-   const chats = chatSelectors.mainDisplayChats(get());
+   const chats = chatSelectors.activeBaseChats(get());
 
    const currentIndex = chats.findIndex((c) => c.id === messageId);
    if (currentIndex < 0) return;
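Both call sites above now build the model-facing history from `chatSelectors.activeBaseChats` instead of `chatSelectors.mainDisplayChats`. Read together with the #4789 changelog entry and the test added in the next hunk, the apparent split is: the display selector hides tool messages from the rendered list, while the base selector keeps them so the assistant still sees earlier tool calls and results on later turns. A minimal sketch of that split, with hypothetical message and selector shapes rather than the store's real internals:

```ts
// Sketch only: hypothetical shapes, not the real Zustand store selectors.
type SketchMessage = { id: string; role: 'user' | 'assistant' | 'tool'; content: string };

// Everything in the active topic, including tool results, goes to the model.
const activeBaseChatsSketch = (messages: SketchMessage[]) => messages;

// The rendered list drops tool messages, matching the new mainDisplayChats test.
const mainDisplayChatsSketch = (messages: SketchMessage[]) =>
  messages.filter((m) => m.role !== 'tool');

const thread: SketchMessage[] = [
  { id: '1', role: 'user', content: 'question' },
  { id: '2', role: 'assistant', content: 'tool call' },
  { id: '3', role: 'tool', content: 'tool result' },
];

console.log(activeBaseChatsSketch(thread).length); // 3 – tool result kept for the next turn
console.log(mainDisplayChatsSketch(thread).length); // 2 – tool result hidden from the UI
```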
@@ -232,6 +232,19 @@ describe('chatSelectors', () => {
    });
  });
 
+   describe('mainDisplayChats', () => {
+     it('should return existing messages except tool message', () => {
+       const state = merge(initialStore, {
+         messagesMap: {
+           [messageMapKey('someActiveId')]: mockMessages,
+         },
+         activeId: 'someActiveId',
+       });
+       const chats = chatSelectors.mainDisplayChats(state);
+       expect(chats).toEqual(mockedChats.slice(0, 2));
+     });
+   });
+
    describe('chatsMessageString', () => {
      it('should concatenate the contents of all messages returned by currentChatsWithHistoryConfig', () => {
        // Prepare a state with a few messages