@lobehub/chat 1.84.14 → 1.84.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,56 @@

  # Changelog

+ ### [Version 1.84.16](https://github.com/lobehub/lobe-chat/compare/v1.84.15...v1.84.16)
+
+ <sup>Released on **2025-05-02**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Fix desktop quitting with reopen window.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Fix desktop quitting with reopen window, closes [#7675](https://github.com/lobehub/lobe-chat/issues/7675) ([edeabcf](https://github.com/lobehub/lobe-chat/commit/edeabcf))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
+ ### [Version 1.84.15](https://github.com/lobehub/lobe-chat/compare/v1.84.14...v1.84.15)
+
+ <sup>Released on **2025-05-01**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Siliconflow requests with tools no longer force non-streaming.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Siliconflow requests with tools no longer force non-streaming, closes [#7663](https://github.com/lobehub/lobe-chat/issues/7663) ([77777b2](https://github.com/lobehub/lobe-chat/commit/77777b2))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ### [Version 1.84.14](https://github.com/lobehub/lobe-chat/compare/v1.84.13...v1.84.14)

  <sup>Released on **2025-05-01**</sup>
@@ -55,6 +55,12 @@ export default class Browser {
      return this.retrieveOrInitialize();
    }

+   get webContents() {
+     if (this._browserWindow.isDestroyed()) return null;
+
+     return this._browserWindow.webContents;
+   }
+
    /**
     * Method to construct BrowserWindows object
     * @param options
@@ -210,7 +216,6 @@ export default class Browser {
        session: browserWindow.webContents.session,
      });

-     console.log('platform:',process.platform);
      // Windows 11 can use this new API
      if (process.platform === 'win32' && browserWindow.setBackgroundMaterial) {
        logger.debug(`[${this.identifier}] Setting window background material for Windows 11`);
@@ -157,8 +157,8 @@ export default class BrowserManager {
      this.webContentsMap.set(browser.browserWindow.webContents, identifier);

      // Clean up the mapping when the window closes
-     browser.browserWindow.on('closed', () => {
-       this.webContentsMap.delete(browser.browserWindow.webContents);
+     browser.browserWindow.on('close', () => {
+       if (browser.webContents) this.webContentsMap.delete(browser.webContents);
      });

      return browser;
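
The two hunks above are the core of the v1.84.16 desktop fix. Electron's `closed` event fires after the native window has been destroyed, at which point reading `browserWindow.webContents` can throw; listening on `close` (which fires just before destruction) and going through the new null-guarded `webContents` getter avoids touching a destroyed object. A minimal standalone sketch of the same pattern (names here are hypothetical, not lobe-chat's):

```ts
import { BrowserWindow, WebContents } from 'electron';

const registry = new Map<WebContents, string>();

function track(win: BrowserWindow, id: string) {
  registry.set(win.webContents, id);

  // 'close' fires before the native window is destroyed, so webContents
  // is still safe to read here; 'closed' would be too late.
  win.on('close', () => {
    const wc = win.isDestroyed() ? null : win.webContents;
    if (wc) registry.delete(wc);
  });
}
```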
package/changelog/v1.json CHANGED
@@ -1,4 +1,22 @@
  [
+   {
+     "children": {
+       "fixes": [
+         "Fix desktop quitting with reopen window."
+       ]
+     },
+     "date": "2025-05-02",
+     "version": "1.84.16"
+   },
+   {
+     "children": {
+       "fixes": [
+         "Siliconflow requests with tools no longer force non-streaming."
+       ]
+     },
+     "date": "2025-05-01",
+     "version": "1.84.15"
+   },
    {
      "children": {
        "improvements": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.84.14",
+   "version": "1.84.16",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
@@ -147,7 +147,7 @@
    "@lobehub/icons": "^2.0.0",
    "@lobehub/tts": "^2.0.0",
    "@lobehub/ui": "^2.0.10",
-   "@modelcontextprotocol/sdk": "^1.10.1",
+   "@modelcontextprotocol/sdk": "^1.11.0",
    "@neondatabase/serverless": "^1.0.0",
    "@next/third-parties": "^15.3.0",
    "@react-spring/web": "^9.7.5",
@@ -163,6 +163,7 @@
    "@vercel/edge-config": "^1.4.0",
    "@vercel/functions": "^2.0.0",
    "@vercel/speed-insights": "^1.2.0",
+   "@xterm/xterm": "^5.5.0",
    "ahooks": "^3.8.4",
    "ai": "^3.4.33",
    "antd": "^5.24.6",
@@ -44,7 +44,7 @@ const CheckError = ({

    const errorMessage = errorBody.error?.message;

-   if (error?.type === 'OllamaServiceUnavailable') return <OllamaSetupGuide container={false} />;
+   if (error?.type === 'OllamaServiceUnavailable') return <OllamaSetupGuide />;

    // error when the model has not been pulled
    const unresolvedModel = errorMessage?.match(UNRESOLVED_MODEL_REGEXP)?.[1];
@@ -3,6 +3,28 @@ import { AIChatModelCard } from '@/types/aiModel';

  // https://siliconflow.cn/zh-cn/models

  const siliconcloudChatModels: AIChatModelCard[] = [
+   {
+     abilities: {
+       functionCall: true,
+       reasoning: true,
+     },
+     contextWindowTokens: 131_072,
+     description:
+       'Qwen3是一款能力大幅提升的新一代通义千问大模型,在推理、通用、Agent和多语言等多个核心能力上均达到业界领先水平,并支持思考模式切换。',
+     displayName: 'Qwen3 235B A22B',
+     id: 'Qwen/Qwen3-235B-A22B',
+     organization: 'Qwen',
+     pricing: {
+       currency: 'CNY',
+       input: 1.25,
+       output: 5,
+     },
+     releasedAt: '2025-04-28',
+     settings: {
+       extendParams: ['enableReasoning', 'reasoningBudgetToken'],
+     },
+     type: 'chat',
+   },
    {
      abilities: {
        functionCall: true,
@@ -110,6 +132,7 @@ const siliconcloudChatModels: AIChatModelCard[] = [
    },
    {
      abilities: {
+       functionCall: true,
        reasoning: true,
      },
      contextWindowTokens: 32_768,
@@ -126,6 +149,7 @@ const siliconcloudChatModels: AIChatModelCard[] = [
    },
    {
      abilities: {
+       functionCall: true,
        reasoning: true,
      },
      contextWindowTokens: 32_768,
@@ -142,6 +166,9 @@ const siliconcloudChatModels: AIChatModelCard[] = [
      type: 'chat',
    },
    {
+     abilities: {
+       functionCall: true,
+     },
      contextWindowTokens: 32_768,
      description:
        'GLM-4-32B-0414 是 GLM 系列的新一代开源模型,拥有 320 亿参数。该模型性能可与 OpenAI 的 GPT 系列和 DeepSeek 的 V3/R1 系列相媲美。',
@@ -155,6 +182,9 @@ const siliconcloudChatModels: AIChatModelCard[] = [
      type: 'chat',
    },
    {
+     abilities: {
+       functionCall: true,
+     },
      contextWindowTokens: 32_768,
      description:
        'GLM-4-9B-0414 是 GLM 系列的小型模型,拥有 90 亿参数。该模型继承了 GLM-4-32B 系列的技术特点,但提供了更轻量级的部署选择。尽管规模较小,GLM-4-9B-0414 仍在代码生成、网页设计、SVG 图形生成和基于搜索的写作等任务上展现出色能力。',
@@ -9,8 +9,6 @@ import { Center } from 'react-layout-kit';
  import FormAction from '@/components/FormAction';
  import { useChatStore } from '@/store/chat';

- import { ErrorActionContainer } from '../Conversation/Error/style';
-
  // TODO: Improve the Ollama setup flow; in isDesktop mode we can do end-to-end detection directly
  const OllamaDesktopSetupGuide = memo<{ id?: string }>(({ id }) => {
    const theme = useTheme();
@@ -22,44 +20,42 @@ const OllamaDesktopSetupGuide = memo<{ id?: string }>(({ id }) => {
    ]);

    return (
-     <ErrorActionContainer style={{ paddingBlock: 0 }}>
-       <Center gap={16} paddingBlock={32} style={{ maxWidth: 300, width: '100%' }}>
-         <FormAction
-           avatar={<Ollama color={theme.colorPrimary} size={64} />}
-           description={
-             <span>
-               <Trans i18nKey={'OllamaSetupGuide.install.description'} ns={'components'}>
-                 请确认你已经开启 Ollama ,如果没有安装 Ollama ,请前往官网
-                 <Link href={'https://ollama.com/download'}>下载</Link>
-               </Trans>
-             </span>
-           }
-           title={t('OllamaSetupGuide.install.title')}
-         />
-         {id && (
-           <>
-             <Button
-               block
-               onClick={() => {
-                 delAndRegenerateMessage(id);
-               }}
-               style={{ marginTop: 8 }}
-               type={'primary'}
-             >
-               {t('OllamaSetupGuide.action.start')}
-             </Button>
-             <Button
-               block
-               onClick={() => {
-                 deleteMessage(id);
-               }}
-             >
-               {t('OllamaSetupGuide.action.close')}
-             </Button>
-           </>
-         )}
-       </Center>
-     </ErrorActionContainer>
+     <Center gap={16} paddingBlock={32} style={{ maxWidth: 300, width: '100%' }}>
+       <FormAction
+         avatar={<Ollama color={theme.colorPrimary} size={64} />}
+         description={
+           <span>
+             <Trans i18nKey={'OllamaSetupGuide.install.description'} ns={'components'}>
+               请确认你已经开启 Ollama ,如果没有安装 Ollama ,请前往官网
+               <Link href={'https://ollama.com/download'}>下载</Link>
+             </Trans>
+           </span>
+         }
+         title={t('OllamaSetupGuide.install.title')}
+       />
+       {id && (
+         <>
+           <Button
+             block
+             onClick={() => {
+               delAndRegenerateMessage(id);
+             }}
+             style={{ marginTop: 8 }}
+             type={'primary'}
+           >
+             {t('OllamaSetupGuide.action.start')}
+           </Button>
+           <Button
+             block
+             onClick={() => {
+               deleteMessage(id);
+             }}
+           >
+             {t('OllamaSetupGuide.action.close')}
+           </Button>
+         </>
+       )}
+     </Center>
    );
  });

@@ -58,7 +58,6 @@ export const LobeSiliconCloudAI = LobeOpenAICompatibleFactory({
        max_tokens:
          max_tokens === undefined ? undefined : Math.min(Math.max(max_tokens, 1), 16_384),
        model,
-       stream: !payload.tools,
      } as any;
    },
  },
@@ -73,6 +72,7 @@ export const LobeSiliconCloudAI = LobeOpenAICompatibleFactory({
    const { LOBE_DEFAULT_MODEL_LIST } = await import('@/config/aiModels');

    const functionCallKeywords = [
+     'qwen/qwen3',
      'qwen/qwen2.5',
      'thudm/glm-4',
      'deepseek-ai/deepseek',
@@ -89,7 +89,12 @@ export const LobeSiliconCloudAI = LobeOpenAICompatibleFactory({
      'deepseek-ai/deepseek-vl',
    ];

-   const reasoningKeywords = ['deepseek-ai/deepseek-r1', 'qwen/qvq', 'qwen/qwq'];
+   const reasoningKeywords = [
+     'deepseek-ai/deepseek-r1',
+     'qwen/qvq',
+     'qwen/qwq',
+     'qwen/qwen3',
+   ];

    const modelsPage = (await client.models.list()) as any;
    const modelList: SiliconCloudModelCard[] = modelsPage.data;
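
The deleted `stream: !payload.tools` line is the substance of the v1.84.15 fix: it had forced every request carrying tool definitions into non-streaming mode. With it gone, the caller's streaming preference passes through unchanged, so tool calls stream like ordinary completions. A hypothetical before/after sketch of the payload handler's effect (the types are assumptions, not lobe-chat's):

```ts
interface ChatPayload {
  model: string;
  stream?: boolean;
  tools?: unknown[];
}

// Before: the presence of tools flipped the request to non-streaming.
const before = (payload: ChatPayload) => ({ ...payload, stream: !payload.tools });

// After: stream is left untouched, so tool-calling requests may stream too.
const after = (payload: ChatPayload) => ({ ...payload });
```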
@@ -21,11 +21,9 @@ exports[`MCPClient > Stdio Transport > should list tools via stdio 1`] = `
      "name": "echo",
    },
    {
+     "annotations": {},
      "description": "Lists all available tools and methods",
      "inputSchema": {
-       "$schema": "http://json-schema.org/draft-07/schema#",
-       "additionalProperties": false,
-       "properties": {},
        "type": "object",
      },
      "name": "debug",
@@ -70,7 +70,7 @@ export const createPluginStoreSlice: StateCreator<
    loadPluginStore: async () => {
      const pluginMarketIndex = await toolService.getToolList();

-     set({ pluginStoreList: pluginMarketIndex }, false, n('loadPluginList'));
+     set({ pluginStoreList: pluginMarketIndex || [] }, false, n('loadPluginList'));

      return pluginMarketIndex;
    },
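
The `|| []` default guards against `toolService.getToolList()` resolving to nothing (for example, a failed market request), keeping `pluginStoreList` a real array for anything that iterates it. A tiny sketch of the failure mode this prevents (names hypothetical):

```ts
type PluginIndex = { identifier: string }[] | undefined;

const renderIds = (list: { identifier: string }[]) => list.map((p) => p.identifier);

const pluginMarketIndex: PluginIndex = undefined; // e.g. the request failed

renderIds(pluginMarketIndex || []); // => [] — safe default
// renderIds(pluginMarketIndex);    // TypeError: Cannot read properties of undefined
```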
@@ -0,0 +1,46 @@
+ import { RenameLocalFileParams } from '@lobechat/electron-client-ipc';
+ import { Icon } from '@lobehub/ui';
+ import { createStyles } from 'antd-style';
+ import { ArrowRightIcon } from 'lucide-react';
+ import React, { memo } from 'react';
+ import { Flexbox } from 'react-layout-kit';
+
+ import FileIcon from '@/components/FileIcon';
+ import { LocalReadFileState } from '@/tools/local-files/type';
+ import { ChatMessagePluginError } from '@/types/message';
+
+ const useStyles = createStyles(({ css, token }) => ({
+   container: css`
+     color: ${token.colorTextQuaternary};
+   `,
+   new: css`
+     color: ${token.colorTextSecondary};
+   `,
+ }));
+
+ interface RenameLocalFileProps {
+   args: RenameLocalFileParams;
+   messageId: string;
+   pluginError: ChatMessagePluginError;
+   pluginState: LocalReadFileState;
+ }
+
+ const RenameLocalFile = memo<RenameLocalFileProps>(({ args }) => {
+   const { styles } = useStyles();
+
+   const oldFileName = args.path.split('/').at(-1);
+   return (
+     <Flexbox align={'center'} className={styles.container} gap={8} horizontal paddingInline={12}>
+       <Flexbox>{oldFileName}</Flexbox>
+       <Flexbox>
+         <Icon icon={ArrowRightIcon} />
+       </Flexbox>
+       <Flexbox className={styles.new} gap={4} horizontal>
+         <FileIcon fileName={args.newName} size={20} variant={'raw'} />
+         {args.newName}
+       </Flexbox>
+     </Flexbox>
+   );
+ });
+
+ export default RenameLocalFile;
@@ -6,12 +6,14 @@ import { BuiltinRenderProps } from '@/types/tool';

  import ListFiles from './ListFiles';
  import ReadLocalFile from './ReadLocalFile';
+ import RenameLocalFile from './RenameLocalFile';
  import SearchFiles from './SearchFiles';

  const RenderMap = {
    [LocalFilesApiName.searchLocalFiles]: SearchFiles,
    [LocalFilesApiName.listLocalFiles]: ListFiles,
    [LocalFilesApiName.readLocalFile]: ReadLocalFile,
+   [LocalFilesApiName.renameLocalFile]: RenameLocalFile,
  };

  const LocalFilesRender = memo<BuiltinRenderProps<LocalFileItem[]>>(
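
Registering `RenameLocalFile` in `RenderMap` is all the wiring the new component above needs: the map is keyed by tool API name, and the renderer looks the component up per invocation. A self-contained sketch of that dispatch pattern (the lookup shown is an assumption about how the map is consumed, not lobe-chat's exact code):

```tsx
import React from 'react';

// Each tool API name maps to the component that renders its result.
const SketchRenderMap: Record<string, React.FC<{ args: unknown }>> = {
  renameLocalFile: ({ args }) => <div>rename: {JSON.stringify(args)}</div>,
};

const ToolRender = ({ apiName, args }: { apiName: string; args: unknown }) => {
  const Render = SketchRenderMap[apiName];
  // Unknown tool names render nothing rather than crashing.
  return Render ? <Render args={args} /> : null;
};

export default ToolRender;
```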