@lobehub/chat 1.68.1 → 1.68.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,56 @@
2
2
 
3
3
  # Changelog
4
4
 
5
+ ### [Version 1.68.3](https://github.com/lobehub/lobe-chat/compare/v1.68.2...v1.68.3)
6
+
7
+ <sup>Released on **2025-03-03**</sup>
8
+
9
+ #### 🐛 Bug Fixes
10
+
11
+ - **misc**: Improve url rules.
12
+
13
+ <br/>
14
+
15
+ <details>
16
+ <summary><kbd>Improvements and Fixes</kbd></summary>
17
+
18
+ #### What's fixed
19
+
20
+ - **misc**: Improve url rules, closes [#6669](https://github.com/lobehub/lobe-chat/issues/6669) ([5ee59e3](https://github.com/lobehub/lobe-chat/commit/5ee59e3))
21
+
22
+ </details>
23
+
24
+ <div align="right">
25
+
26
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
27
+
28
+ </div>
29
+
30
+ ### [Version 1.68.2](https://github.com/lobehub/lobe-chat/compare/v1.68.1...v1.68.2)
31
+
32
+ <sup>Released on **2025-03-03**</sup>
33
+
34
+ #### 💄 Styles
35
+
36
+ - **misc**: Add built-in web search support for Wenxin & Hunyuan.
37
+
38
+ <br/>
39
+
40
+ <details>
41
+ <summary><kbd>Improvements and Fixes</kbd></summary>
42
+
43
+ #### Styles
44
+
45
+ - **misc**: Add built-in web search support for Wenxin & Hunyuan, closes [#6617](https://github.com/lobehub/lobe-chat/issues/6617) ([dfd1f09](https://github.com/lobehub/lobe-chat/commit/dfd1f09))
46
+
47
+ </details>
48
+
49
+ <div align="right">
50
+
51
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
52
+
53
+ </div>
54
+
5
55
  ### [Version 1.68.1](https://github.com/lobehub/lobe-chat/compare/v1.68.0...v1.68.1)
6
56
 
7
57
  <sup>Released on **2025-03-03**</sup>
package/changelog/v1.json CHANGED
@@ -1,4 +1,22 @@
1
1
  [
2
+ {
3
+ "children": {
4
+ "fixes": [
5
+ "Improve url rules."
6
+ ]
7
+ },
8
+ "date": "2025-03-03",
9
+ "version": "1.68.3"
10
+ },
11
+ {
12
+ "children": {
13
+ "improvements": [
14
+ "Add built-in web search support for Wenxin & Hunyuan."
15
+ ]
16
+ },
17
+ "date": "2025-03-03",
18
+ "version": "1.68.2"
19
+ },
2
20
  {
3
21
  "children": {
4
22
  "fixes": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lobehub/chat",
3
- "version": "1.68.1",
3
+ "version": "1.68.3",
4
4
  "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
5
5
  "keywords": [
6
6
  "framework",
@@ -1,7 +1,7 @@
1
1
  import { CrawlImpl } from '../type';
2
2
 
3
3
  export const jina: CrawlImpl<{ apiKey?: string }> = async (url, params) => {
4
- const token = params.apiKey ?? process.env.JINA_API_KEY;
4
+ const token = params.apiKey ?? process.env.JINA_READER_API_KEY ?? process.env.JINA_API_KEY;
5
5
 
6
6
  try {
7
7
  const res = await fetch(`https://r.jina.ai/${url}`, {
@@ -37,7 +37,13 @@ export const crawUrlRules: CrawlUrlRule[] = [
37
37
  urlPattern: 'https://medium.com/(.*)',
38
38
  urlTransform: 'https://scribe.rip/$1',
39
39
  },
40
-
40
+ {
41
+ filterOptions: {
42
+ enableReadability: false,
43
+ },
44
+ impls: ['jina', 'browserless'],
45
+ urlPattern: 'https://(twitter.com|x.com)/(.*)',
46
+ },
41
47
  // 体育数据网站规则
42
48
  {
43
49
  filterOptions: {
@@ -4,6 +4,7 @@ const wenxinChatModels: AIChatModelCard[] = [
4
4
  {
5
5
  abilities: {
6
6
  functionCall: true,
7
+ search: true,
7
8
  },
8
9
  contextWindowTokens: 8192,
9
10
  description:
@@ -16,11 +17,15 @@ const wenxinChatModels: AIChatModelCard[] = [
16
17
  input: 0.8,
17
18
  output: 2,
18
19
  },
20
+ settings: {
21
+ searchImpl: 'params',
22
+ },
19
23
  type: 'chat',
20
24
  },
21
25
  {
22
26
  abilities: {
23
27
  functionCall: true,
28
+ search: true,
24
29
  },
25
30
  contextWindowTokens: 8192,
26
31
  description:
@@ -32,11 +37,15 @@ const wenxinChatModels: AIChatModelCard[] = [
32
37
  input: 0.8,
33
38
  output: 2,
34
39
  },
40
+ settings: {
41
+ searchImpl: 'params',
42
+ },
35
43
  type: 'chat',
36
44
  },
37
45
  {
38
46
  abilities: {
39
47
  functionCall: true,
48
+ search: true,
40
49
  },
41
50
  contextWindowTokens: 128_000,
42
51
  description:
@@ -49,11 +58,15 @@ const wenxinChatModels: AIChatModelCard[] = [
49
58
  input: 0.8,
50
59
  output: 2,
51
60
  },
61
+ settings: {
62
+ searchImpl: 'params',
63
+ },
52
64
  type: 'chat',
53
65
  },
54
66
  {
55
67
  abilities: {
56
68
  functionCall: true,
69
+ search: true,
57
70
  },
58
71
  contextWindowTokens: 8192,
59
72
  description:
@@ -66,11 +79,15 @@ const wenxinChatModels: AIChatModelCard[] = [
66
79
  input: 30,
67
80
  output: 90,
68
81
  },
82
+ settings: {
83
+ searchImpl: 'params',
84
+ },
69
85
  type: 'chat',
70
86
  },
71
87
  {
72
88
  abilities: {
73
89
  functionCall: true,
90
+ search: true,
74
91
  },
75
92
  contextWindowTokens: 8192,
76
93
  description:
@@ -82,11 +99,15 @@ const wenxinChatModels: AIChatModelCard[] = [
82
99
  input: 30,
83
100
  output: 90,
84
101
  },
102
+ settings: {
103
+ searchImpl: 'params',
104
+ },
85
105
  type: 'chat',
86
106
  },
87
107
  {
88
108
  abilities: {
89
109
  functionCall: true,
110
+ search: true,
90
111
  },
91
112
  contextWindowTokens: 8192,
92
113
  description:
@@ -99,11 +120,15 @@ const wenxinChatModels: AIChatModelCard[] = [
99
120
  input: 20,
100
121
  output: 60,
101
122
  },
123
+ settings: {
124
+ searchImpl: 'params',
125
+ },
102
126
  type: 'chat',
103
127
  },
104
128
  {
105
129
  abilities: {
106
130
  functionCall: true,
131
+ search: true,
107
132
  },
108
133
  contextWindowTokens: 128_000,
109
134
  description:
@@ -116,11 +141,15 @@ const wenxinChatModels: AIChatModelCard[] = [
116
141
  input: 20,
117
142
  output: 60,
118
143
  },
144
+ settings: {
145
+ searchImpl: 'params',
146
+ },
119
147
  type: 'chat',
120
148
  },
121
149
  {
122
150
  abilities: {
123
151
  functionCall: true,
152
+ search: true,
124
153
  },
125
154
  contextWindowTokens: 8192,
126
155
  description:
@@ -132,6 +161,9 @@ const wenxinChatModels: AIChatModelCard[] = [
132
161
  input: 20,
133
162
  output: 60,
134
163
  },
164
+ settings: {
165
+ searchImpl: 'params',
166
+ },
135
167
  type: 'chat',
136
168
  },
137
169
  {
@@ -1,7 +1,9 @@
1
1
  // @vitest-environment node
2
- import { ModelProvider } from '@/libs/agent-runtime';
2
+ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
3
+
4
+ import { LobeOpenAICompatibleRuntime, ModelProvider } from '@/libs/agent-runtime';
5
+ import { testProvider } from '@/libs/agent-runtime/providerTestUtils';
3
6
 
4
- import { testProvider } from '../providerTestUtils';
5
7
  import { LobeHunyuanAI } from './index';
6
8
 
7
9
  testProvider({
@@ -11,3 +13,136 @@ testProvider({
11
13
  chatDebugEnv: 'DEBUG_HUNYUAN_CHAT_COMPLETION',
12
14
  chatModel: 'hunyuan-lite',
13
15
  });
16
+
17
+ // Mock the console.error to avoid polluting test output
18
+ vi.spyOn(console, 'error').mockImplementation(() => {});
19
+
20
+ let instance: LobeOpenAICompatibleRuntime;
21
+
22
+ beforeEach(() => {
23
+ instance = new LobeHunyuanAI({ apiKey: 'test' });
24
+
25
+ // 使用 vi.spyOn 来模拟 chat.completions.create 方法
26
+ vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
27
+ new ReadableStream() as any,
28
+ );
29
+ });
30
+
31
+ describe('LobeHunyuanAI', () => {
32
+ describe('chat', () => {
33
+ it('should with search citations', async () => {
34
+ const data = [
35
+ {
36
+ id: "939fbdb8dbb9b4c5944cbbe687c977c2",
37
+ object: "chat.completion.chunk",
38
+ created: 1741000456,
39
+ model: "hunyuan-turbo",
40
+ system_fingerprint: "",
41
+ choices: [
42
+ {
43
+ index: 0,
44
+ delta: { role: "assistant", content: "为您" },
45
+ finish_reason: null
46
+ }
47
+ ],
48
+ note: "以上内容为AI生成,不代表开发者立场,请勿删除或修改本标记",
49
+ search_info: {
50
+ search_results: [
51
+ {
52
+ index: 1,
53
+ title: "公务员考试时政热点【2025年3月3日】_公务员考试网_华图教育",
54
+ url: "http://www.huatu.com/2025/0303/2803685.html",
55
+ icon: "https://hunyuan-img-1251316161.cos.ap-guangzhou.myqcloud.com/%2Fpublic/img/63ce96deffe0119827f12deaa5ffe7ef.jpg",
56
+ text: "华图教育官网"
57
+ },
58
+ {
59
+ index: 2,
60
+ title: "外交部新闻(2025年3月3日)",
61
+ url: "https://view.inews.qq.com/a/20250303A02NLC00?scene=qqsearch",
62
+ icon: "https://hunyuan-img-1251316161.cos.ap-guangzhou.myqcloud.com/%2Fpublic/img/00ce40298870d1accb7920d641152722.jpg",
63
+ text: "腾讯网"
64
+ }
65
+ ]
66
+ }
67
+ },
68
+ {
69
+ id: "939fbdb8dbb9b4c5944cbbe687c977c2",
70
+ object: "chat.completion.chunk",
71
+ created: 1741000456,
72
+ model: "hunyuan-turbo",
73
+ system_fingerprint: "",
74
+ choices: [
75
+ {
76
+ index: 0,
77
+ delta: { role: "assistant", content: "找到" },
78
+ finish_reason: null
79
+ }
80
+ ],
81
+ note: "以上内容为AI生成,不代表开发者立场,请勿删除或修改本标记",
82
+ search_info: {
83
+ search_results: [
84
+ {
85
+ index: 1,
86
+ title: "公务员考试时政热点【2025年3月3日】_公务员考试网_华图教育",
87
+ url: "http://www.huatu.com/2025/0303/2803685.html",
88
+ icon: "https://hunyuan-img-1251316161.cos.ap-guangzhou.myqcloud.com/%2Fpublic/img/63ce96deffe0119827f12deaa5ffe7ef.jpg",
89
+ text: "华图教育官网"
90
+ },
91
+ {
92
+ index: 2,
93
+ title: "外交部新闻(2025年3月3日)",
94
+ url: "https://view.inews.qq.com/a/20250303A02NLC00?scene=qqsearch",
95
+ icon: "https://hunyuan-img-1251316161.cos.ap-guangzhou.myqcloud.com/%2Fpublic/img/00ce40298870d1accb7920d641152722.jpg",
96
+ text: "腾讯网"
97
+ }
98
+ ]
99
+ }
100
+ }
101
+ ];
102
+
103
+ const mockStream = new ReadableStream({
104
+ start(controller) {
105
+ data.forEach((chunk) => {
106
+ controller.enqueue(chunk);
107
+ });
108
+
109
+ controller.close();
110
+ },
111
+ });
112
+
113
+ vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(mockStream as any);
114
+
115
+ const result = await instance.chat({
116
+ messages: [{ content: 'Hello', role: 'user' }],
117
+ model: 'mistralai/mistral-7b-instruct:free',
118
+ temperature: 0,
119
+ });
120
+
121
+ const decoder = new TextDecoder();
122
+ const reader = result.body!.getReader();
123
+ const stream: string[] = [];
124
+
125
+ while (true) {
126
+ const { value, done } = await reader.read();
127
+ if (done) break;
128
+ stream.push(decoder.decode(value));
129
+ }
130
+
131
+ expect(stream).toEqual(
132
+ [
133
+ 'id: 939fbdb8dbb9b4c5944cbbe687c977c2',
134
+ 'event: grounding',
135
+ 'data: {"citations":[{"title":"公务员考试时政热点【2025年3月3日】_公务员考试网_华图教育","url":"http://www.huatu.com/2025/0303/2803685.html"},{"title":"外交部新闻(2025年3月3日)","url":"https://view.inews.qq.com/a/20250303A02NLC00?scene=qqsearch"}]}\n',
136
+ 'id: 939fbdb8dbb9b4c5944cbbe687c977c2',
137
+ 'event: text',
138
+ 'data: "为您"\n',
139
+ 'id: 939fbdb8dbb9b4c5944cbbe687c977c2',
140
+ 'event: text',
141
+ 'data: "找到"\n',
142
+ ].map((line) => `${line}\n`),
143
+ );
144
+
145
+ expect((await reader.read()).done).toBe(true);
146
+ });
147
+ });
148
+ });
@@ -15,14 +15,15 @@ export const LobeHunyuanAI = LobeOpenAICompatibleFactory({
15
15
 
16
16
  return {
17
17
  ...rest,
18
+ stream: true,
18
19
  ...(enabledSearch && {
19
- /*
20
20
  citation: true,
21
+ enable_enhancement: true,
22
+ /*
21
23
  enable_multimedia: true,
22
- search_info: true
23
24
  */
24
- enable_enhancement: true,
25
25
  enable_speed_search: process.env.HUNYUAN_ENABLE_SPEED_SEARCH === '1',
26
+ search_info: true,
26
27
  }),
27
28
  } as any;
28
29
  },
@@ -35,7 +35,7 @@ export const LobeQwenAI = LobeOpenAICompatibleFactory({
35
35
  : presence_penalty !== undefined && presence_penalty >= -2 && presence_penalty <= 2
36
36
  ? presence_penalty
37
37
  : undefined,
38
- stream: !payload.tools,
38
+ stream: true,
39
39
  temperature:
40
40
  temperature !== undefined && temperature >= 0 && temperature < 2
41
41
  ? temperature
@@ -127,19 +127,35 @@ export const transformOpenAIStream = (
127
127
  }
128
128
 
129
129
  if (typeof content === 'string') {
130
- // in Perplexity api, the citation is in every chunk, but we only need to return it once
131
-
132
- if ('citations' in chunk && !!chunk.citations && !streamContext?.returnedPplxCitation) {
133
- streamContext.returnedPplxCitation = true;
134
-
135
- const citations = (chunk.citations as any[]).map((item) =>
136
- typeof item === 'string' ? ({ title: item, url: item } as CitationItem) : item,
137
- );
138
-
139
- return [
140
- { data: { citations }, id: chunk.id, type: 'grounding' },
141
- { data: content, id: chunk.id, type: 'text' },
142
- ];
130
+ if (!streamContext?.returnedCitation) {
131
+ const citations =
132
+ // in Perplexity api, the citation is in every chunk, but we only need to return it once
133
+ ('citations' in chunk && chunk.citations) ||
134
+ // in Hunyuan api, the citation is in every chunk
135
+ ('search_info' in chunk && (chunk.search_info as any)?.search_results) ||
136
+ // in Wenxin api, the citation is in the first and last chunk
137
+ ('search_results' in chunk && chunk.search_results);
138
+
139
+ if (citations) {
140
+ streamContext.returnedCitation = true;
141
+
142
+ return [
143
+ {
144
+ data: {
145
+ citations: (citations as any[]).map(
146
+ (item) =>
147
+ ({
148
+ title: typeof item === 'string' ? item : item.title,
149
+ url: typeof item === 'string' ? item : item.url,
150
+ }) as CitationItem
151
+ ),
152
+ },
153
+ id: chunk.id,
154
+ type: 'grounding',
155
+ },
156
+ { data: content, id: chunk.id, type: 'text' },
157
+ ];
158
+ }
143
159
  }
144
160
 
145
161
  return { data: content, id: chunk.id, type: 'text' };
@@ -9,9 +9,10 @@ export interface StreamContext {
9
9
  id: string;
10
10
  /**
11
11
  * As pplx citations is in every chunk, but we only need to return it once
12
- * this flag is used to check if the pplx citation is returned,and then not return it again
12
+ * this flag is used to check if the pplx citation is returned,and then not return it again.
13
+ * Same as Hunyuan and Wenxin
13
14
  */
14
- returnedPplxCitation?: boolean;
15
+ returnedCitation?: boolean;
15
16
  thinking?: {
16
17
  id: string;
17
18
  name: string;
@@ -0,0 +1,113 @@
1
+ // @vitest-environment node
2
+ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
3
+
4
+ import { LobeOpenAICompatibleRuntime, ModelProvider } from '@/libs/agent-runtime';
5
+ import { testProvider } from '@/libs/agent-runtime/providerTestUtils';
6
+
7
+ import { LobeWenxinAI } from './index';
8
+
9
+ testProvider({
10
+ Runtime: LobeWenxinAI,
11
+ provider: ModelProvider.Wenxin,
12
+ defaultBaseURL: 'https://qianfan.baidubce.com/v2',
13
+ chatDebugEnv: 'DEBUG_WENXIN_CHAT_COMPLETION',
14
+ chatModel: 'ernie-speed-128k',
15
+ });
16
+
17
+ // Mock the console.error to avoid polluting test output
18
+ vi.spyOn(console, 'error').mockImplementation(() => {});
19
+
20
+ let instance: LobeOpenAICompatibleRuntime;
21
+
22
+ beforeEach(() => {
23
+ instance = new LobeWenxinAI({ apiKey: 'test' });
24
+
25
+ // 使用 vi.spyOn 来模拟 chat.completions.create 方法
26
+ vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
27
+ new ReadableStream() as any,
28
+ );
29
+ });
30
+
31
+ describe('LobeWenxinAI', () => {
32
+ describe('chat', () => {
33
+ it('should with search citations', async () => {
34
+ const data = [
35
+ {
36
+ id: "as-bhrxwy5fq1",
37
+ object: "chat.completion.chunk",
38
+ created: 1741000028,
39
+ model: "ernie-4.0-8k-latest",
40
+ choices: [
41
+ {
42
+ index: 0,
43
+ delta: { content: "今天是**", role: "assistant" },
44
+ flag: 0
45
+ }
46
+ ],
47
+ search_results: [
48
+ { index: 1, url: "http://www.mnw.cn/news/shehui/", title: "社会新闻" },
49
+ { index: 2, url: "https://www.chinanews.com.cn/sh/2025/03-01/10376297.shtml", title: "中越边民共庆“春龙节”" },
50
+ { index: 3, url: "https://www.chinanews.com/china/index.shtml", title: "中国新闻网_时政" }
51
+ ]
52
+ },
53
+ {
54
+ id: "as-bhrxwy5fq1",
55
+ object: "chat.completion.chunk",
56
+ created: 1741000028,
57
+ model: "ernie-4.0-8k-latest",
58
+ choices: [
59
+ {
60
+ index: 0,
61
+ delta: { content: "20" },
62
+ flag: 0
63
+ }
64
+ ]
65
+ }
66
+ ];
67
+
68
+ const mockStream = new ReadableStream({
69
+ start(controller) {
70
+ data.forEach((chunk) => {
71
+ controller.enqueue(chunk);
72
+ });
73
+
74
+ controller.close();
75
+ },
76
+ });
77
+
78
+ vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(mockStream as any);
79
+
80
+ const result = await instance.chat({
81
+ messages: [{ content: 'Hello', role: 'user' }],
82
+ model: 'mistralai/mistral-7b-instruct:free',
83
+ temperature: 0,
84
+ });
85
+
86
+ const decoder = new TextDecoder();
87
+ const reader = result.body!.getReader();
88
+ const stream: string[] = [];
89
+
90
+ while (true) {
91
+ const { value, done } = await reader.read();
92
+ if (done) break;
93
+ stream.push(decoder.decode(value));
94
+ }
95
+
96
+ expect(stream).toEqual(
97
+ [
98
+ 'id: as-bhrxwy5fq1',
99
+ 'event: grounding',
100
+ 'data: {"citations":[{"title":"社会新闻","url":"http://www.mnw.cn/news/shehui/"},{"title":"中越边民共庆“春龙节”","url":"https://www.chinanews.com.cn/sh/2025/03-01/10376297.shtml"},{"title":"中国新闻网_时政","url":"https://www.chinanews.com/china/index.shtml"}]}\n',
101
+ 'id: as-bhrxwy5fq1',
102
+ 'event: text',
103
+ 'data: "今天是**"\n',
104
+ 'id: as-bhrxwy5fq1',
105
+ 'event: text',
106
+ 'data: "20"\n',
107
+ ].map((line) => `${line}\n`),
108
+ );
109
+
110
+ expect((await reader.read()).done).toBe(true);
111
+ });
112
+ });
113
+ });
@@ -3,6 +3,23 @@ import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
3
3
 
4
4
  export const LobeWenxinAI = LobeOpenAICompatibleFactory({
5
5
  baseURL: 'https://qianfan.baidubce.com/v2',
6
+ chatCompletion: {
7
+ handlePayload: (payload) => {
8
+ const { enabledSearch, ...rest } = payload;
9
+
10
+ return {
11
+ ...rest,
12
+ stream: true,
13
+ ...(enabledSearch && {
14
+ web_search: {
15
+ enable: true,
16
+ enable_citation: true,
17
+ enable_trace: true,
18
+ }
19
+ }),
20
+ } as any;
21
+ },
22
+ },
6
23
  debug: {
7
24
  chatCompletion: () => process.env.DEBUG_WENXIN_CHAT_COMPLETION === '1',
8
25
  },