@lobehub/chat 1.63.2 → 1.63.3
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
 
 # Changelog
 
+### [Version 1.63.3](https://github.com/lobehub/lobe-chat/compare/v1.63.2...v1.63.3)
+
+<sup>Released on **2025-02-24**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix citation=null issue in stream.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix citation=null issue in stream, closes [#6461](https://github.com/lobehub/lobe-chat/issues/6461) ([3f9498e](https://github.com/lobehub/lobe-chat/commit/3f9498e))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.63.2](https://github.com/lobehub/lobe-chat/compare/v1.63.1...v1.63.2)
 
 <sup>Released on **2025-02-24**</sup>
package/changelog/v1.json
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.63.2",
+  "version": "1.63.3",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
@@ -216,6 +216,32 @@ describe('AiInfraRepos', () => {
         }),
       );
     });
+
+    it('should include settings property from builtin model', async () => {
+      const mockProviders = [
+        { enabled: true, id: 'openai', name: 'OpenAI', source: 'builtin' },
+      ] as AiProviderListItem[];
+      const mockAllModels: EnabledAiModel[] = [];
+      const mockSettings = { searchImpl: 'tool' as const };
+
+      vi.spyOn(repo, 'getAiProviderList').mockResolvedValue(mockProviders);
+      vi.spyOn(repo.aiModelModel, 'getAllModels').mockResolvedValue(mockAllModels);
+      vi.spyOn(repo as any, 'fetchBuiltinModels').mockResolvedValue([
+        {
+          enabled: true,
+          id: 'gpt-4',
+          settings: mockSettings,
+          type: 'chat',
+        },
+      ]);
+
+      const result = await repo.getEnabledModels();
+
+      expect(result[0]).toMatchObject({
+        id: 'gpt-4',
+        settings: mockSettings,
+      });
+    });
   });
 
   describe('getAiProviderModelList', () => {
@@ -239,6 +265,7 @@ describe('AiInfraRepos', () => {
         ]),
       );
     });
+
     it('should merge default and custom models', async () => {
       const mockCustomModels = [
         {
@@ -321,6 +348,7 @@ describe('AiInfraRepos', () => {
         runtimeConfig: expect.any(Object),
       });
     });
+
     it('should return provider runtime state', async () => {
       const mockRuntimeConfig = {
         openai: {
@@ -385,6 +413,7 @@ describe('AiInfraRepos', () => {
         enabled: true, // from mockProviderConfigs
       });
     });
+
     it('should merge provider configs correctly', async () => {
       const mockProviderDetail = {
         enabled: true,
@@ -66,6 +66,29 @@ describe('LobePerplexityAI', () => {
 
     it('should with search citations', async () => {
       const data = [
+        {
+          id: '506d64fb-e7f2-4d94-b80f-158369e9446d',
+          model: 'sonar-pro',
+          created: 1739896615,
+          object: 'chat.completion.chunk',
+          choices: [
+            {
+              finish_reason: null,
+              index: 0,
+              delta: {
+                refusal: null,
+                content: '<think>',
+                role: 'assistant',
+                function_call: null,
+                tool_calls: null,
+                audio: null,
+              },
+              logprobs: null,
+            },
+          ],
+          stream_options: null,
+          citations: null,
+        },
         {
           id: '506d64fb-e7f2-4d94-b80f-158369e9446d',
           model: 'sonar-pro',
@@ -202,6 +225,9 @@ describe('LobePerplexityAI', () => {
 
       expect(stream).toEqual(
         [
+          'id: 506d64fb-e7f2-4d94-b80f-158369e9446d',
+          'event: text',
+          'data: "<think>"\n',
           'id: 506d64fb-e7f2-4d94-b80f-158369e9446d',
           'event: grounding',
           'data: {"citations":[{"title":"https://www.weather.com.cn/weather/101210101.shtml","url":"https://www.weather.com.cn/weather/101210101.shtml"},{"title":"https://tianqi.moji.com/weather/china/zhejiang/hangzhou","url":"https://tianqi.moji.com/weather/china/zhejiang/hangzhou"},{"title":"https://weather.cma.cn/web/weather/58457.html","url":"https://weather.cma.cn/web/weather/58457.html"},{"title":"https://tianqi.so.com/weather/101210101","url":"https://tianqi.so.com/weather/101210101"},{"title":"https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832","url":"https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832"},{"title":"https://www.hzqx.com","url":"https://www.hzqx.com"},{"title":"https://www.hzqx.com/pc/hztq/","url":"https://www.hzqx.com/pc/hztq/"}]}\n',
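
The updated expectation above also documents the shape of the transformed stream: every chunk becomes an "id:" line, an "event:" line, and a JSON-encoded "data:" line, and the new fixture chunk that arrives with citations: null is now expected to surface as a plain text event carrying its '<think>' content. A minimal sketch of that three-line framing follows; the helper name and types are invented for illustration and are not part of the package:

// Hypothetical helper mirroring the framing asserted in the expectation above.
type SketchEvent = { id: string; event: 'text' | 'grounding'; data: unknown };

const toProtocolLines = ({ id, event, data }: SketchEvent): string[] => [
  `id: ${id}`,
  `event: ${event}`,
  `data: ${JSON.stringify(data)}\n`,
];

// The null-citation chunk from the new fixture should come through as a text event:
console.log(
  toProtocolLines({
    id: '506d64fb-e7f2-4d94-b80f-158369e9446d',
    event: 'text',
    data: '<think>',
  }),
);
// -> ['id: 506d64fb-e7f2-4d94-b80f-158369e9446d', 'event: text', 'data: "<think>"\n']
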
@@ -119,7 +119,7 @@ export const transformOpenAIStream = (
     if (typeof content === 'string') {
       // in Perplexity api, the citation is in every chunk, but we only need to return it once
 
-      if ('citations' in chunk && !streamContext?.returnedPplxCitation) {
+      if ('citations' in chunk && !!chunk.citations && !streamContext?.returnedPplxCitation) {
         streamContext.returnedPplxCitation = true;
 
         const citations = (chunk.citations as any[]).map((item) =>
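
This one-line change is the fix referenced by the changelog entry "Fix citation=null issue in stream": Perplexity includes a citations field on every streamed chunk, and as the new test fixture shows, the first chunk can carry citations: null. The old condition let that null value pass through to the citation-mapping code below and marked the citations as already returned; the added !!chunk.citations check makes the guard wait for a chunk with real citations. A standalone sketch of the strengthened guard, using simplified stand-in types rather than the package's actual signatures:

// Simplified stand-in types for illustration only.
interface SketchChunk {
  citations?: string[] | null; // present on every Perplexity chunk, may be null at first
}

interface SketchStreamContext {
  returnedPplxCitation?: boolean;
}

// Returns the citations to emit for this chunk, or undefined when nothing should be emitted.
const pickCitationsOnce = (
  chunk: SketchChunk,
  ctx: SketchStreamContext,
): string[] | undefined => {
  // Emit only when citations are actually present AND they have not been returned yet.
  if ('citations' in chunk && !!chunk.citations && !ctx.returnedPplxCitation) {
    ctx.returnedPplxCitation = true;
    return chunk.citations;
  }
  return undefined;
};

// Mirrors the test fixture: the first chunk carries citations: null,
// a later chunk carries the real list, and it is emitted exactly once.
const ctx: SketchStreamContext = {};
console.log(pickCitationsOnce({ citations: null }, ctx)); // undefined
console.log(pickCitationsOnce({ citations: ['https://example.com'] }, ctx)); // ['https://example.com']
console.log(pickCitationsOnce({ citations: ['https://example.com'] }, ctx)); // undefined (already returned)
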