@lobehub/chat 1.12.18 → 1.12.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/package.json +2 -2
- package/src/database/server/models/session.ts +2 -1
- package/src/features/ChatInput/ActionBar/Knowledge/index.tsx +7 -0
- package/src/features/ChatInput/ActionBar/Upload/index.tsx +5 -1
- package/src/libs/agent-runtime/groq/index.test.ts +1 -1
- package/src/libs/agent-runtime/qwen/index.test.ts +2 -2
- package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.test.ts +28 -22
package/CHANGELOG.md
CHANGED

@@ -2,6 +2,56 @@

 # Changelog

+### [Version 1.12.20](https://github.com/lobehub/lobe-chat/compare/v1.12.19...v1.12.20)
+
+<sup>Released on **2024-08-26**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Feature flag `knowledge_base` doesn't affect ActionBar.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Feature flag `knowledge_base` doesn't affect ActionBar, closes [#3609](https://github.com/lobehub/lobe-chat/issues/3609) ([1a5286b](https://github.com/lobehub/lobe-chat/commit/1a5286b))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+### [Version 1.12.19](https://github.com/lobehub/lobe-chat/compare/v1.12.18...v1.12.19)
+
+<sup>Released on **2024-08-25**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix cannot clone agent when imported from client.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix cannot clone agent when imported from client, closes [#3606](https://github.com/lobehub/lobe-chat/issues/3606) ([1fd2fa0](https://github.com/lobehub/lobe-chat/commit/1fd2fa0))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.12.18](https://github.com/lobehub/lobe-chat/compare/v1.12.17...v1.12.18)

 <sup>Released on **2024-08-25**</sup>
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.12.18",
+  "version": "1.12.20",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
@@ -171,7 +171,7 @@
     "nuqs": "^1.17.8",
     "officeparser": "^4.1.1",
     "ollama": "^0.5.8",
-    "openai": "
+    "openai": "^4.56.0",
     "partial-json": "^0.1.7",
     "pdf-parse": "^1.1.1",
     "pdfjs-dist": "4.4.168",

package/src/database/server/models/session.ts
CHANGED

@@ -170,7 +170,8 @@ export class SessionModel

     if (!result) return;

-    const { agent, ...session } = result;
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    const { agent, clientId, ...session } = result;
     const sessionId = this.genId();

     // eslint-disable-next-line @typescript-eslint/no-unused-vars
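
The clone fix above hinges on the widened rest destructure: the client-assigned `clientId` is pulled out alongside the `agent` relation so it never reaches the duplicated row. A minimal sketch of that pattern, using a hypothetical `SessionRow` shape rather than the real schema:

```ts
// Sketch only: separate the fields that must not be copied verbatim
// (the `agent` relation and the client-assigned `clientId`) from the
// columns that are safe to reuse for the duplicated session.
interface SessionRow {
  agent: { id: string; title: string | null };
  clientId: string | null;
  id: string;
  title: string | null;
}

const toDuplicablePayload = (result: SessionRow) => {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const { agent, clientId, ...session } = result;
  return session; // reinserted later under a freshly generated session id
};
```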

package/src/features/ChatInput/ActionBar/Knowledge/index.tsx
CHANGED

@@ -5,6 +5,7 @@ import { useTranslation } from 'react-i18next';

 import TipGuide from '@/components/TipGuide';
 import { isServerMode } from '@/const/version';
+import { featureFlagsSelectors, useServerConfigStore } from '@/store/serverConfig';
 import { useUserStore } from '@/store/user';
 import { preferenceSelectors } from '@/store/user/selectors';

@@ -15,11 +16,17 @@ const enableKnowledge = isServerMode;
 const Knowledge = memo(() => {
   const { t } = useTranslation('chat');

+  const { enableKnowledgeBase } = useServerConfigStore(featureFlagsSelectors);
+
   const [showTip, updateGuideState] = useUserStore((s) => [
     preferenceSelectors.showUploadFileInKnowledgeBaseTip(s),
     s.updateGuideState,
   ]);

+  if (!enableKnowledgeBase) {
+    return null;
+  }
+
   const content = (
     <DropdownMenu>
       <ActionIcon

package/src/features/ChatInput/ActionBar/Upload/index.tsx
CHANGED

@@ -1,8 +1,12 @@
 import { isServerMode } from '@/const/version';
+import { featureFlagsSelectors, useServerConfigStore } from '@/store/serverConfig';

 import ClientMode from './ClientMode';
 import ServerMode from './ServerMode';

-const Upload =
+const Upload = () => {
+  const { enableKnowledgeBase } = useServerConfigStore(featureFlagsSelectors);
+  return isServerMode && enableKnowledgeBase ? <ServerMode /> : <ClientMode />;
+};

 export default Upload;
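
Both component changes above follow the same gating pattern: read `enableKnowledgeBase` from the server config store via `featureFlagsSelectors` and skip rendering when the flag is off. A minimal sketch of that pattern, reusing the store and selector named in the diff; the component name and markup are placeholders:

```tsx
import { memo } from 'react';

import { featureFlagsSelectors, useServerConfigStore } from '@/store/serverConfig';

// Sketch only: any ActionBar item that should disappear when the
// `knowledge_base` feature flag is disabled can gate itself the same way.
const KnowledgeBaseAction = memo(() => {
  const { enableKnowledgeBase } = useServerConfigStore(featureFlagsSelectors);

  // Returning null keeps the item out of the ActionBar entirely.
  if (!enableKnowledgeBase) return null;

  return <span>knowledge base action</span>;
});

export default KnowledgeBaseAction;
```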

package/src/libs/agent-runtime/qwen/index.test.ts
CHANGED

@@ -106,7 +106,7 @@ describe('LobeQwenAI', () => {
     });

     it('should transform non-streaming response to stream correctly', async () => {
-      const mockResponse
+      const mockResponse = {
        id: 'chatcmpl-fc539f49-51a8-94be-8061',
        object: 'chat.completion',
        created: 1719901794,
@@ -119,7 +119,7 @@ describe('LobeQwenAI', () => {
            logprobs: null,
          },
        ],
-      };
+      } as OpenAI.ChatCompletion;
       vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
         mockResponse as any,
       );

package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.test.ts
CHANGED

@@ -342,7 +342,7 @@ describe('LobeOpenAICompatibleFactory', () => {
     });

     it('should transform non-streaming response to stream correctly', async () => {
-      const mockResponse
+      const mockResponse = {
        id: 'a',
        object: 'chat.completion',
        created: 123,
@@ -360,7 +360,7 @@ describe('LobeOpenAICompatibleFactory', () => {
          completion_tokens: 5,
          total_tokens: 10,
        },
-      };
+      } as OpenAI.ChatCompletion;
       vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
         mockResponse as any,
       );
@@ -426,27 +426,29 @@ describe('LobeOpenAICompatibleFactory', () => {
        },
        provider: ModelProvider.Mistral,
      });
-
+
      const instance = new LobeMockProvider({ apiKey: 'test' });
-      const mockCreateMethod = vi
-
+      const mockCreateMethod = vi
+        .spyOn(instance['client'].chat.completions, 'create')
+        .mockResolvedValue(new ReadableStream() as any);
+
      await instance.chat(
        {
          messages: [{ content: 'Hello', role: 'user' }],
          model: 'open-mistral-7b',
          temperature: 0,
        },
-        { user: 'testUser' }
+        { user: 'testUser' },
      );
-
+
      expect(mockCreateMethod).toHaveBeenCalledWith(
        expect.not.objectContaining({
          user: 'testUser',
        }),
-        expect.anything()
+        expect.anything(),
      );
    });
-
+
    it('should add user to payload when noUserId is false', async () => {
      const LobeMockProvider = LobeOpenAICompatibleFactory({
        baseURL: 'https://api.mistral.ai/v1',
@@ -455,50 +457,54 @@ describe('LobeOpenAICompatibleFactory', () => {
        },
        provider: ModelProvider.Mistral,
      });
-
+
      const instance = new LobeMockProvider({ apiKey: 'test' });
-      const mockCreateMethod = vi
-
+      const mockCreateMethod = vi
+        .spyOn(instance['client'].chat.completions, 'create')
+        .mockResolvedValue(new ReadableStream() as any);
+
      await instance.chat(
        {
          messages: [{ content: 'Hello', role: 'user' }],
          model: 'open-mistral-7b',
          temperature: 0,
        },
-        { user: 'testUser' }
+        { user: 'testUser' },
      );
-
+
      expect(mockCreateMethod).toHaveBeenCalledWith(
        expect.objectContaining({
          user: 'testUser',
        }),
-        expect.anything()
+        expect.anything(),
      );
    });
-
+
    it('should add user to payload when noUserId is not set in chatCompletion', async () => {
      const LobeMockProvider = LobeOpenAICompatibleFactory({
        baseURL: 'https://api.mistral.ai/v1',
        provider: ModelProvider.Mistral,
      });
-
+
      const instance = new LobeMockProvider({ apiKey: 'test' });
-      const mockCreateMethod = vi
-
+      const mockCreateMethod = vi
+        .spyOn(instance['client'].chat.completions, 'create')
+        .mockResolvedValue(new ReadableStream() as any);
+
      await instance.chat(
        {
          messages: [{ content: 'Hello', role: 'user' }],
          model: 'open-mistral-7b',
          temperature: 0,
        },
-        { user: 'testUser' }
+        { user: 'testUser' },
      );
-
+
      expect(mockCreateMethod).toHaveBeenCalledWith(
        expect.objectContaining({
          user: 'testUser',
        }),
-        expect.anything()
+        expect.anything(),
      );
    });
  });
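
The test updates above all follow the same mocking recipe: build only the response fields the test needs, assert the object to the SDK's `OpenAI.ChatCompletion` type, and stub `chat.completions.create` with `vi.spyOn(...).mockResolvedValue(...)`. A self-contained sketch of that recipe against a plain OpenAI client (in the tests above the client sits inside a provider created by `LobeOpenAICompatibleFactory`); the model name and response values are placeholders:

```ts
import OpenAI from 'openai';
import { describe, expect, it, vi } from 'vitest';

describe('stubbing chat.completions.create (sketch)', () => {
  it('resolves with a typed mock response', async () => {
    // A throwaway client; the base URL is never contacted because create() is stubbed.
    const client = new OpenAI({ apiKey: 'test', baseURL: 'https://example.invalid/v1' });

    // Only the fields the assertion needs, asserted to the SDK type with the
    // same `as OpenAI.ChatCompletion` cast the updated tests use.
    const mockResponse = {
      choices: [
        {
          finish_reason: 'stop',
          index: 0,
          logprobs: null,
          message: { content: 'Hello there', refusal: null, role: 'assistant' },
        },
      ],
      created: 123,
      id: 'chatcmpl-test',
      model: 'mock-model',
      object: 'chat.completion',
    } as OpenAI.ChatCompletion;

    // Stub the SDK method so the test never performs a network call.
    const createSpy = vi
      .spyOn(client.chat.completions, 'create')
      .mockResolvedValue(mockResponse as any);

    const result = await client.chat.completions.create({
      messages: [{ content: 'Hello', role: 'user' }],
      model: 'mock-model',
    });

    expect(createSpy).toHaveBeenCalledTimes(1);
    expect(result).toBe(mockResponse);
  });
});
```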