@lobehub/chat 1.82.2 β†’ 1.82.4

This diff shows the changes between publicly released versions of the package as they appear in the supported public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,56 @@

 # Changelog

+### [Version 1.82.4](https://github.com/lobehub/lobe-chat/compare/v1.82.3...v1.82.4)
+
+<sup>Released on **2025-04-24**</sup>
+
+#### πŸ› Bug Fixes
+
+- **misc**: Fix hydration error.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix hydration error, closes [#7535](https://github.com/lobehub/lobe-chat/issues/7535) ([e130855](https://github.com/lobehub/lobe-chat/commit/e130855))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
+### [Version 1.82.3](https://github.com/lobehub/lobe-chat/compare/v1.82.2...v1.82.3)
+
+<sup>Released on **2025-04-24**</sup>
+
+#### πŸ› Bug Fixes
+
+- **misc**: Fix openai tools calling.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix openai tools calling, closes [#7523](https://github.com/lobehub/lobe-chat/issues/7523) ([f43bd24](https://github.com/lobehub/lobe-chat/commit/f43bd24))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
 ### [Version 1.82.2](https://github.com/lobehub/lobe-chat/compare/v1.82.1...v1.82.2)

 <sup>Released on **2025-04-23**</sup>
package/changelog/v1.json CHANGED
@@ -1,4 +1,22 @@
 [
+  {
+    "children": {
+      "fixes": [
+        "Fix hydration error."
+      ]
+    },
+    "date": "2025-04-24",
+    "version": "1.82.4"
+  },
+  {
+    "children": {
+      "fixes": [
+        "Fix openai tools calling."
+      ]
+    },
+    "date": "2025-04-24",
+    "version": "1.82.3"
+  },
   {
     "children": {
       "improvements": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.82.2",
+  "version": "1.82.4",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
@@ -264,13 +264,13 @@ class _MessageModel extends BaseModel {
     translate,
     tts,
     ...item
-  }: DBModel<DB_Message>): ChatMessage => {
+  }: DBModel<DB_Message>) => {
     return {
       ...item,
       extra: { fromModel, fromProvider, translate, tts },
       meta: {},
       topicId: item.topicId ?? undefined,
-    };
+    } as ChatMessage;
   };
 }

@@ -11,7 +11,7 @@ const PluginSchema = z.object({
   identifier: z.string(),
   arguments: z.string(),
   apiName: z.string(),
-  type: z.enum(['default', 'markdown', 'standalone', 'builtin']).default('default'),
+  type: z.enum(['default', 'markdown', 'standalone', 'builtin', 'mcp']).default('default'),
 });

 const ToolCallSchema = PluginSchema.extend({
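The new 'mcp' enum member lets tool payloads of that type (presumably Model Context Protocol tools) pass schema validation instead of being rejected. A minimal standalone sketch of the same zod pattern, not the project's actual module; the identifier, apiName, and arguments values below are made up for illustration:

import { z } from 'zod';

// Same shape as the schema in the hunk above; 'mcp' is now an accepted plugin type.
const PluginSchema = z.object({
  identifier: z.string(),
  arguments: z.string(),
  apiName: z.string(),
  type: z.enum(['default', 'markdown', 'standalone', 'builtin', 'mcp']).default('default'),
});

// Before this change, a payload with type 'mcp' failed the enum check; now it parses cleanly.
const parsed = PluginSchema.parse({
  identifier: 'web-search',            // hypothetical values, for illustration only
  apiName: 'search',
  arguments: '{"query":"lobe-chat"}',
  type: 'mcp',
});

console.log(parsed.type); // 'mcp'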
@@ -742,6 +742,166 @@ describe('OpenAIStream', () => {

       expect(onToolCallMock).toHaveBeenCalledTimes(2);
     });
+
+    it('should handle vLLM tools Calling', async () => {
+      const streamData = [
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [
+            {
+              index: 0,
+              delta: {
+                tool_calls: [
+                  {
+                    id: 'chatcmpl-tool-ca9bc139abc449388c6457d5decc949b',
+                    type: 'function',
+                    index: 0,
+                    function: { name: 'BingBS____BingSearch' },
+                  },
+                ],
+              },
+              logprobs: null,
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [
+            {
+              index: 0,
+              delta: { tool_calls: [{ index: 0, function: { arguments: '{"query": "' } }] },
+              logprobs: null,
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [
+            {
+              index: 0,
+              delta: { tool_calls: [{ index: 0, function: { arguments: 'ζœ€θΏ‘' } }] },
+              logprobs: null,
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [
+            {
+              index: 0,
+              delta: { tool_calls: [{ index: 0, function: { arguments: 'ζ–°ι—»' } }] },
+              logprobs: null,
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [
+            {
+              index: 0,
+              delta: { tool_calls: [{ index: 0, function: { arguments: '"}' } }] },
+              logprobs: null,
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [
+            {
+              index: 0,
+              delta: { content: '' },
+              logprobs: null,
+              finish_reason: 'tool_calls',
+              stop_reason: null,
+            },
+          ],
+        },
+        {
+          id: '1',
+          object: 'chat.completion.chunk',
+          created: 1745385918,
+          model: 'Qwen/Qwen2.5-7B-Instruct-1M',
+          choices: [],
+          usage: { prompt_tokens: 333, total_tokens: 359, completion_tokens: 26 },
+        },
+      ];
+
+      const mockOpenAIStream = new ReadableStream({
+        start(controller) {
+          streamData.forEach((data) => {
+            controller.enqueue(data);
+          });
+
+          controller.close();
+        },
+      });
+
+      const onToolCallMock = vi.fn();
+
+      const protocolStream = OpenAIStream(mockOpenAIStream, {
+        callbacks: {
+          onToolsCalling: onToolCallMock,
+        },
+      });
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual(
+        [
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"","name":"BingBS____BingSearch"},"id":"chatcmpl-tool-ca9bc139abc449388c6457d5decc949b","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"{\\"query\\": \\"","name":null},"id":"chatcmpl-tool-ca9bc139abc449388c6457d5decc949b","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"ζœ€θΏ‘","name":null},"id":"chatcmpl-tool-ca9bc139abc449388c6457d5decc949b","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"ζ–°ι—»","name":null},"id":"chatcmpl-tool-ca9bc139abc449388c6457d5decc949b","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"\\"}","name":null},"id":"chatcmpl-tool-ca9bc139abc449388c6457d5decc949b","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: stop',
+          `data: "tool_calls"\n`,
+          'id: 1',
+          'event: usage',
+          `data: {"inputTextTokens":333,"outputTextTokens":26,"totalInputTokens":333,"totalOutputTokens":26,"totalTokens":359}\n`,
+        ].map((i) => `${i}\n`),
+      );
+
+    });
   });

   describe('Reasoning', () => {
@@ -70,7 +70,7 @@ export const transformOpenAIStream = (

       return {
         function: {
-          arguments: value.function?.arguments ?? '{}',
+          arguments: value.function?.arguments ?? '',
           name: value.function?.name ?? null,
         },
         id:
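The vLLM fixture above suggests why the fallback changes: the first delta carries only the tool name, and the argument string arrives in later fragments that the consumer concatenates, so a '{}' placeholder on that name-only chunk would be prepended to the streamed fragments and corrupt the final JSON. A minimal sketch of that concatenation, illustrative only and not the project's actual accumulator (only the fragment strings are taken from the fixture):

// Argument fragments as they appear in the fixture's tool_call deltas,
// starting with the empty default emitted for the name-only first chunk.
const fragments = ['', '{"query": "', 'ζœ€θΏ‘', 'ζ–°ι—»', '"}'];

// With '' as the default, the pieces join into valid JSON...
console.log(JSON.parse(fragments.join(''))); // { query: 'ζœ€θΏ‘ζ–°ι—»' }

// ...whereas a '{}' default would yield '{}{"query": "ζœ€θΏ‘ζ–°ι—»"}', which does not parse.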
@@ -1,11 +0,0 @@
-import { PropsWithChildren } from 'react';
-
-const Layout = ({ children }: PropsWithChildren) => {
-  return (
-    <html>
-      <body>{children}</body>
-    </html>
-  );
-};
-
-export default Layout;
@@ -1 +0,0 @@
-export { default } from '@/components/404';