@lobehub/chat 1.52.12 → 1.52.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,32 @@
 
 # Changelog
 
+ ### [Version 1.52.13](https://github.com/lobehub/lobe-chat/compare/v1.52.12...v1.52.13)
+
+ <sup>Released on **2025-02-10**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Fix Aliyun deepseek-r1 reasoning parsing with oneapi, Support Aliyun deepseek-r1 reasoning.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Fix Aliyun deepseek-r1 reasoning parsing with oneapi, closes [#5964](https://github.com/lobehub/lobe-chat/issues/5964) ([0d7e665](https://github.com/lobehub/lobe-chat/commit/0d7e665))
+ - **misc**: Support Aliyun deepseek-r1 reasoning, closes [#5954](https://github.com/lobehub/lobe-chat/issues/5954) ([cf7a2d6](https://github.com/lobehub/lobe-chat/commit/cf7a2d6))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
 ### [Version 1.52.12](https://github.com/lobehub/lobe-chat/compare/v1.52.11...v1.52.12)
 
 <sup>Released on **2025-02-10**</sup>
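Both fixes concern streamed chat-completion chunks in which Aliyun Bailian's deepseek-r1 (here proxied through oneapi) carries its chain of thought in a `reasoning_content` field next to the regular `content` field. A minimal sketch of the delta shape involved, inferred from the test data added later in this diff (the `ReasoningDelta` type name is illustrative, not part of the package):

```typescript
// Sketch only: the field names are taken from the test data in this diff;
// the type name is hypothetical.
interface ReasoningDelta {
  role?: 'assistant';
  content?: string | null; // regular answer text
  reasoning_content?: string | null; // chain-of-thought text
}

// Aliyun Bailian emits both fields on every chunk, with one side empty:
const thinkingChunk: ReasoningDelta = { content: '', reasoning_content: '您好' };
const answerChunk: ReasoningDelta = { content: '你好', reasoning_content: '' };
```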
package/changelog/v1.json CHANGED
@@ -1,4 +1,13 @@
 [
+  {
+    "children": {
+      "fixes": [
+        "Fix Aliyun deepseek-r1 reasoning parsing with oneapi, Support Aliyun deepseek-r1 reasoning."
+      ]
+    },
+    "date": "2025-02-10",
+    "version": "1.52.13"
+  },
   {
     "children": {
       "fixes": [
@@ -36,6 +36,10 @@ To install aaPanel, go to the [aaPanel](https://www.aapanel.com/new/download.htm
 
 6. After submission, the panel will automatically initialize the application, which will take about `1-3` minutes. It can be accessed after the initialization is completed.
 
+ <Callout type="warning">
+ ⚠️ Do not enable any form of cache in the reverse proxy settings of the panel to avoid affecting the normal operation of the service. Read more at https://github.com/lobehub/lobe-chat/discussions/5986
+ </Callout>
+
 ## Visit LobeChat
 
 - If you have set a domain name, please directly enter the domain name in the browser address bar, such as `http://demo.lobechat`, to access the `LobeChat` console.
@@ -40,6 +40,10 @@ tags:
 
 5. After submission, the panel will automatically initialize the application, which takes about `1-3` minutes; it can be accessed once initialization is complete.
 
+ <Callout type="warning">
+ ⚠️ Do not enable any form of cache in the panel's reverse proxy settings, to avoid affecting the normal operation of the service. Read more at https://github.com/lobehub/lobe-chat/discussions/5986
+ </Callout>
+
 ## Visit LobeChat
 
 - If you have set a domain name, enter it in the browser address bar, such as `http://demo.lobechat`, to access the `LobeChat` page.
@@ -226,6 +226,10 @@ The script supports the following deployment modes; please choose the appropriat
   proxy_set_header X-Forwarded-Proto $scheme; # Keep the request protocol
 }
 ```
+
+ ⚠️ If you are using such panel software,
+ do not enable any form of caching in its reverse proxy settings, to avoid affecting the normal operation of the service.
+ Read more at https://github.com/lobehub/lobe-chat/discussions/5986
 </Callout>
 
 ### Complete Remaining Configuration in Interactive Script
@@ -224,6 +224,9 @@ bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
   proxy_set_header X-Forwarded-Proto $scheme; # Keep the request protocol
 }
 ```
+
+ ⚠️ Do not enable any form of caching in the reverse proxy settings of such panel software, to avoid affecting the normal operation of the service.
+ Read more at https://github.com/lobehub/lobe-chat/discussions/5986
 </Callout>
 
 ### Complete Remaining Configuration in the Interactive Script
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.52.12",
+  "version": "1.52.13",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
@@ -755,6 +755,225 @@ describe('OpenAIStream', () => {
     );
   });
 
+  it('should handle reasoning event in aliyun bailian api', async () => {
+    const data = [
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { role: 'assistant', content: '', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '', reasoning_content: '您好' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '', reasoning_content: '!' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '你好', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '很高兴', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '为您', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '提供', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '帮助。', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: '1',
+        object: 'chat.completion.chunk',
+        created: 1737563070,
+        model: 'deepseek-reasoner',
+        system_fingerprint: 'fp_1c5d8833bc',
+        choices: [
+          {
+            index: 0,
+            delta: { content: '', reasoning_content: '' },
+            logprobs: null,
+            finish_reason: 'stop',
+          },
+        ],
+        usage: {
+          prompt_tokens: 6,
+          completion_tokens: 104,
+          total_tokens: 110,
+          prompt_tokens_details: { cached_tokens: 0 },
+          completion_tokens_details: { reasoning_tokens: 70 },
+          prompt_cache_hit_tokens: 0,
+          prompt_cache_miss_tokens: 6,
+        },
+      },
+    ];
+
+    const mockOpenAIStream = new ReadableStream({
+      start(controller) {
+        data.forEach((chunk) => {
+          controller.enqueue(chunk);
+        });
+
+        controller.close();
+      },
+    });
+
+    const protocolStream = OpenAIStream(mockOpenAIStream);
+
+    const decoder = new TextDecoder();
+    const chunks = [];
+
+    // @ts-ignore
+    for await (const chunk of protocolStream) {
+      chunks.push(decoder.decode(chunk, { stream: true }));
+    }
+
+    expect(chunks).toEqual(
+      [
+        'id: 1',
+        'event: reasoning',
+        `data: ""\n`,
+        'id: 1',
+        'event: reasoning',
+        `data: "您好"\n`,
+        'id: 1',
+        'event: reasoning',
+        `data: "!"\n`,
+        'id: 1',
+        'event: reasoning',
+        `data: ""\n`,
+        'id: 1',
+        'event: text',
+        `data: "你好"\n`,
+        'id: 1',
+        'event: text',
+        `data: "很高兴"\n`,
+        'id: 1',
+        'event: text',
+        `data: "为您"\n`,
+        'id: 1',
+        'event: text',
+        `data: "提供"\n`,
+        'id: 1',
+        'event: text',
+        `data: "帮助。"\n`,
+        'id: 1',
+        'event: stop',
+        `data: "stop"\n`,
+      ].map((i) => `${i}\n`),
+    );
+  });
+
   it('should handle reasoning in litellm', async () => {
     const data = [
       {
@@ -87,20 +87,31 @@ export const transformOpenAIStream = (
     return { data: item.finish_reason, id: chunk.id, type: 'stop' };
   }
 
-  // DeepSeek reasoner will put thinking in the reasoning_content field
-  // litellm will not set content = null when processing reasoning content
-  // en: siliconflow has encountered a situation where both content and reasoning_content are present, so the parsing order go ahead
-  // refs: https://github.com/lobehub/lobe-chat/issues/5681
-  if (
-    item.delta &&
-    'reasoning_content' in item.delta &&
-    typeof item.delta.reasoning_content === 'string'
-  ) {
-    return { data: item.delta.reasoning_content, id: chunk.id, type: 'reasoning' };
-  }
+  if (item.delta) {
+    let reasoning_content =
+      'reasoning_content' in item.delta ? item.delta.reasoning_content : null;
+    let content = 'content' in item.delta ? item.delta.content : null;
+
+    // DeepSeek reasoner will put thinking in the reasoning_content field
+    // litellm will not set content = null when processing reasoning content
+    // siliconflow and aliyun bailian have been observed to send both content and reasoning_content at once, so this case needs to be handled
+    // refs: https://github.com/lobehub/lobe-chat/issues/5681 (siliconflow)
+    // refs: https://github.com/lobehub/lobe-chat/issues/5956 (aliyun bailian)
+    if (typeof content === 'string' && typeof reasoning_content === 'string') {
+      if (content === '' && reasoning_content === '') {
+        content = null;
+      } else if (reasoning_content === '') {
+        reasoning_content = null;
+      }
+    }
 
-  if (typeof item.delta?.content === 'string') {
-    return { data: item.delta.content, id: chunk.id, type: 'text' };
+    if (typeof reasoning_content === 'string') {
+      return { data: reasoning_content, id: chunk.id, type: 'reasoning' };
+    }
+
+    if (typeof content === 'string') {
+      return { data: content, id: chunk.id, type: 'text' };
+    }
   }
 
   // no content case
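The heart of fix [#5964](https://github.com/lobehub/lobe-chat/issues/5964) is the normalization above: when a provider sends both `content` and `reasoning_content` as strings, the empty side is dropped so reasoning text is not misclassified as answer text, and a chunk where both are empty stays in the reasoning phase. Below is a standalone restatement of that decision logic, as a sketch only (the `classifyDelta` helper and `Delta` type are illustrative names, not part of the package):

```typescript
// Illustrative re-statement of the branch added above; names are hypothetical.
type Delta = { content?: string | null; reasoning_content?: string | null };

const classifyDelta = (
  delta: Delta,
): { data: string; type: 'reasoning' | 'text' } | null => {
  let reasoning =
    typeof delta.reasoning_content === 'string' ? delta.reasoning_content : null;
  let content = typeof delta.content === 'string' ? delta.content : null;

  if (content !== null && reasoning !== null) {
    if (content === '' && reasoning === '') {
      content = null; // both empty: stay in the reasoning phase
    } else if (reasoning === '') {
      reasoning = null; // empty reasoning beside real content: emit text
    }
  }

  if (reasoning !== null) return { data: reasoning, type: 'reasoning' };
  if (content !== null) return { data: content, type: 'text' };
  return null; // no content case
};

// Mirrors the expectations in the test added earlier in this diff:
classifyDelta({ content: '', reasoning_content: '您好' }); // reasoning "您好"
classifyDelta({ content: '你好', reasoning_content: '' }); // text "你好"
classifyDelta({ content: '', reasoning_content: '' }); // reasoning ""
```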
@@ -61,6 +61,16 @@ export const transformQwenStream = (chunk: OpenAI.ChatCompletionChunk): StreamPr
     } as StreamProtocolToolCallChunk;
   }
 
+  // DeepSeek reasoner will put thinking in the reasoning_content field
+  if (
+    item.delta &&
+    'reasoning_content' in item.delta &&
+    typeof item.delta.reasoning_content === 'string' &&
+    item.delta.reasoning_content !== ''
+  ) {
+    return { data: item.delta.reasoning_content, id: chunk.id, type: 'reasoning' };
+  }
+
   if (typeof item.delta?.content === 'string') {
     return { data: item.delta.content, id: chunk.id, type: 'text' };
   }
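The Qwen transform takes a simpler route than the OpenAI one: `reasoning_content` only wins when it is a non-empty string, so Aliyun's empty reasoning deltas fall through to the text branch. A hedged usage sketch, reusing the chunk shape from the test data earlier in this diff and assuming `transformQwenStream` is imported from the qwen stream module shown above (the cast is needed because `reasoning_content` is not part of the upstream `OpenAI.ChatCompletionChunk` typings):

```typescript
import OpenAI from 'openai';

// Hypothetical chunk mirroring the test data in this diff; the model id
// and values are illustrative.
const chunk = {
  choices: [
    {
      delta: { content: '', reasoning_content: '您好' },
      finish_reason: null,
      index: 0,
    },
  ],
  created: 1737563070,
  id: '1',
  model: 'deepseek-r1',
  object: 'chat.completion.chunk',
} as unknown as OpenAI.ChatCompletionChunk;

transformQwenStream(chunk);
// → { data: '您好', id: '1', type: 'reasoning' }

// With reasoning_content === '', the guard above no longer matches and the
// chunk falls through to the text branch:
// delta: { content: '你好', reasoning_content: '' } → { data: '你好', type: 'text' }
```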