@lobehub/chat 1.21.14 → 1.21.15

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,23 @@

  # Changelog

+ ### [Version 1.21.15](https://github.com/lobehub/lobe-chat/compare/v1.21.14...v1.21.15)
+
+ <sup>Released on **2024-10-12**</sup>
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ### [Version 1.21.14](https://github.com/lobehub/lobe-chat/compare/v1.21.13...v1.21.14)

  <sup>Released on **2024-10-12**</sup>
package/netlify.toml CHANGED
@@ -3,7 +3,7 @@ command = "pnpm run build"
  publish = ".next"

  [build.environment]
- NODE_OPTIONS = "--max_old_space_size=8192"
+ NODE_OPTIONS = "--max_old_space_size=4096"

  [template.environment]
  OPENAI_API_KEY = "set your OpenAI API Key"
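
The change above halves the V8 old-generation heap cap for the Netlify build from 8 GiB to 4 GiB (`--max_old_space_size` is expressed in MiB). As a quick sanity check, a small hypothetical script — not part of this package — can print the heap limit the flag actually yields:

```ts
// check-heap.ts — hypothetical helper, not shipped with @lobehub/chat.
// Prints the effective V8 heap cap so a NODE_OPTIONS value can be verified,
// e.g. NODE_OPTIONS="--max_old_space_size=4096" npx tsx check-heap.ts
import { getHeapStatistics } from 'node:v8';

// heap_size_limit is the total V8 heap cap; it tracks --max_old_space_size
// plus a small allowance for the young generation and other spaces.
const limitMiB = getHeapStatistics().heap_size_limit / 1024 / 1024;
console.log(`V8 heap size limit: ~${Math.round(limitMiB)} MiB`);
```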
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lobehub/chat",
- "version": "1.21.14",
+ "version": "1.21.15",
  "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
  "keywords": [
  "framework",
@@ -7,6 +7,7 @@ const SiliconCloud: ModelProviderCard = {
  description: 'DeepSeek V2.5 集合了先前版本的优秀特征,增强了通用和编码能力。',
  displayName: 'DeepSeek V2.5',
  enabled: true,
+ functionCall: true,
  id: 'deepseek-ai/DeepSeek-V2.5',
  pricing: {
  currency: 'CNY',
@@ -19,6 +20,7 @@ const SiliconCloud: ModelProviderCard = {
  description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
  displayName: 'Qwen2.5 7B',
  enabled: true,
+ functionCall: true,
  id: 'Qwen/Qwen2.5-7B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -30,6 +32,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
  displayName: 'Qwen2.5 14B',
+ functionCall: true,
  id: 'Qwen/Qwen2.5-14B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -41,6 +44,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
  displayName: 'Qwen2.5 32B',
+ functionCall: true,
  id: 'Qwen/Qwen2.5-32B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -53,6 +57,7 @@ const SiliconCloud: ModelProviderCard = {
  description: 'Qwen2.5 是全新的大型语言模型系列,具有更强的理解和生成能力。',
  displayName: 'Qwen2.5 72B',
  enabled: true,
+ functionCall: true,
  id: 'Qwen/Qwen2.5-72B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -88,6 +93,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'InternLM2.5 提供多场景下的智能对话解决方案。',
  displayName: 'Internlm 2.5 7B',
+ functionCall: true,
  id: 'internlm/internlm2_5-7b-chat',
  pricing: {
  currency: 'CNY',
@@ -99,6 +105,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: '创新的开源模型InternLM2.5,通过大规模的参数提高了对话智能。',
  displayName: 'Internlm 2.5 20B',
+ functionCall: true,
  id: 'internlm/internlm2_5-20b-chat',
  pricing: {
  currency: 'CNY',
@@ -110,6 +117,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'GLM-4 9B 开放源码版本,为会话应用提供优化后的对话体验。',
  displayName: 'GLM-4 9B',
+ functionCall: true,
  id: 'THUDM/glm-4-9b-chat',
  pricing: {
  currency: 'CNY',
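
The hunks after the version bump appear to come from the SiliconCloud provider definition (the `const SiliconCloud: ModelProviderCard` shown in the hunk headers), not from package.json itself; the exact file path is not included in this diff. Each change marks another chat model as supporting function calling by adding `functionCall: true`. A minimal sketch of one such entry, inferred only from the fields visible in this diff — the interface name and any fields beyond those shown are assumptions, and the real type in lobe-chat has more fields:

```ts
// Sketch based on the fields visible in the diff above; illustrative only.
interface ChatModelCard {
  description: string;
  displayName: string;
  enabled?: boolean;
  functionCall?: boolean; // the flag this release turns on for several models
  id: string;
  pricing?: { currency: string };
}

// One of the entries touched by this diff, reconstructed for illustration.
const deepseekV25: ChatModelCard = {
  description: 'DeepSeek V2.5 集合了先前版本的优秀特征,增强了通用和编码能力。',
  displayName: 'DeepSeek V2.5',
  enabled: true,
  functionCall: true,
  id: 'deepseek-ai/DeepSeek-V2.5',
  pricing: { currency: 'CNY' },
};
```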