@lobehub/chat 1.47.7 → 1.47.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,6 @@
+{
+  "image": "mcr.microsoft.com/devcontainers/universal:2",
+  "features": {
+    "ghcr.io/devcontainers/features/node:1": {}
+  }
+}
package/CHANGELOG.md CHANGED
@@ -2,6 +2,56 @@
 
 # Changelog
 
+### [Version 1.47.9](https://github.com/lobehub/lobe-chat/compare/v1.47.8...v1.47.9)
+
+<sup>Released on **2025-01-20**</sup>
+
+#### ♻ Code Refactoring
+
+- **misc**: Improve error code.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Code refactoring
+
+- **misc**: Improve error code, closes [#5525](https://github.com/lobehub/lobe-chat/issues/5525) ([4fc4fa6](https://github.com/lobehub/lobe-chat/commit/4fc4fa6))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
+### [Version 1.47.8](https://github.com/lobehub/lobe-chat/compare/v1.47.7...v1.47.8)
+
+<sup>Released on **2025-01-20**</sup>
+
+#### 💄 Styles
+
+- **misc**: Add deepseek r1 model.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Styles
+
+- **misc**: Add deepseek r1 model, closes [#5520](https://github.com/lobehub/lobe-chat/issues/5520) ([414477f](https://github.com/lobehub/lobe-chat/commit/414477f))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
 ### [Version 1.47.7](https://github.com/lobehub/lobe-chat/compare/v1.47.6...v1.47.7)
 
 <sup>Released on **2025-01-20**</sup>
package/changelog/v1.json CHANGED
@@ -1,4 +1,22 @@
 [
+  {
+    "children": {
+      "improvements": [
+        "Improve error code."
+      ]
+    },
+    "date": "2025-01-20",
+    "version": "1.47.9"
+  },
+  {
+    "children": {
+      "improvements": [
+        "Add deepseek r1 model."
+      ]
+    },
+    "date": "2025-01-20",
+    "version": "1.47.8"
+  },
   {
     "children": {
       "improvements": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.47.7",
+  "version": "1.47.9",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
@@ -20,6 +20,22 @@ const deepseekChatModels: AIChatModelCard[] = [
     releasedAt: '2024-12-26',
     type: 'chat',
   },
+  {
+    contextWindowTokens: 65_536,
+    description:
+      'DeepSeek 推出的推理模型。在输出最终回答之前,模型会先输出一段思维链内容,以提升最终答案的准确性。',
+    displayName: 'DeepSeek R1',
+    enabled: true,
+    id: 'deepseek-reasoner',
+    pricing: {
+      cachedInput: 1,
+      currency: 'CNY',
+      input: 4,
+      output: 16,
+    },
+    releasedAt: '2025-01-20',
+    type: 'chat',
+  },
 ];
 
 export const allModels = [...deepseekChatModels];
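Although it appears under the package.json header, this hunk edits the TypeScript model catalog (the diff omits its file header): it registers a `deepseek-reasoner` chat model card with a 65,536-token context window and CNY pricing. The Chinese description roughly translates to "A reasoning model released by DeepSeek; before producing the final answer, the model first emits a chain-of-thought passage to improve the accuracy of that answer." As a minimal sketch, assuming `allModels` is exported exactly as shown, a consumer could read the new card like this (the import path is illustrative, not the project's real layout):

```ts
// Hypothetical consumer of the catalog above; the relative import path is illustrative.
import { allModels } from './deepseek';

const r1 = allModels.find((card) => card.id === 'deepseek-reasoner');

if (r1) {
  console.log(r1.displayName); // 'DeepSeek R1'
  console.log(r1.contextWindowTokens); // 65_536
  console.log(r1.pricing?.output); // 16 (CNY)
}
```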
@@ -11,14 +11,29 @@ const DeepSeek: ModelProviderCard = {
       enabled: true,
       functionCall: true,
       id: 'deepseek-chat',
-      pricing: {
-        cachedInput: 0.5,
+      pricing: { // 2025.2.9 之后涨价
+        cachedInput: 0.1,
         currency: 'CNY',
-        input: 2,
-        output: 8,
+        input: 1,
+        output: 2,
       },
       releasedAt: '2024-12-26',
     },
+    {
+      contextWindowTokens: 65_536,
+      description:
+        'DeepSeek 推出的推理模型。在输出最终回答之前,模型会先输出一段思维链内容,以提升最终答案的准确性。',
+      displayName: 'DeepSeek R1',
+      enabled: true,
+      id: 'deepseek-reasoner',
+      pricing: {
+        cachedInput: 1,
+        currency: 'CNY',
+        input: 4,
+        output: 16,
+      },
+      releasedAt: '2025-01-20',
+    },
   ],
   checkModel: 'deepseek-chat',
   description:
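This hunk lowers the deepseek-chat prices on the provider card (cachedInput 0.5 → 0.1, input 2 → 1, output 8 → 2 CNY; the inline comment "2025.2.9 之后涨价" notes that prices rise again after 2025-02-09) and adds the same DeepSeek R1 entry to the provider's model list. A minimal cost-estimate sketch under the new prices, assuming the pricing fields are CNY per million tokens (the diff itself does not state the unit):

```ts
// Rough cost estimate for deepseek-chat under the new prices shown above.
// Assumption: pricing values are CNY per 1M tokens; the diff does not state the unit.
const pricing = { cachedInput: 0.1, currency: 'CNY', input: 1, output: 2 };

const estimateCNY = (inputTokens: number, outputTokens: number, cachedInputTokens = 0) =>
  ((inputTokens - cachedInputTokens) * pricing.input +
    cachedInputTokens * pricing.cachedInput +
    outputTokens * pricing.output) /
  1_000_000;

// 800k uncached input tokens + 200k output tokens ≈ 0.8 + 0.4 = 1.2 CNY
console.log(estimateCNY(800_000, 200_000));
```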
@@ -39,6 +39,9 @@ export const userSettings = pgTable('user_settings', {
     .primaryKey(),
 
   tts: jsonb('tts'),
+  /**
+   * @deprecated
+   */
   keyVaults: text('key_vaults'),
   general: jsonb('general'),
   languageModel: jsonb('language_model'),
@@ -1,8 +1,30 @@
-import { ModelProvider } from '../types';
+import OpenAI from 'openai';
+
+import { ChatStreamPayload, ModelProvider } from '../types';
 import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
 
 export const LobeDeepSeekAI = LobeOpenAICompatibleFactory({
   baseURL: 'https://api.deepseek.com/v1',
+  chatCompletion: {
+    handlePayload: ({ frequency_penalty, model, presence_penalty, temperature, top_p, ...payload }: ChatStreamPayload) =>
+      ({
+        ...payload,
+        model,
+        ...(model === 'deepseek-reasoner'
+          ? {
+              frequency_penalty: undefined,
+              presence_penalty: undefined,
+              temperature: undefined,
+              top_p: undefined,
+            }
+          : {
+              frequency_penalty,
+              presence_penalty,
+              temperature,
+              top_p,
+            }),
+      }) as OpenAI.ChatCompletionCreateParamsStreaming,
+  },
   debug: {
     chatCompletion: () => process.env.DEBUG_DEEPSEEK_CHAT_COMPLETION === '1',
   },
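The new `chatCompletion.handlePayload` hook destructures the sampling parameters out of the payload and sends them as `undefined` when the model is `deepseek-reasoner` (presumably because the reasoning endpoint does not honor them), while passing them through unchanged for the other DeepSeek models. A standalone sketch of the same branching logic, with a simplified stand-in for `ChatStreamPayload` rather than the project's real type:

```ts
// Standalone sketch of the payload handling above; Payload is a simplified
// stand-in for ChatStreamPayload, not the project's actual type.
interface Payload {
  frequency_penalty?: number;
  messages: { content: string; role: string }[];
  model: string;
  presence_penalty?: number;
  temperature?: number;
  top_p?: number;
}

const handlePayload = ({ frequency_penalty, model, presence_penalty, temperature, top_p, ...rest }: Payload) => ({
  ...rest,
  model,
  // deepseek-reasoner gets the sampling parameters stripped (sent as undefined);
  // every other model keeps whatever the caller provided.
  ...(model === 'deepseek-reasoner'
    ? { frequency_penalty: undefined, presence_penalty: undefined, temperature: undefined, top_p: undefined }
    : { frequency_penalty, presence_penalty, temperature, top_p }),
});

const payload = handlePayload({ messages: [], model: 'deepseek-reasoner', temperature: 0.7 });
console.log(payload.temperature); // undefined
```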
@@ -12,6 +12,12 @@ export const genServerAiProvidersConfig = (specificConfig: Record<any, any>) =>
     (config, provider) => {
       const providerUpperCase = provider.toUpperCase();
       const providerCard = AiModels[provider] as AiFullModelCard[];
+
+      if (!providerCard)
+        throw new Error(
+          `Provider [${provider}] not found in aiModels, please make sure you have exported the provider in the \`aiModels/index.ts\``,
+        );
+
       const providerConfig = specificConfig[provider as keyof typeof specificConfig] || {};
       const providerModelList =
         process.env[providerConfig.modelListKey ?? `${providerUpperCase}_MODEL_LIST`];
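The added guard fails fast when a provider key has no matching export in `aiModels`, replacing a later undefined access with a descriptive error message. A minimal sketch of the same pattern, using a placeholder `AiModels` map and card type rather than the project's real definitions:

```ts
// Minimal sketch of the fail-fast guard above; AiModels and AiFullModelCard are
// simplified placeholders, not the project's actual definitions.
type AiFullModelCard = { id: string };

const AiModels: Record<string, AiFullModelCard[] | undefined> = {
  deepseek: [{ id: 'deepseek-chat' }, { id: 'deepseek-reasoner' }],
};

const getProviderCards = (provider: string): AiFullModelCard[] => {
  const providerCard = AiModels[provider];
  if (!providerCard)
    throw new Error(
      `Provider [${provider}] not found in aiModels, please make sure you have exported the provider in the \`aiModels/index.ts\``,
    );
  return providerCard;
};

console.log(getProviderCards('deepseek').length); // 2
// getProviderCards('openai') would throw with the descriptive message above.
```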