@lobehub/chat 1.50.4 → 1.51.0

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (47)
  1. package/.env.example +1 -0
  2. package/CHANGELOG.md +60 -0
  3. package/changelog/v1.json +21 -0
  4. package/docs/changelog/2025-01-22-new-ai-provider.mdx +2 -2
  5. package/docs/changelog/2025-02-02-deepseek-r1.mdx +33 -0
  6. package/docs/changelog/2025-02-02-deepseek-r1.zh-CN.mdx +29 -0
  7. package/docs/changelog/index.json +6 -0
  8. package/docs/self-hosting/environment-variables/model-provider.mdx +7 -0
  9. package/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx +7 -0
  10. package/locales/ar/modelProvider.json +4 -0
  11. package/locales/bg-BG/modelProvider.json +4 -0
  12. package/locales/de-DE/modelProvider.json +4 -0
  13. package/locales/en-US/modelProvider.json +4 -0
  14. package/locales/es-ES/modelProvider.json +4 -0
  15. package/locales/fa-IR/modelProvider.json +4 -0
  16. package/locales/fr-FR/modelProvider.json +4 -0
  17. package/locales/it-IT/modelProvider.json +4 -0
  18. package/locales/ja-JP/modelProvider.json +4 -0
  19. package/locales/ko-KR/modelProvider.json +4 -0
  20. package/locales/nl-NL/modelProvider.json +4 -0
  21. package/locales/pl-PL/modelProvider.json +4 -0
  22. package/locales/pt-BR/modelProvider.json +4 -0
  23. package/locales/ru-RU/modelProvider.json +4 -0
  24. package/locales/tr-TR/modelProvider.json +4 -0
  25. package/locales/vi-VN/modelProvider.json +4 -0
  26. package/locales/zh-CN/modelProvider.json +4 -0
  27. package/locales/zh-TW/modelProvider.json +4 -0
  28. package/package.json +1 -1
  29. package/src/app/(main)/settings/provider/features/ModelList/CreateNewModelModal/Form.tsx +8 -0
  30. package/src/config/aiModels/github.ts +30 -2
  31. package/src/config/aiModels/qwen.ts +139 -10
  32. package/src/config/modelProviders/github.ts +27 -3
  33. package/src/config/modelProviders/qwen.ts +90 -12
  34. package/src/hooks/useModelSupportReasoning.ts +15 -0
  35. package/src/libs/agent-runtime/siliconcloud/index.ts +3 -2
  36. package/src/locales/default/modelProvider.ts +5 -0
  37. package/src/store/aiInfra/slices/aiModel/action.ts +1 -0
  38. package/src/store/aiInfra/slices/aiModel/selectors.ts +7 -0
  39. package/src/store/user/slices/modelList/selectors/modelProvider.ts +4 -0
  40. package/src/types/aiModel.ts +5 -0
  41. package/src/types/llm.ts +9 -0
  42. package/src/utils/_deprecated/parseModels.test.ts +11 -0
  43. package/src/utils/_deprecated/parseModels.ts +4 -0
  44. package/src/utils/merge.test.ts +56 -0
  45. package/src/utils/merge.ts +3 -2
  46. package/src/utils/parseModels.test.ts +14 -0
  47. package/src/utils/parseModels.ts +4 -0
package/src/utils/_deprecated/parseModels.test.ts
@@ -52,6 +52,17 @@ describe('parseModelString', () => {
     });
   });
 
+  it('token and reasoning', () => {
+    const result = parseModelString('deepseek-r1=Deepseek R1<65536:reasoning>');
+
+    expect(result.add[0]).toEqual({
+      displayName: 'Deepseek R1',
+      reasoning: true,
+      id: 'deepseek-r1',
+      contextWindowTokens: 65_536,
+    });
+  });
+
   it('multi models', () => {
     const result = parseModelString(
       'gemini-1.5-flash-latest=Gemini 1.5 Flash<16000:vision>,gpt-4-all=ChatGPT Plus<128000:fc:vision:file>',
package/src/utils/_deprecated/parseModels.ts
@@ -60,6 +60,10 @@ export const parseModelString = (modelString: string = '', withDeploymentName =
 
   for (const capability of capabilityList) {
     switch (capability) {
+      case 'reasoning': {
+        model.reasoning = true;
+        break;
+      }
       case 'vision': {
         model.vision = true;
         break;
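For context, the `<contextWindowTokens:capability>` suffix handled by this switch follows the grammar exercised in the tests above (`fc`, `vision`, `file`, and now `reasoning`). Below is a minimal TypeScript sketch of that parsing step, not the package's actual implementation; the names `ParsedModel` and `parseCapabilitySuffix` are illustrative only.

```ts
// Illustrative, simplified sketch of parsing one "id=Name<tokens:cap1:cap2>" entry.
interface ParsedModel {
  id: string;
  displayName?: string;
  contextWindowTokens?: number;
  reasoning?: boolean;
  vision?: boolean;
  functionCall?: boolean;
  files?: boolean;
}

const parseCapabilitySuffix = (entry: string): ParsedModel => {
  // e.g. "deepseek-r1=Deepseek R1<65536:reasoning>"
  const [idAndName, rawSuffix] = entry.split('<');
  const [id, displayName] = idAndName.split('=');
  const model: ParsedModel = { id, displayName: displayName || id };

  if (!rawSuffix) return model;

  // "65536:reasoning>" -> tokens plus a list of capability tags
  const [tokens, ...capabilities] = rawSuffix.replace('>', '').split(':');
  if (tokens) model.contextWindowTokens = Number(tokens);

  for (const capability of capabilities) {
    switch (capability) {
      case 'reasoning':
        model.reasoning = true;
        break;
      case 'vision':
        model.vision = true;
        break;
      case 'fc':
        model.functionCall = true;
        break;
      case 'file':
        model.files = true;
        break;
    }
  }

  return model;
};

// parseCapabilitySuffix('deepseek-r1=Deepseek R1<65536:reasoning>')
// -> { id: 'deepseek-r1', displayName: 'Deepseek R1', contextWindowTokens: 65536, reasoning: true }
```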
package/src/utils/merge.test.ts
@@ -45,4 +45,60 @@ describe('mergeArrayById', () => {
       },
     ]);
   });
+
+  it('should merge data with objects', () => {
+    const data = mergeArrayById(
+      [
+        {
+          contextWindowTokens: 128_000,
+          description:
+            'o1-mini是一款针对编程、数学和科学应用场景而设计的快速、经济高效的推理模型。该模型具有128K上下文和2023年10月的知识截止日期。',
+          displayName: 'OpenAI o1-mini',
+          enabled: true,
+          id: 'o3-mini',
+          abilities: {
+            functionCall: true,
+          },
+          maxOutput: 65_536,
+          pricing: {
+            input: 3,
+            output: 12,
+          },
+          releasedAt: '2024-09-12',
+          type: 'chat',
+        },
+      ],
+      [
+        {
+          id: 'o3-mini',
+          contextWindowTokens: null,
+          displayName: 'OpenAI o1-mini ABC',
+          type: 'chat',
+          abilities: {},
+          enabled: false,
+        },
+      ],
+    );
+
+    expect(data).toEqual([
+      {
+        contextWindowTokens: 128_000,
+        description:
+          'o1-mini是一款针对编程、数学和科学应用场景而设计的快速、经济高效的推理模型。该模型具有128K上下文和2023年10月的知识截止日期。',
+        displayName: 'OpenAI o1-mini ABC',
+        enabled: false,
+        id: 'o3-mini',
+        maxOutput: 65_536,
+        pricing: {
+          input: 3,
+          output: 12,
+        },
+        abilities: {
+          functionCall: true,
+        },
+        releasedAt: '2024-09-12',
+        type: 'chat',
+      },
+    ]);
+  });
 });
package/src/utils/merge.ts
@@ -1,4 +1,4 @@
-import { merge as _merge, mergeWith } from 'lodash-es';
+import { merge as _merge, isEmpty, mergeWith } from 'lodash-es';
 
 /**
  * 用于合并对象,如果是数组则直接替换
@@ -33,7 +33,8 @@ export const mergeArrayById = <T extends MergeableItem>(defaultItems: T[], userI
     const mergedItem: T = { ...defaultItem };
     Object.entries(userItem).forEach(([key, value]) => {
       // Only use user value if it's not null and not undefined
-      if (value !== null && value !== undefined) {
+      // and not empty object
+      if (value !== null && value !== undefined && !(typeof value === 'object' && isEmpty(value))) {
        // @ts-expect-error
        mergedItem[key] = value;
      }
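The reason for the extra check: a user item carrying an empty `abilities: {}` (as in the new merge test above) previously overwrote the default item's populated `abilities`. A minimal sketch of the guard in isolation; the helper name `shouldOverrideDefault` is illustrative and not part of the package.

```ts
import { isEmpty } from 'lodash-es';

// A user-provided value should only override the default when it carries real information:
// null, undefined, and empty objects/arrays keep the default; primitives still override.
const shouldOverrideDefault = (value: unknown) =>
  value !== null && value !== undefined && !(typeof value === 'object' && isEmpty(value));

shouldOverrideDefault({ functionCall: true }); // true  – non-empty object wins
shouldOverrideDefault({});                     // false – empty object keeps the default abilities
shouldOverrideDefault(false);                  // true  – primitives such as false or 0 still override
shouldOverrideDefault(null);                   // false
```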
package/src/utils/parseModels.test.ts
@@ -58,6 +58,20 @@ describe('parseModelString', () => {
     });
   });
 
+  it('token and reasoning', () => {
+    const result = parseModelString('deepseek-r1=Deepseek R1<65536:reasoning>');
+
+    expect(result.add[0]).toEqual({
+      displayName: 'Deepseek R1',
+      abilities: {
+        reasoning: true,
+      },
+      id: 'deepseek-r1',
+      contextWindowTokens: 65_536,
+      type: 'chat',
+    });
+  });
+
   it('multi models', () => {
     const result = parseModelString(
       'gemini-1.5-flash-latest=Gemini 1.5 Flash<16000:vision>,gpt-4-all=ChatGPT Plus<128000:fc:vision:file>',
package/src/utils/parseModels.ts
@@ -64,6 +64,10 @@ export const parseModelString = (modelString: string = '', withDeploymentName =
 
   for (const capability of capabilityList) {
     switch (capability) {
+      case 'reasoning': {
+        model.abilities!.reasoning = true;
+        break;
+      }
       case 'vision': {
         model.abilities!.vision = true;
         break;
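With the new tag in place, a custom model string can mark a model as a reasoning model. A hedged usage sketch of the updated parser follows; the import alias and the combined model string are illustrative, and in LobeChat such strings are typically supplied through provider model-list environment variables rather than called directly.

```ts
// Import path assumed to follow the repository's "@/" alias for src/.
import { parseModelString } from '@/utils/parseModels';

// Illustrative model-list string; the <tokens:capability> grammar matches the
// tests above, including the new `reasoning` tag.
const modelList =
  'deepseek-r1=Deepseek R1<65536:reasoning>,gpt-4-all=ChatGPT Plus<128000:fc:vision:file>';

const { add } = parseModelString(modelList);
// add[0] -> { id: 'deepseek-r1', displayName: 'Deepseek R1',
//             contextWindowTokens: 65536, abilities: { reasoning: true }, type: 'chat' }
```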