@lobehub/chat 1.75.4 → 1.76.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +52 -0
- package/README.md +1 -1
- package/README.zh-CN.md +1 -1
- package/changelog/v1.json +18 -0
- package/docs/developer/database-schema.dbml +1 -0
- package/docs/self-hosting/advanced/model-list.mdx +5 -3
- package/docs/self-hosting/advanced/model-list.zh-CN.mdx +5 -3
- package/docs/usage/providers/infiniai.zh-CN.mdx +4 -0
- package/locales/ar/hotkey.json +46 -0
- package/locales/ar/models.json +51 -54
- package/locales/ar/providers.json +3 -0
- package/locales/ar/setting.json +12 -0
- package/locales/bg-BG/hotkey.json +46 -0
- package/locales/bg-BG/models.json +51 -54
- package/locales/bg-BG/providers.json +3 -0
- package/locales/bg-BG/setting.json +12 -0
- package/locales/de-DE/hotkey.json +46 -0
- package/locales/de-DE/models.json +51 -54
- package/locales/de-DE/providers.json +3 -0
- package/locales/de-DE/setting.json +12 -0
- package/locales/en-US/hotkey.json +46 -0
- package/locales/en-US/models.json +51 -54
- package/locales/en-US/providers.json +3 -0
- package/locales/en-US/setting.json +12 -0
- package/locales/es-ES/hotkey.json +46 -0
- package/locales/es-ES/models.json +51 -54
- package/locales/es-ES/providers.json +3 -0
- package/locales/es-ES/setting.json +12 -0
- package/locales/fa-IR/hotkey.json +46 -0
- package/locales/fa-IR/models.json +51 -54
- package/locales/fa-IR/providers.json +3 -0
- package/locales/fa-IR/setting.json +12 -0
- package/locales/fr-FR/hotkey.json +46 -0
- package/locales/fr-FR/models.json +51 -54
- package/locales/fr-FR/providers.json +3 -0
- package/locales/fr-FR/setting.json +12 -0
- package/locales/it-IT/hotkey.json +46 -0
- package/locales/it-IT/models.json +51 -54
- package/locales/it-IT/providers.json +3 -0
- package/locales/it-IT/setting.json +12 -0
- package/locales/ja-JP/hotkey.json +46 -0
- package/locales/ja-JP/models.json +51 -54
- package/locales/ja-JP/providers.json +3 -0
- package/locales/ja-JP/setting.json +12 -0
- package/locales/ko-KR/hotkey.json +46 -0
- package/locales/ko-KR/models.json +51 -54
- package/locales/ko-KR/providers.json +3 -0
- package/locales/ko-KR/setting.json +12 -0
- package/locales/nl-NL/hotkey.json +46 -0
- package/locales/nl-NL/models.json +51 -54
- package/locales/nl-NL/providers.json +3 -0
- package/locales/nl-NL/setting.json +12 -0
- package/locales/pl-PL/hotkey.json +46 -0
- package/locales/pl-PL/models.json +51 -54
- package/locales/pl-PL/providers.json +3 -0
- package/locales/pl-PL/setting.json +12 -0
- package/locales/pt-BR/hotkey.json +46 -0
- package/locales/pt-BR/models.json +51 -54
- package/locales/pt-BR/providers.json +3 -0
- package/locales/pt-BR/setting.json +12 -0
- package/locales/ru-RU/hotkey.json +46 -0
- package/locales/ru-RU/models.json +51 -54
- package/locales/ru-RU/providers.json +3 -0
- package/locales/ru-RU/setting.json +12 -0
- package/locales/tr-TR/hotkey.json +46 -0
- package/locales/tr-TR/models.json +51 -54
- package/locales/tr-TR/providers.json +3 -0
- package/locales/tr-TR/setting.json +12 -0
- package/locales/vi-VN/hotkey.json +46 -0
- package/locales/vi-VN/models.json +51 -54
- package/locales/vi-VN/providers.json +3 -0
- package/locales/vi-VN/setting.json +12 -0
- package/locales/zh-CN/hotkey.json +46 -0
- package/locales/zh-CN/models.json +55 -58
- package/locales/zh-CN/providers.json +3 -0
- package/locales/zh-CN/setting.json +12 -0
- package/locales/zh-TW/hotkey.json +46 -0
- package/locales/zh-TW/models.json +51 -54
- package/locales/zh-TW/providers.json +3 -0
- package/locales/zh-TW/setting.json +12 -0
- package/package.json +3 -3
- package/src/app/[variants]/(main)/(mobile)/me/(home)/features/Category.tsx +1 -1
- package/src/app/[variants]/(main)/(mobile)/me/(home)/layout.tsx +3 -2
- package/src/app/[variants]/(main)/(mobile)/me/data/features/Category.tsx +1 -1
- package/src/app/[variants]/(main)/(mobile)/me/profile/features/Category.tsx +1 -1
- package/src/app/[variants]/(main)/(mobile)/me/settings/features/Category.tsx +1 -1
- package/src/app/[variants]/(main)/_layout/Desktop/RegisterHotkeys.tsx +11 -0
- package/src/app/[variants]/(main)/_layout/Desktop/SideBar/PinList/index.tsx +6 -23
- package/src/app/[variants]/(main)/_layout/Desktop/SideBar/TopActions.test.tsx +2 -0
- package/src/app/[variants]/(main)/_layout/Desktop/index.tsx +11 -4
- package/src/app/[variants]/(main)/chat/(workspace)/@conversation/features/ChatInput/Desktop/Footer/SendMore.tsx +6 -21
- package/src/app/[variants]/(main)/chat/(workspace)/@conversation/features/ChatInput/Desktop/Footer/ShortcutHint.tsx +13 -34
- package/src/app/[variants]/(main)/chat/(workspace)/@conversation/features/ChatInput/Desktop/Footer/index.tsx +1 -1
- package/src/app/[variants]/(main)/chat/(workspace)/@conversation/features/ZenModeToast/Toast.tsx +7 -4
- package/src/app/[variants]/(main)/chat/(workspace)/_layout/Desktop/ChatHeader/HeaderAction.tsx +12 -8
- package/src/app/[variants]/(main)/chat/(workspace)/_layout/Desktop/ChatHeader/Main.tsx +24 -30
- package/src/app/[variants]/(main)/chat/(workspace)/_layout/Desktop/index.tsx +0 -2
- package/src/app/[variants]/(main)/chat/(workspace)/features/SettingButton.tsx +12 -7
- package/src/app/[variants]/(main)/chat/@session/features/SessionSearchBar.tsx +5 -1
- package/src/app/[variants]/(main)/chat/_layout/Desktop/RegisterHotkeys.tsx +10 -0
- package/src/app/[variants]/(main)/chat/_layout/Desktop/index.tsx +5 -0
- package/src/app/[variants]/(main)/chat/_layout/Mobile.tsx +1 -1
- package/src/app/[variants]/(main)/discover/features/StoreSearchBar.tsx +5 -1
- package/src/app/[variants]/(main)/settings/hooks/useCategory.tsx +31 -21
- package/src/app/[variants]/(main)/settings/hotkey/features/HotkeySetting.tsx +80 -0
- package/src/app/[variants]/(main)/settings/hotkey/index.tsx +9 -0
- package/src/app/[variants]/(main)/settings/hotkey/page.tsx +15 -0
- package/src/app/[variants]/layout.tsx +16 -13
- package/src/config/aiModels/infiniai.ts +52 -55
- package/src/config/aiModels/siliconcloud.ts +17 -1
- package/src/config/aiModels/tencentcloud.ts +17 -0
- package/src/const/hotkeys.ts +80 -10
- package/src/const/settings/hotkey.ts +10 -0
- package/src/const/settings/index.ts +3 -0
- package/src/database/client/migrations.json +46 -32
- package/src/database/migrations/0019_add_hotkey_user_settings.sql +2 -0
- package/src/database/migrations/meta/0019_snapshot.json +4218 -0
- package/src/database/migrations/meta/_journal.json +7 -0
- package/src/database/schemas/user.ts +1 -0
- package/src/database/server/models/user.ts +2 -0
- package/src/features/ChatInput/Desktop/InputArea/index.tsx +8 -0
- package/src/features/ChatInput/Desktop/index.tsx +0 -1
- package/src/features/ChatInput/Topic/index.tsx +10 -15
- package/src/features/FileManager/Header/FilesSearchBar.tsx +6 -2
- package/src/features/HotkeyHelperPanel/HotkeyContent.tsx +62 -0
- package/src/features/HotkeyHelperPanel/index.tsx +59 -0
- package/src/hooks/useHotkeys/chatScope.ts +105 -0
- package/src/hooks/useHotkeys/globalScope.ts +69 -0
- package/src/hooks/useHotkeys/index.ts +2 -0
- package/src/hooks/useHotkeys/useHotkeyById.test.ts +194 -0
- package/src/hooks/useHotkeys/useHotkeyById.ts +57 -0
- package/src/libs/agent-runtime/infiniai/index.ts +38 -3
- package/src/locales/default/hotkey.ts +50 -0
- package/src/locales/default/index.ts +2 -0
- package/src/locales/default/setting.ts +12 -0
- package/src/store/global/initialState.ts +3 -0
- package/src/store/user/slices/settings/selectors/__snapshots__/settings.test.ts.snap +79 -0
- package/src/store/user/slices/settings/selectors/settings.test.ts +131 -0
- package/src/store/user/slices/settings/selectors/settings.ts +6 -0
- package/src/types/hotkey.ts +59 -0
- package/src/types/user/settings/hotkey.ts +3 -0
- package/src/types/user/settings/index.ts +3 -0
- package/src/utils/format.ts +1 -1
- package/src/utils/parseModels.test.ts +14 -0
- package/src/utils/parseModels.ts +4 -0
- package/src/app/[variants]/(main)/chat/(workspace)/_layout/Desktop/HotKeys.tsx +0 -44
- package/src/components/HotKeys/index.tsx +0 -77
package/src/config/aiModels/infiniai.ts
CHANGED
@@ -2,6 +2,7 @@ import { AIChatModelCard } from '@/types/aiModel';
 
 // https://cloud.infini-ai.com/genstudio/model
 // All models are currently free
+// Currently the platform doesn't support Function Call
 
 const infiniaiChatModels: AIChatModelCard[] = [
   {
@@ -24,8 +25,8 @@ const infiniaiChatModels: AIChatModelCard[] = [
   {
     contextWindowTokens: 65_536,
     description:
-      'DeepSeek-V3 是一个强大的专家混合(MoE)语言模型,总参数量为 671B,每个 Token 激活 37B 参数。该模型采用多头潜在注意力(MLA)和 DeepSeekMoE
-    displayName: 'DeepSeek V3',
+      'DeepSeek-V3-0324 是一个强大的专家混合(MoE)语言模型,总参数量为 671B,每个 Token 激活 37B 参数。该模型采用多头潜在注意力(MLA)和 DeepSeekMoE 架构,实现了高效推理和经济训练,并在前代 DeepSeek-V3 的基础上显著提升了性能。',
+    displayName: 'DeepSeek V3 0324',
     enabled: true,
     id: 'deepseek-v3',
     pricing: {
@@ -54,7 +55,6 @@ const infiniaiChatModels: AIChatModelCard[] = [
     description:
       'DeepSeek-R1-Distill-Qwen-32B 是基于 DeepSeek-R1 蒸馏而来的模型,在 Qwen2.5-32B 的基础上使用 DeepSeek-R1 生成的样本进行微调。该模型在各种基准测试中表现出色,保持了强大的推理能力。',
     displayName: 'DeepSeek R1 Distill Qwen 32B',
-    enabled: true,
     id: 'deepseek-r1-distill-qwen-32b',
     pricing: {
       currency: 'CNY',
@@ -64,12 +64,15 @@ const infiniaiChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-
+    abilities: {
+      vision: true,
+    },
+    contextWindowTokens: 125_000,
     description:
-      'Qwen2.5
-    displayName: 'Qwen2.5 72B Instruct',
+      'Qwen2.5-VL 系列模型提升了模型的智能水平、实用性和适用性,使其在自然对话、内容创作、专业知识服务及代码开发等场景中表现更优。旗舰模型 Qwen2.5-VL-72B-Instruct 在涵盖多个领域和任务的基准测试中展现出强大的竞争力,包括大学水平的问题解答、数学、文档理解、通用问答、视频理解以及视觉代理任务等。',
+    displayName: 'Qwen2.5 VL 72B Instruct',
     enabled: true,
-    id: 'qwen2.5-72b-instruct',
+    id: 'qwen2.5-vl-72b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -78,12 +81,15 @@ const infiniaiChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-
+    abilities: {
+      vision: true,
+    },
+    contextWindowTokens: 125_000,
     description:
-      'Qwen2.5
-    displayName: 'Qwen2.5 32B Instruct',
+      'Qwen2.5-VL 系列模型提升了模型的智能水平、实用性和适用性,使其在自然对话、内容创作、专业知识服务及代码开发等场景中表现更优。32B 版本使用了强化学习技术优化模型,与 Qwen2.5 VL 系列的其它模型相比,提供了更符合人类偏好的输出风格、复杂数学问题的推理能力,以及图像细粒度理解与推理能力。',
+    displayName: 'Qwen2.5 VL 32B Instruct',
     enabled: true,
-    id: 'qwen2.5-32b-instruct',
+    id: 'qwen2.5-vl-32b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -92,12 +98,14 @@ const infiniaiChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-
+    abilities: {
+      vision: true,
+    },
+    contextWindowTokens: 125_000,
     description:
-      'Qwen2.5-
-    displayName: 'Qwen2.5
-
-    id: 'qwen2.5-coder-32b-instruct',
+      'Qwen2.5-VL 系列模型提升了模型的智能水平、实用性和适用性,使其在自然对话、内容创作、专业知识服务及代码开发等场景中表现更优。',
+    displayName: 'Qwen2.5 VL 7B Instruct',
+    id: 'qwen2.5-vl-7b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -109,9 +117,9 @@ const infiniaiChatModels: AIChatModelCard[] = [
     contextWindowTokens: 32_768,
     description:
       'Qwen2.5 是 Qwen 大型语言模型系列的最新成果。Qwen2.5 发布了从 0.5 到 720 亿参数不等的基础语言模型及指令调优语言模型。Qwen2.5 相比 Qwen2 带来了以下改进:\n显著增加知识量,在编程与数学领域的能力得到极大提升。\n在遵循指令、生成长文本、理解结构化数据 (例如,表格) 以及生成结构化输出特别是 JSON 方面有显著提升。对系统提示的多样性更具韧性,增强了聊天机器人中的角色扮演实现和条件设定。\n支持长上下文处理。\n支持超过 29 种语言的多语言功能,包括中文、英语、法语、西班牙语、葡萄牙语、德语、意大利语、俄语、日语、韩语、越南语、泰语、阿拉伯语等。',
-    displayName: 'Qwen2.5
+    displayName: 'Qwen2.5 72B Instruct',
     enabled: true,
-    id: 'qwen2.5-
+    id: 'qwen2.5-72b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -123,9 +131,9 @@ const infiniaiChatModels: AIChatModelCard[] = [
     contextWindowTokens: 32_768,
     description:
       'Qwen2.5 是 Qwen 大型语言模型系列的最新成果。Qwen2.5 发布了从 0.5 到 720 亿参数不等的基础语言模型及指令调优语言模型。Qwen2.5 相比 Qwen2 带来了以下改进:\n显著增加知识量,在编程与数学领域的能力得到极大提升。\n在遵循指令、生成长文本、理解结构化数据 (例如,表格) 以及生成结构化输出特别是 JSON 方面有显著提升。对系统提示的多样性更具韧性,增强了聊天机器人中的角色扮演实现和条件设定。\n支持长上下文处理。\n支持超过 29 种语言的多语言功能,包括中文、英语、法语、西班牙语、葡萄牙语、德语、意大利语、俄语、日语、韩语、越南语、泰语、阿拉伯语等。',
-    displayName: 'Qwen2.5
+    displayName: 'Qwen2.5 32B Instruct',
     enabled: true,
-    id: 'qwen2.5-
+    id: 'qwen2.5-32b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -134,12 +142,11 @@ const infiniaiChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-    contextWindowTokens:
+    contextWindowTokens: 32_768,
     description:
-      '
-    displayName: '
-
-    id: 'llama-3.3-70b-instruct',
+      'Qwen2.5-Coder 是最新的代码专用 Qwen 大型语言模型系列。Qwen2.5-Coder 在 CodeQwen1.5 的基础上带来了以下改进:\n显著提升代码生成、代码推理和代码修复能力。\n支持真实世界应用,例如代码代理,增强编码能力和数学及一般能力。\n支持长上下文处理。',
+    displayName: 'Qwen2.5 Coder 32B Instruct',
+    id: 'qwen2.5-coder-32b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -150,10 +157,9 @@ const infiniaiChatModels: AIChatModelCard[] = [
   {
     contextWindowTokens: 32_768,
     description:
-      'Qwen2 是 Qwen
-    displayName: '
-
-    id: 'qwen2-72b-instruct',
+      'Qwen2.5 是 Qwen 大型语言模型系列的最新成果。Qwen2.5 发布了从 0.5 到 720 亿参数不等的基础语言模型及指令调优语言模型。Qwen2.5 相比 Qwen2 带来了以下改进:\n显著增加知识量,在编程与数学领域的能力得到极大提升。\n在遵循指令、生成长文本、理解结构化数据 (例如,表格) 以及生成结构化输出特别是 JSON 方面有显著提升。对系统提示的多样性更具韧性,增强了聊天机器人中的角色扮演实现和条件设定。\n支持长上下文处理。\n支持超过 29 种语言的多语言功能,包括中文、英语、法语、西班牙语、葡萄牙语、德语、意大利语、俄语、日语、韩语、越南语、泰语、阿拉伯语等。',
+    displayName: 'Qwen2.5 14B Instruct',
+    id: 'qwen2.5-14b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -164,10 +170,9 @@ const infiniaiChatModels: AIChatModelCard[] = [
   {
     contextWindowTokens: 32_768,
     description:
-      'Qwen2 是 Qwen
-    displayName: '
-
-    id: 'qwen2-7b-instruct',
+      'Qwen2.5 是 Qwen 大型语言模型系列的最新成果。Qwen2.5 发布了从 0.5 到 720 亿参数不等的基础语言模型及指令调优语言模型。Qwen2.5 相比 Qwen2 带来了以下改进:\n显著增加知识量,在编程与数学领域的能力得到极大提升。\n在遵循指令、生成长文本、理解结构化数据 (例如,表格) 以及生成结构化输出特别是 JSON 方面有显著提升。对系统提示的多样性更具韧性,增强了聊天机器人中的角色扮演实现和条件设定。\n支持长上下文处理。\n支持超过 29 种语言的多语言功能,包括中文、英语、法语、西班牙语、葡萄牙语、德语、意大利语、俄语、日语、韩语、越南语、泰语、阿拉伯语等。',
+    displayName: 'Qwen2.5 7B Instruct',
+    id: 'qwen2.5-7b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -176,12 +181,12 @@ const infiniaiChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-    contextWindowTokens:
+    contextWindowTokens: 8192,
     description:
-      '
-    displayName: '
+      'Meta 发布的 LLaMA 3.3 多语言大规模语言模型(LLMs)是一个经过预训练和指令微调的生成模型,提供 70B 规模(文本输入/文本输出)。该模型使用超过 15T 的数据进行训练,支持英语、德语、法语、意大利语、葡萄牙语、印地语、西班牙语和泰语,知识更新截止于 2023 年 12 月。',
+    displayName: 'LLaMA 3.3 70B',
     enabled: true,
-    id: '
+    id: 'llama-3.3-70b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -192,10 +197,9 @@ const infiniaiChatModels: AIChatModelCard[] = [
   {
     contextWindowTokens: 32_768,
     description:
-      '
-    displayName: '
-
-    id: 'qwen1.5-72b-chat',
+      'Qwen2 是 Qwen 团队推出的新一代大型语言模型系列。它基于 Transformer 架构,并采用 SwiGLU 激活函数、注意力 QKV 偏置(attention QKV bias)、群组查询注意力(group query attention)、滑动窗口注意力(mixture of sliding window attention)与全注意力的混合等技术。此外,Qwen 团队还改进了适应多种自然语言和代码的分词器。',
+    displayName: 'Qwen 2 72B Instruct',
+    id: 'qwen2-72b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -206,10 +210,9 @@ const infiniaiChatModels: AIChatModelCard[] = [
   {
     contextWindowTokens: 32_768,
     description:
-      '
-    displayName: '
-
-    id: 'qwen1.5-32b-chat',
+      'Qwen2 是 Qwen 团队推出的新一代大型语言模型系列。它基于 Transformer 架构,并采用 SwiGLU 激活函数、注意力 QKV 偏置(attention QKV bias)、群组查询注意力(group query attention)、滑动窗口注意力(mixture of sliding window attention)与全注意力的混合等技术。此外,Qwen 团队还改进了适应多种自然语言和代码的分词器。',
+    displayName: 'Qwen 2 7B Instruct',
+    id: 'qwen2-7b-instruct',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -218,12 +221,11 @@ const infiniaiChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-    contextWindowTokens:
+    contextWindowTokens: 4096,
     description:
-      '
-    displayName: '
-
-    id: 'qwen1.5-14b-chat',
+      'Yi-1.5 是 Yi 的升级版本。 它使用 500B Tokens 的高质量语料库在 Yi 上持续进行预训练,并在 3M 个多样化的微调样本上进行微调。',
+    displayName: 'Yi-1.5 34B Chat',
+    id: 'yi-1.5-34b-chat',
     pricing: {
       currency: 'CNY',
       input: 0,
@@ -235,7 +237,6 @@ const infiniaiChatModels: AIChatModelCard[] = [
     contextWindowTokens: 16_384,
     description: 'GLM-4-9B-Chat 是智谱 AI 推出的最新一代预训练模型 GLM-4-9B 的人类偏好对齐版本。',
     displayName: 'GLM-4 9B Chat',
-    enabled: true,
     id: 'glm-4-9b-chat',
     pricing: {
       currency: 'CNY',
@@ -249,7 +250,6 @@ const infiniaiChatModels: AIChatModelCard[] = [
     description:
       'ChatGLM3 是智谱 AI 与清华 KEG 实验室发布的闭源模型,经过海量中英标识符的预训练与人类偏好对齐训练,相比一代模型在 MMLU、C-Eval、GSM8K 分别取得了 16%、36%、280% 的提升,并登顶中文任务榜单 C-Eval。适用于对知识量、推理能力、创造力要求较高的场景,比如广告文案、小说写作、知识类写作、代码生成等。',
     displayName: 'ChatGLM3',
-    enabled: true,
     id: 'chatglm3',
     pricing: {
       currency: 'CNY',
@@ -263,7 +263,6 @@ const infiniaiChatModels: AIChatModelCard[] = [
     description:
       'ChatGLM3-6b-base 是由智谱开发的 ChatGLM 系列最新一代的 60 亿参数规模的开源的基础模型。',
     displayName: 'ChatGLM3 6B Base',
-    enabled: true,
     id: 'chatglm3-6b-base',
     pricing: {
       currency: 'CNY',
@@ -277,7 +276,6 @@ const infiniaiChatModels: AIChatModelCard[] = [
     description:
       'Llama2 是由 Meta 开发并开源的大型语言模型(LLM)系列,这是一组从 70 亿到 700 亿参数不同规模、经过预训练和微调的生成式文本模型。架构层面,LLama2 是一个使用优化型转换器架构的自动回归语言模型。调整后的版本使用有监督的微调(SFT)和带有人类反馈的强化学习(RLHF)以对齐人类对有用性和安全性的偏好。Llama2 较 Llama 系列在多种学术数据集上有着更加不俗的表现,为大量其他模型提供了设计和开发的思路。',
     displayName: 'Llama 2 7B Chat',
-    enabled: true,
     id: 'llama-2-7b-chat',
     pricing: {
       currency: 'CNY',
@@ -291,7 +289,6 @@ const infiniaiChatModels: AIChatModelCard[] = [
     description:
       'Megrez-3B-Instruct 是由无问芯穹完全自主训练的大语言模型。Megrez-3B-Instruct 旨在通过软硬协同理念,打造一款极速推理、小巧精悍、极易上手的端侧智能解决方案。',
     displayName: 'Megrez 3B Instruct',
-    enabled: true,
     id: 'megrez-3b-instruct',
     pricing: {
       currency: 'CNY',
package/src/config/aiModels/siliconcloud.ts
CHANGED
@@ -521,7 +521,23 @@ const siliconcloudChatModels: AIChatModelCard[] = [
     abilities: {
       vision: true,
     },
-    contextWindowTokens:
+    contextWindowTokens: 131_072,
+    description:
+      'Qwen2.5-VL-32B-Instruct 是通义千问团队推出的多模态大模型,是 Qwen2.5-VL 系列的一部分。该模型不仅精通识别常见物体,还能分析图像中的文本、图表、图标、图形和布局。它可作为视觉智能体,能够推理并动态操控工具,具备使用电脑和手机的能力。此外,这个模型可以精确定位图像中的对象,并为发票、表格等生成结构化输出。相比前代模型 Qwen2-VL,该版本在数学和问题解决能力方面通过强化学习得到了进一步提升,响应风格也更符合人类偏好。',
+    displayName: 'Qwen2.5 VL 32B Instruct',
+    id: 'Qwen/Qwen2.5-VL-32B-Instruct',
+    pricing: {
+      currency: 'CNY',
+      input: 1.89,
+      output: 1.89,
+    },
+    type: 'chat',
+  },
+  {
+    abilities: {
+      vision: true,
+    },
+    contextWindowTokens: 131_072,
     description:
       'Qwen2.5-VL 是 Qwen2.5 系列中的视觉语言模型。该模型在多方面有显著提升:具备更强的视觉理解能力,能够识别常见物体、分析文本、图表和布局;作为视觉代理能够推理并动态指导工具使用;支持理解超过 1 小时的长视频并捕捉关键事件;能够通过生成边界框或点准确定位图像中的物体;支持生成结构化输出,尤其适用于发票、表格等扫描数据。',
     displayName: 'Qwen2.5 VL 72B Instruct',
package/src/config/aiModels/tencentcloud.ts
CHANGED
@@ -19,6 +19,23 @@ const tencentCloudChatModels: AIChatModelCard[] = [
     },
     type: 'chat',
   },
+  {
+    abilities: {
+      functionCall: true,
+    },
+    contextWindowTokens: 65_536,
+    description:
+      'DeepSeek-V3-0324 为671B 参数 MoE 模型,在编程与技术能力、上下文理解与长文本处理等方面优势突出。',
+    displayName: 'DeepSeek-V3-0324',
+    enabled: true,
+    id: 'deepseek-v3-0324',
+    pricing: {
+      currency: 'CNY',
+      input: 2,
+      output: 8,
+    },
+    type: 'chat',
+  },
   {
     abilities: {
       functionCall: true,
package/src/const/hotkeys.ts
CHANGED
@@ -1,11 +1,81 @@
-
-export const META_KEY = 'mod';
-export const SAVE_TOPIC_KEY = 'n';
-export const CLEAN_MESSAGE_KEY = 'backspace';
+import { combineKeys } from '@lobehub/ui/es/Hotkey';
 
-
-
-
-
-
+import {
+  HotkeyEnum,
+  HotkeyGroupEnum,
+  HotkeyRegistration,
+  HotkeyScopeEnum,
+  KeyEnum,
+} from '@/types/hotkey';
+
+// mod 在 Mac 上是 command 键,alt 在 Win 上是 ctrl 键
+export const HOTKEYS_REGISTRATION: HotkeyRegistration = [
+  {
+    group: HotkeyGroupEnum.Essential,
+    id: HotkeyEnum.Search,
+    keys: combineKeys([KeyEnum.Mod, 'k']),
+    scopes: [HotkeyScopeEnum.Global],
+  },
+  {
+    group: HotkeyGroupEnum.Essential,
+    id: HotkeyEnum.SwitchAgent,
+    keys: combineKeys([KeyEnum.Ctrl, KeyEnum.Number]),
+    nonEditable: true,
+    scopes: [HotkeyScopeEnum.Global],
+  },
+  {
+    group: HotkeyGroupEnum.Essential,
+    id: HotkeyEnum.ToggleZenMode,
+    keys: combineKeys([KeyEnum.Mod, KeyEnum.Backslash]),
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+  {
+    group: HotkeyGroupEnum.Essential,
+    id: HotkeyEnum.ToggleLeftPanel,
+    keys: combineKeys([KeyEnum.Mod, KeyEnum.BracketLeft]),
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+  {
+    group: HotkeyGroupEnum.Essential,
+    id: HotkeyEnum.ToggleRightPanel,
+    keys: combineKeys([KeyEnum.Mod, KeyEnum.BracketRight]),
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+  {
+    group: HotkeyGroupEnum.Essential,
+    id: HotkeyEnum.OpenHotkeyHelper,
+    keys: combineKeys([KeyEnum.Ctrl, KeyEnum.Shift, KeyEnum.QuestionMark]),
+    scopes: [HotkeyScopeEnum.Global],
+  },
+  {
+    group: HotkeyGroupEnum.Conversation,
+    id: HotkeyEnum.OpenChatSettings,
+    keys: combineKeys([KeyEnum.Alt, KeyEnum.Comma]),
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+  {
+    group: HotkeyGroupEnum.Conversation,
+    id: HotkeyEnum.RegenerateMessage,
+    keys: combineKeys([KeyEnum.Alt, 'r']),
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+  {
+    group: HotkeyGroupEnum.Conversation,
+    id: HotkeyEnum.SaveTopic,
+    keys: combineKeys([KeyEnum.Alt, 'n']),
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+  {
+    group: HotkeyGroupEnum.Conversation,
+    id: HotkeyEnum.AddUserMessage,
+    keys: combineKeys([KeyEnum.Alt, KeyEnum.Enter]),
+    // 不通过 Scope 模式激活
+  },
+  {
+    group: HotkeyGroupEnum.Conversation,
+    id: HotkeyEnum.EditMessage,
+    keys: combineKeys([KeyEnum.Alt, KeyEnum.LeftDoubleClick]),
+    nonEditable: true,
+    scopes: [HotkeyScopeEnum.Chat],
+  },
+];
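The registration above is the single source of hotkey defaults: each entry pairs an id from HotkeyEnum with a key combination built by combineKeys. As a minimal sketch (not code from this release), a lookup by id could be done directly on the exported array; the helper name and the 'alt+n'-style result string are assumptions for illustration:

import { HOTKEYS_REGISTRATION } from '@/const/hotkeys';
import { HotkeyEnum } from '@/types/hotkey';

// Hypothetical helper: find a registered hotkey definition by its id.
const getHotkeyById = (id: HotkeyEnum) =>
  HOTKEYS_REGISTRATION.find((item) => item.id === id);

// getHotkeyById(HotkeyEnum.SaveTopic)?.keys holds the default binding produced by
// combineKeys([KeyEnum.Alt, 'n']) above (presumably a string such as 'alt+n').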
package/src/const/settings/hotkey.ts
ADDED
@@ -0,0 +1,10 @@
+import { HOTKEYS_REGISTRATION } from '@/const/hotkeys';
+import { UserHotkeyConfig } from '@/types/user/settings';
+
+export const DEFAULT_HOTKEY_CONFIG: UserHotkeyConfig = HOTKEYS_REGISTRATION.reduce(
+  (acc: UserHotkeyConfig, item) => {
+    acc[item.id] = item.keys;
+    return acc;
+  },
+  {} as UserHotkeyConfig,
+);
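The reduce above flattens the registration into a plain id-to-keys map. Under that reading, DEFAULT_HOTKEY_CONFIG has roughly the shape sketched below; the literal key strings are assumptions about what combineKeys returns, not values taken from the package:

import { HotkeyEnum } from '@/types/hotkey';
import { UserHotkeyConfig } from '@/types/user/settings';

// Illustrative shape only: one entry per item in HOTKEYS_REGISTRATION.
const exampleDefaults = {
  [HotkeyEnum.Search]: 'mod+k', // combineKeys([KeyEnum.Mod, 'k'])
  [HotkeyEnum.SaveTopic]: 'alt+n', // combineKeys([KeyEnum.Alt, 'n'])
  [HotkeyEnum.RegenerateMessage]: 'alt+r',
  // ...remaining ids follow the same pattern
} as UserHotkeyConfig;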
package/src/const/settings/index.ts
CHANGED
@@ -2,6 +2,7 @@ import { UserSettings } from '@/types/user/settings';
 
 import { DEFAULT_AGENT } from './agent';
 import { DEFAULT_COMMON_SETTINGS } from './common';
+import { DEFAULT_HOTKEY_CONFIG } from './hotkey';
 import { DEFAULT_LLM_CONFIG } from './llm';
 import { DEFAULT_SYNC_CONFIG } from './sync';
 import { DEFAULT_SYSTEM_AGENT_CONFIG } from './systemAgent';
@@ -11,6 +12,7 @@ import { DEFAULT_TTS_CONFIG } from './tts';
 export const COOKIE_CACHE_DAYS = 30;
 
 export * from './agent';
+export * from './hotkey';
 export * from './llm';
 export * from './systemAgent';
 export * from './tool';
@@ -19,6 +21,7 @@ export * from './tts';
 export const DEFAULT_SETTINGS: UserSettings = {
   defaultAgent: DEFAULT_AGENT,
   general: DEFAULT_COMMON_SETTINGS,
+  hotkey: DEFAULT_HOTKEY_CONFIG,
   keyVaults: {},
   languageModel: DEFAULT_LLM_CONFIG,
   sync: DEFAULT_SYNC_CONFIG,
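With hotkey wired into DEFAULT_SETTINGS, downstream code can fall back to the registered defaults when a user has not customized a binding. A minimal sketch of that fallback, assuming a simple shallow merge (the resolveHotkeys helper is illustrative, not part of this diff):

import { DEFAULT_SETTINGS } from '@/const/settings';
import { UserHotkeyConfig } from '@/types/user/settings';

// Hypothetical merge: user overrides win, registered defaults fill the gaps.
const resolveHotkeys = (userHotkey: Partial<UserHotkeyConfig> = {}): UserHotkeyConfig => ({
  ...DEFAULT_SETTINGS.hotkey,
  ...userHotkey,
});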
package/src/database/client/migrations.json
CHANGED
@@ -223,7 +223,10 @@
       "hash": "9646161fa041354714f823d726af27247bcd6e60fa3be5698c0d69f337a5700b"
     },
     {
-      "sql": [
+      "sql": [
+        "DROP TABLE \"user_budgets\";",
+        "\nDROP TABLE \"user_subscriptions\";"
+      ],
       "bps": true,
       "folderMillis": 1729699958471,
       "hash": "7dad43a2a25d1aec82124a4e53f8d82f8505c3073f23606c1dc5d2a4598eacf9"
@@ -295,7 +298,9 @@
       "hash": "845a692ceabbfc3caf252a97d3e19a213bc0c433df2689900135f9cfded2cf49"
     },
     {
-      "sql": [
+      "sql": [
+        "ALTER TABLE \"messages\" ADD COLUMN \"reasoning\" jsonb;"
+      ],
       "bps": true,
       "folderMillis": 1737609172353,
       "hash": "2cb36ae4fcdd7b7064767e04bfbb36ae34518ff4bb1b39006f2dd394d1893868"
@@ -383,37 +388,46 @@
     },
     {
       "sql": [
-        "ALTER TABLE \"session_groups\" DROP CONSTRAINT \"session_group_client_id_user_unique\";",
-        "\nALTER TABLE \"sessions\" DROP CONSTRAINT \"sessions_client_id_user_id_unique\";",
-        "\nALTER TABLE \"topics\" DROP CONSTRAINT \"topic_client_id_user_id_unique\";",
-        "\nALTER TABLE \"agents\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"files\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"knowledge_bases\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"message_plugins\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"message_queries\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"message_tts\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"message_translates\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"chunks\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"embeddings\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"unstructured_chunks\" ADD COLUMN \"client_id\" text;",
-        "\nALTER TABLE \"threads\" ADD COLUMN \"client_id\" text;",
-        "\nCREATE UNIQUE INDEX \"client_id_user_id_unique\" ON \"agents\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"files_client_id_user_id_unique\" ON \"files\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"knowledge_bases_client_id_user_id_unique\" ON \"knowledge_bases\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"message_plugins_client_id_user_id_unique\" ON \"message_plugins\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"message_queries_client_id_user_id_unique\" ON \"message_queries\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"message_tts_client_id_user_id_unique\" ON \"message_tts\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"message_translates_client_id_user_id_unique\" ON \"message_translates\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"chunks_client_id_user_id_unique\" ON \"chunks\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"embeddings_client_id_user_id_unique\" ON \"embeddings\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"unstructured_chunks_client_id_user_id_unique\" ON \"unstructured_chunks\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"session_groups_client_id_user_id_unique\" ON \"session_groups\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"sessions_client_id_user_id_unique\" ON \"sessions\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"threads_client_id_user_id_unique\" ON \"threads\" USING btree (\"client_id\",\"user_id\");",
-        "\nCREATE UNIQUE INDEX \"topics_client_id_user_id_unique\" ON \"topics\" USING btree (\"client_id\",\"user_id\")
+        "ALTER TABLE \"session_groups\" DROP CONSTRAINT IF EXISTS \"session_group_client_id_user_unique\";",
+        "\nALTER TABLE \"sessions\" DROP CONSTRAINT IF EXISTS \"sessions_client_id_user_id_unique\";",
+        "\nALTER TABLE \"topics\" DROP CONSTRAINT IF EXISTS \"topic_client_id_user_id_unique\";",
+        "\n\n-- add client_id column\nALTER TABLE \"agents\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"files\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"knowledge_bases\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"message_plugins\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"message_queries\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"message_tts\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"message_translates\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"chunks\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"embeddings\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"unstructured_chunks\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\nALTER TABLE \"threads\" ADD COLUMN IF NOT EXISTS \"client_id\" text;",
+        "\n\n-- Create unique index(using IF NOT EXISTS)\nCREATE UNIQUE INDEX IF NOT EXISTS \"client_id_user_id_unique\" ON \"agents\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"files_client_id_user_id_unique\" ON \"files\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"knowledge_bases_client_id_user_id_unique\" ON \"knowledge_bases\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"message_plugins_client_id_user_id_unique\" ON \"message_plugins\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"message_queries_client_id_user_id_unique\" ON \"message_queries\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"message_tts_client_id_user_id_unique\" ON \"message_tts\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"message_translates_client_id_user_id_unique\" ON \"message_translates\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"chunks_client_id_user_id_unique\" ON \"chunks\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"embeddings_client_id_user_id_unique\" ON \"embeddings\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"unstructured_chunks_client_id_user_id_unique\" ON \"unstructured_chunks\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"session_groups_client_id_user_id_unique\" ON \"session_groups\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"sessions_client_id_user_id_unique\" ON \"sessions\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"threads_client_id_user_id_unique\" ON \"threads\" USING btree (\"client_id\",\"user_id\");",
+        "\nCREATE UNIQUE INDEX IF NOT EXISTS \"topics_client_id_user_id_unique\" ON \"topics\" USING btree (\"client_id\",\"user_id\");\n"
       ],
       "bps": true,
       "folderMillis": 1742616026643,
-      "hash": "
+      "hash": "110a49a1a7c42ded9a9613edb8dc15fee0b4b3dd8061f871b7b42241f29987b5"
+    },
+    {
+      "sql": [
+        "-- Add hotkey column to user_settings table\nALTER TABLE \"user_settings\" ADD COLUMN IF NOT EXISTS \"hotkey\" jsonb;",
+        "\n"
+      ],
+      "bps": true,
+      "folderMillis": 1742806552131,
+      "hash": "d9032edf59f717e0e71da5a95b3545ec251c311155cafe14df01a12800a9eae0"
     }
-  ]
+  ]