@lobehub/chat 1.128.10 → 1.129.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/.env.example +5 -0
  2. package/CHANGELOG.md +58 -0
  3. package/Dockerfile +3 -1
  4. package/Dockerfile.database +3 -1
  5. package/Dockerfile.pglite +3 -1
  6. package/changelog/v1.json +18 -0
  7. package/docs/development/database-schema.dbml +9 -0
  8. package/docs/self-hosting/environment-variables/model-provider.mdx +24 -0
  9. package/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx +27 -1
  10. package/docs/usage/providers/vercel-ai-gateway.mdx +62 -0
  11. package/docs/usage/providers/vercel-ai-gateway.zh-CN.mdx +61 -0
  12. package/package.json +1 -1
  13. package/packages/database/migrations/0031_add_agent_index.sql +7 -1
  14. package/packages/database/migrations/0032_improve_agents_field.sql +0 -4
  15. package/packages/database/migrations/0033_modern_mercury.sql +18 -0
  16. package/packages/database/migrations/meta/0033_snapshot.json +6594 -0
  17. package/packages/database/migrations/meta/_journal.json +7 -0
  18. package/packages/database/src/core/migrations.json +23 -6
  19. package/packages/database/src/schemas/message.ts +12 -11
  20. package/packages/database/src/schemas/rag.ts +10 -6
  21. package/packages/database/src/schemas/session.ts +7 -5
  22. package/packages/database/src/schemas/topic.ts +7 -3
  23. package/packages/model-bank/package.json +2 -1
  24. package/packages/model-bank/src/aiModels/index.ts +3 -0
  25. package/packages/model-bank/src/aiModels/siliconcloud.ts +45 -0
  26. package/packages/model-bank/src/aiModels/vercelaigateway.ts +1803 -0
  27. package/packages/model-runtime/src/const/modelProvider.ts +1 -0
  28. package/packages/model-runtime/src/providers/siliconcloud/index.ts +19 -11
  29. package/packages/model-runtime/src/providers/vercelaigateway/index.ts +62 -0
  30. package/packages/model-runtime/src/runtimeMap.ts +2 -0
  31. package/packages/types/src/user/settings/keyVaults.ts +1 -0
  32. package/src/config/modelProviders/index.ts +4 -0
  33. package/src/config/modelProviders/vercelaigateway.ts +21 -0
  34. package/src/envs/llm.ts +6 -0
package/.env.example CHANGED
@@ -178,6 +178,11 @@ OPENAI_API_KEY=sk-xxxxxxxxx
178
178
  # NEWAPI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
179
179
  # NEWAPI_PROXY_URL=https://your-newapi-server.com
180
180
 
181
+ ### Vercel AI Gateway ###
182
+
183
+ # VERCELAIGATEWAY_API_KEY=your_vercel_ai_gateway_api_key
184
+
185
+
181
186
  ########################################
182
187
  ############ Market Service ############
183
188
  ########################################
package/CHANGELOG.md CHANGED
@@ -2,6 +2,64 @@
2
2
 
3
3
  # Changelog
4
4
 
5
+ ### [Version 1.129.1](https://github.com/lobehub/lobe-chat/compare/v1.129.0...v1.129.1)
6
+
7
+ <sup>Released on **2025-09-16**</sup>
8
+
9
+ #### ♻ Code Refactoring
10
+
11
+ - **misc**: Improve db sql performance.
12
+
13
+ #### 💄 Styles
14
+
15
+ - **misc**: Update SiliconCloud reasoning models.
16
+
17
+ <br/>
18
+
19
+ <details>
20
+ <summary><kbd>Improvements and Fixes</kbd></summary>
21
+
22
+ #### Code refactoring
23
+
24
+ - **misc**: Improve db sql performance, closes [#9283](https://github.com/lobehub/lobe-chat/issues/9283) ([cee555a](https://github.com/lobehub/lobe-chat/commit/cee555a))
25
+
26
+ #### Styles
27
+
28
+ - **misc**: Update SiliconCloud reasoning models, closes [#9287](https://github.com/lobehub/lobe-chat/issues/9287) ([b47bb5b](https://github.com/lobehub/lobe-chat/commit/b47bb5b))
29
+
30
+ </details>
31
+
32
+ <div align="right">
33
+
34
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
35
+
36
+ </div>
37
+
38
+ ## [Version 1.129.0](https://github.com/lobehub/lobe-chat/compare/v1.128.10...v1.129.0)
39
+
40
+ <sup>Released on **2025-09-16**</sup>
41
+
42
+ #### ✨ Features
43
+
44
+ - **misc**: Support Vercel AI Gateway provider.
45
+
46
+ <br/>
47
+
48
+ <details>
49
+ <summary><kbd>Improvements and Fixes</kbd></summary>
50
+
51
+ #### What's improved
52
+
53
+ - **misc**: Support Vercel AI Gateway provider, closes [#8883](https://github.com/lobehub/lobe-chat/issues/8883) ([5a4b0fd](https://github.com/lobehub/lobe-chat/commit/5a4b0fd))
54
+
55
+ </details>
56
+
57
+ <div align="right">
58
+
59
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
60
+
61
+ </div>
62
+
5
63
  ### [Version 1.128.10](https://github.com/lobehub/lobe-chat/compare/v1.128.9...v1.128.10)
6
64
 
7
65
  <sup>Released on **2025-09-16**</sup>
package/Dockerfile CHANGED
@@ -257,7 +257,9 @@ ENV \
257
257
  # FAL
258
258
  FAL_API_KEY="" FAL_MODEL_LIST="" \
259
259
  # BFL
260
- BFL_API_KEY="" BFL_MODEL_LIST=""
260
+ BFL_API_KEY="" BFL_MODEL_LIST="" \
261
+ # Vercel AI Gateway
262
+ VERCELAIGATEWAY_API_KEY="" VERCELAIGATEWAY_MODEL_LIST=""
261
263
 
262
264
  USER nextjs
263
265
 
package/Dockerfile.database CHANGED
@@ -299,7 +299,9 @@ ENV \
299
299
  # FAL
300
300
  FAL_API_KEY="" FAL_MODEL_LIST="" \
301
301
  # BFL
302
- BFL_API_KEY="" BFL_MODEL_LIST=""
302
+ BFL_API_KEY="" BFL_MODEL_LIST="" \
303
+ # Vercel AI Gateway
304
+ VERCELAIGATEWAY_API_KEY="" VERCELAIGATEWAY_MODEL_LIST=""
303
305
 
304
306
  USER nextjs
305
307
 
package/Dockerfile.pglite CHANGED
@@ -255,7 +255,9 @@ ENV \
255
255
  # FAL
256
256
  FAL_API_KEY="" FAL_MODEL_LIST="" \
257
257
  # BFL
258
- BFL_API_KEY="" BFL_MODEL_LIST=""
258
+ BFL_API_KEY="" BFL_MODEL_LIST="" \
259
+ # Vercel AI Gateway
260
+ VERCELAIGATEWAY_API_KEY="" VERCELAIGATEWAY_MODEL_LIST=""
259
261
 
260
262
  USER nextjs
261
263
 
package/changelog/v1.json CHANGED
@@ -1,4 +1,22 @@
1
1
  [
2
+ {
3
+ "children": {
4
+ "improvements": [
5
+ "Update SiliconCloud reasoning models."
6
+ ]
7
+ },
8
+ "date": "2025-09-16",
9
+ "version": "1.129.1"
10
+ },
11
+ {
12
+ "children": {
13
+ "features": [
14
+ "Support Vercel AI Gateway provider."
15
+ ]
16
+ },
17
+ "date": "2025-09-16",
18
+ "version": "1.129.0"
19
+ },
2
20
  {
3
21
  "children": {
4
22
  "fixes": [
package/docs/development/database-schema.dbml CHANGED
@@ -419,6 +419,9 @@ table messages {
419
419
  topic_id [name: 'messages_topic_id_idx']
420
420
  parent_id [name: 'messages_parent_id_idx']
421
421
  quota_id [name: 'messages_quota_id_idx']
422
+ user_id [name: 'messages_user_id_idx']
423
+ session_id [name: 'messages_session_id_idx']
424
+ thread_id [name: 'messages_thread_id_idx']
422
425
  }
423
426
  }
424
427
 
@@ -624,6 +627,7 @@ table chunks {
624
627
 
625
628
  indexes {
626
629
  (client_id, user_id) [name: 'chunks_client_id_user_id_unique', unique]
630
+ user_id [name: 'chunks_user_id_idx']
627
631
  }
628
632
  }
629
633
 
@@ -637,6 +641,7 @@ table embeddings {
637
641
 
638
642
  indexes {
639
643
  (client_id, user_id) [name: 'embeddings_client_id_user_id_unique', unique]
644
+ chunk_id [name: 'embeddings_chunk_id_idx']
640
645
  }
641
646
  }
642
647
 
@@ -836,6 +841,8 @@ table sessions {
836
841
  indexes {
837
842
  (slug, user_id) [name: 'slug_user_id_unique', unique]
838
843
  (client_id, user_id) [name: 'sessions_client_id_user_id_unique', unique]
844
+ user_id [name: 'sessions_user_id_idx']
845
+ (id, user_id) [name: 'sessions_id_user_id_idx']
839
846
  }
840
847
  }
841
848
 
@@ -886,6 +893,8 @@ table topics {
886
893
 
887
894
  indexes {
888
895
  (client_id, user_id) [name: 'topics_client_id_user_id_unique', unique]
896
+ user_id [name: 'topics_user_id_idx']
897
+ (id, user_id) [name: 'topics_id_user_id_idx']
889
898
  }
890
899
  }
891
900
 
package/docs/self-hosting/environment-variables/model-provider.mdx CHANGED
@@ -3,6 +3,7 @@ title: LobeChat Model Service Providers - Environment Variables and Configuratio
3
3
  description: >-
4
4
  Learn about the environment variables and configuration settings for various model service providers like OpenAI, Google AI, AWS Bedrock, Ollama, Perplexity AI, Anthropic AI, Mistral AI, Groq AI, OpenRouter AI, and 01.AI.
5
5
 
6
+
6
7
  tags:
7
8
  - Model Service Providers
8
9
  - Environment Variables
@@ -693,4 +694,27 @@ The above example disables all models first, then enables `flux-pro-1.1` and `fl
693
694
 
694
695
  NewAPI is a multi-provider model aggregation service that supports automatic model routing based on provider detection. It offers cost management features and provides a single endpoint for accessing models from multiple providers including OpenAI, Anthropic, Google, and more. Learn more about NewAPI at [https://github.com/Calcium-Ion/new-api](https://github.com/Calcium-Ion/new-api).
695
696
 
697
+ ## Vercel AI Gateway
698
+
699
+ ### `ENABLED_VERCELAIGATEWAY`
700
+
701
+ - Type: Optional
702
+ - Description: Enables Vercel AI Gateway as a model provider by default. Set to `0` to disable the Vercel AI Gateway service.
703
+ - Default: `1`
704
+ - Example: `0`
705
+
706
+ ### `VERCELAIGATEWAY_API_KEY`
707
+
708
+ - Type: Required
709
+ - Description: This is the API key you applied for in the Vercel AI Gateway service.
710
+ - Default: -
711
+ - Example: `vck_xxxxxx...xxxxxx`
712
+
713
+ ### `VERCELAIGATEWAY_MODEL_LIST`
714
+
715
+ - Type: Optional
716
+ - Description: Used to control the Vercel AI Gateway model list. Use `+` to add a model, `-` to hide a model, and `model_name=display_name` to customize the display name of a model. Separate multiple entries with commas. The definition syntax follows the same rules as other providers' model lists.
717
+ - Default: `-`
718
+ - Example: `-all,+vercel-model-1,+vercel-model-2=vercel-special`
719
+
696
720
  [model-list]: /docs/self-hosting/advanced/model-list
package/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx CHANGED
@@ -691,7 +691,33 @@ LobeChat 在部署时提供了丰富的模型服务商相关的环境变量,
691
691
  - 示例:`https://your-newapi-server.com`
692
692
 
693
693
  <Callout type={'info'}>
694
- NewAPI 是一个多供应商模型聚合服务,支持基于供应商检测的自动模型路由。它提供成本管理功能,并为访问包括 OpenAI、Anthropic、Google 等多个供应商的模型提供单一端点。了解更多关于 NewAPI 的信息请访问 [https://github.com/Calcium-Ion/new-api](https://github.com/Calcium-Ion/new-api)。
694
+ NewAPI
695
+ 是一个多供应商模型聚合服务,支持基于供应商检测的自动模型路由。它提供成本管理功能,并为访问包括
696
+ OpenAI、Anthropic、Google 等多个供应商的模型提供单一端点。了解更多关于 NewAPI 的信息请访问
697
+ [https://github.com/Calcium-Ion/new-api](https://github.com/Calcium-Ion/new-api)。
695
698
  </Callout>
696
699
 
700
+ ## Vercel AI Gateway
701
+
702
+ ### `ENABLED_VERCELAIGATEWAY`
703
+
704
+ - 类型:可选
705
+ - 描述:默认启用 Vercel AI Gateway 作为模型供应商,当设为 0 时关闭 Vercel AI Gateway 服务
706
+ - 默认值:`1`
707
+ - 示例:`0`
708
+
709
+ ### `VERCELAIGATEWAY_API_KEY`
710
+
711
+ - 类型:必选
712
+ - 描述:这是你在 Vercel AI Gateway 服务中申请的 API 密钥
713
+ - 默认值:-
714
+ - 示例:`vck_xxxxxx...xxxxxx`
715
+
716
+ ### `VERCELAIGATEWAY_MODEL_LIST`
717
+
718
+ - 类型:可选
719
+ - 描述:用来控制 Vercel AI Gateway 模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名=展示名` 来自定义模型的展示名,用英文逗号隔开。模型定义语法规则与其他 provider 保持一致。
720
+ - 默认值:`-`
721
+ - 示例:`-all,+vercel-model-1,+vercel-model-2=vercel-special`
722
+
697
723
  [model-list]: /zh/docs/self-hosting/advanced/model-list
package/docs/usage/providers/vercel-ai-gateway.mdx ADDED
@@ -0,0 +1,62 @@
1
+ ---
2
+ title: Using Vercel AI Gateway in LobeChat
3
+ description: >-
4
+ Learn how to integrate and utilize Vercel AI Gateway's unified API in LobeChat.
5
+
6
+ tags:
7
+ - LobeChat
8
+ - Vercel AI Gateway
9
+ - API Key
10
+ - Web UI
11
+ ---
12
+
13
+ # Using Vercel AI Gateway in LobeChat
14
+
15
+ [Vercel AI Gateway](https://vercel.com/ai-gateway) is a unified API that provides access to 100+ AI models through a single endpoint. It offers features like budget management, usage monitoring, load balancing, and fallback handling.
16
+
17
+ This article will guide you on how to use Vercel AI Gateway in LobeChat.
18
+
19
+ <Steps>
20
+ ### Step 1: Create an API Key in Vercel AI Gateway
21
+
22
+ - Go to [Vercel Dashboard](https://vercel.com/dashboard)
23
+ - Click on the **AI Gateway** tab on the left side
24
+ - Click on **API keys** in the left sidebar
25
+ - Click **Create key** and then **Create key** in the dialog to complete
26
+
27
+ ### Step 2: Configure Vercel AI Gateway in LobeChat
28
+
29
+ - Go to the `Settings` page in LobeChat
30
+ - Under `AI Service Provider`, find the setting for `Vercel AI Gateway`
31
+ - Enter the API Key you obtained
32
+ - Choose a model from Vercel AI Gateway for your AI assistant to start the conversation
33
+
34
+ <Callout type={'warning'}>
35
+ During usage, you may need to pay the API service provider, so please refer to Vercel AI Gateway's
36
+ [pricing policy](https://vercel.com/docs/ai-gateway/models).
37
+ </Callout>
38
+ </Steps>
39
+
40
+ At this point, you can start chatting using the models provided by Vercel AI Gateway in LobeChat.
41
+
42
+ ## Model Selection
43
+
44
+ Vercel AI Gateway supports various model providers including:
45
+
46
+ - **OpenAI**: `openai/gpt-4o`, `openai/gpt-4o-mini`, `openai/o1`, etc.
47
+ - **Anthropic**: `anthropic/claude-3-5-sonnet`, `anthropic/claude-3-opus`, etc.
48
+ - **Google**: `google/gemini-2.5-pro`, `google/gemini-2.0-flash`, etc.
49
+ - **DeepSeek**: `deepseek/deepseek-chat`, `deepseek/deepseek-reasoner`, etc.
50
+ - And many more...
51
+
52
+ For a complete list of supported models, visit [Vercel AI Gateway Models](https://vercel.com/ai-gateway/models).
53
+
54
+ ## API Configuration
55
+
56
+ Vercel AI Gateway uses OpenAI-compatible API format. The base URL is:
57
+
58
+ ```
59
+ https://ai-gateway.vercel.sh/v1
60
+ ```
61
+
62
+ You can use any OpenAI-compatible client with this endpoint and your API key.
package/docs/usage/providers/vercel-ai-gateway.zh-CN.mdx ADDED
@@ -0,0 +1,61 @@
1
+ ---
2
+ title: 在 LobeChat 中使用 Vercel AI Gateway
3
+ description: 了解如何在 LobeChat 中集成和使用 Vercel AI Gateway 的统一 API
4
+
5
+ tags:
6
+ - LobeChat
7
+ - Vercel AI Gateway
8
+ - API 密钥
9
+ - Web 界面
10
+ ---
11
+
12
+ # 在 LobeChat 中使用 Vercel AI Gateway
13
+
14
+ [Vercel AI Gateway](https://vercel.com/ai-gateway) 是一个统一的 API,通过单一端点提供对 100+ AI 模型的访问。它提供预算管理、使用监控、负载均衡和回退处理等功能。
15
+
16
+ 本文将指导您如何在 LobeChat 中使用 Vercel AI Gateway。
17
+
18
+ <Steps>
19
+ ### 第一步:在 Vercel AI Gateway 中创建 API 密钥
20
+
21
+ - 访问 [Vercel 控制台](https://vercel.com/dashboard)
22
+ - 点击左侧的 **AI Gateway** 标签
23
+ - 点击左侧边栏的 **API 密钥**
24
+ - 点击 **创建密钥**,然后在对话框中点击 **创建密钥** 完成创建
25
+
26
+ ### 第二步:在 LobeChat 中配置 Vercel AI Gateway
27
+
28
+ - 进入 LobeChat 的 `设置` 页面
29
+ - 在 `AI 服务提供商` 下,找到 `Vercel AI Gateway` 设置
30
+ - 输入您获得的 API 密钥
31
+ - 选择 Vercel AI Gateway 的模型,开始与 AI 助手对话
32
+
33
+ <Callout type={'warning'}>
34
+ 使用过程中可能需要向 API 服务提供商付费,请参考 Vercel AI Gateway 的
35
+ [定价政策](https://vercel.com/docs/ai-gateway/models)。
36
+ </Callout>
37
+ </Steps>
38
+
39
+ 至此,您可以在 LobeChat 中使用 Vercel AI Gateway 提供的模型开始聊天了。
40
+
41
+ ## 模型选择
42
+
43
+ Vercel AI Gateway 支持多种模型提供商,包括:
44
+
45
+ - **OpenAI**: `openai/gpt-4o`、`openai/gpt-4o-mini`、`openai/o1` 等
46
+ - **Anthropic**: `anthropic/claude-3-5-sonnet`、`anthropic/claude-3-opus` 等
47
+ - **Google**: `google/gemini-2.5-pro`、`google/gemini-2.0-flash` 等
48
+ - **DeepSeek**: `deepseek/deepseek-chat`、`deepseek/deepseek-reasoner` 等
49
+ - 以及更多...
50
+
51
+ 如需查看完整的支持模型列表,请访问 [Vercel AI Gateway 模型](https://vercel.com/ai-gateway/models)。
52
+
53
+ ## API 配置
54
+
55
+ Vercel AI Gateway 使用 OpenAI 兼容的 API 格式。基础 URL 为:
56
+
57
+ ```
58
+ https://ai-gateway.vercel.sh/v1
59
+ ```
60
+
61
+ 您可以将任何兼容 OpenAI 的客户端与此端点及您的 API 密钥配合使用。
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lobehub/chat",
3
- "version": "1.128.10",
3
+ "version": "1.129.1",
4
4
  "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
5
5
  "keywords": [
6
6
  "framework",
package/packages/database/migrations/0031_add_agent_index.sql CHANGED
@@ -1,6 +1,12 @@
1
- -- 将超过 1000 字符的 description 截断为 1000 字符
1
+ -- Truncate title to 255 characters if it exceeds the limit
2
+ UPDATE agents
3
+ SET title = LEFT(title, 255)
4
+ WHERE LENGTH(title) > 255;--> statement-breakpoint
5
+
6
+ -- Truncate description to 1000 characters if it exceeds the limit
2
7
  UPDATE agents
3
8
  SET description = LEFT(description, 1000)
4
9
  WHERE LENGTH(description) > 1000;--> statement-breakpoint
10
+
5
11
  CREATE INDEX IF NOT EXISTS "agents_title_idx" ON "agents" USING btree ("title");--> statement-breakpoint
6
12
  CREATE INDEX IF NOT EXISTS "agents_description_idx" ON "agents" USING btree ("description");
package/packages/database/migrations/0032_improve_agents_field.sql CHANGED
@@ -1,6 +1,2 @@
1
- -- 将超过 255 字符的 title 截断为 255 字符
2
- UPDATE agents
3
- SET title = LEFT(title, 255)
4
- WHERE LENGTH(title) > 255;--> statement-breakpoint
5
1
  ALTER TABLE "agents" ALTER COLUMN "title" SET DATA TYPE varchar(255);--> statement-breakpoint
6
2
  ALTER TABLE "agents" ALTER COLUMN "description" SET DATA TYPE varchar(1000);
package/packages/database/migrations/0033_modern_mercury.sql ADDED
@@ -0,0 +1,18 @@
1
+ -- 解决 chunks 表慢查询
2
+ CREATE INDEX IF NOT EXISTS "chunks_user_id_idx" ON "chunks" USING btree ("user_id");--> statement-breakpoint
3
+
4
+ -- 解决 topics 表批量删除慢查询
5
+ CREATE INDEX IF NOT EXISTS "topics_user_id_idx" ON "topics" USING btree ("user_id");--> statement-breakpoint
6
+ CREATE INDEX IF NOT EXISTS "topics_id_user_id_idx" ON "topics" USING btree ("id","user_id");--> statement-breakpoint
7
+
8
+ -- 解决 sessions 表删除慢查询
9
+ CREATE INDEX IF NOT EXISTS "sessions_user_id_idx" ON "sessions" USING btree ("user_id");--> statement-breakpoint
10
+ CREATE INDEX IF NOT EXISTS "sessions_id_user_id_idx" ON "sessions" USING btree ("id","user_id");--> statement-breakpoint
11
+
12
+ -- 解决 messages 表统计查询慢查询
13
+ CREATE INDEX IF NOT EXISTS "messages_user_id_idx" ON "messages" USING btree ("user_id");--> statement-breakpoint
14
+ CREATE INDEX IF NOT EXISTS "messages_session_id_idx" ON "messages" USING btree ("session_id");--> statement-breakpoint
15
+ CREATE INDEX IF NOT EXISTS "messages_thread_id_idx" ON "messages" USING btree ("thread_id");--> statement-breakpoint
16
+
17
+ -- 解决 embeddings 删除慢查询
18
+ CREATE INDEX IF NOT EXISTS "embeddings_chunk_id_idx" ON "embeddings" USING btree ("chunk_id");--> statement-breakpoint