@lobehub/chat 1.113.0 → 1.113.2

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,56 @@

  # Changelog

+ ### [Version 1.113.2](https://github.com/lobehub/lobe-chat/compare/v1.113.1...v1.113.2)
+
+ <sup>Released on **2025-08-18**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **mcp**: Use customParams for environment settings fallback.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **mcp**: Use customParams for environment settings fallback, closes [#8814](https://github.com/lobehub/lobe-chat/issues/8814) ([ab043d4](https://github.com/lobehub/lobe-chat/commit/ab043d4))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
+ ### [Version 1.113.1](https://github.com/lobehub/lobe-chat/compare/v1.113.0...v1.113.1)
+
+ <sup>Released on **2025-08-17**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **db**: Desktop local db can't vectorization.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **db**: Desktop local db can't vectorization, closes [#8830](https://github.com/lobehub/lobe-chat/issues/8830) ([a00fd9d](https://github.com/lobehub/lobe-chat/commit/a00fd9d))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ## [Version 1.113.0](https://github.com/lobehub/lobe-chat/compare/v1.112.5...v1.113.0)

  <sup>Released on **2025-08-17**</sup>
package/README.md CHANGED
@@ -255,6 +255,7 @@ We have implemented support for the following model service providers:
  - **[GitHub](https://lobechat.com/discover/provider/github)**: With GitHub Models, developers can become AI engineers and leverage the industry's leading AI models.
  - **[Novita](https://lobechat.com/discover/provider/novita)**: Novita AI is a platform providing a variety of large language models and AI image generation API services, flexible, reliable, and cost-effective. It supports the latest open-source models like Llama3 and Mistral, offering a comprehensive, user-friendly, and auto-scaling API solution for generative AI application development, suitable for the rapid growth of AI startups.
  - **[PPIO](https://lobechat.com/discover/provider/ppio)**: PPIO supports stable and cost-efficient open-source LLM APIs, such as DeepSeek, Llama, Qwen etc.
+ - **[302.AI](https://lobechat.com/discover/provider/ai302)**: 302.AI is an on-demand AI application platform offering the most comprehensive AI APIs and online AI applications available on the market.
  - **[Together AI](https://lobechat.com/discover/provider/togetherai)**: Together AI is dedicated to achieving leading performance through innovative AI models, offering extensive customization capabilities, including rapid scaling support and intuitive deployment processes to meet various enterprise needs.
  - **[Fireworks AI](https://lobechat.com/discover/provider/fireworksai)**: Fireworks AI is a leading provider of advanced language model services, focusing on functional calling and multimodal processing. Its latest model, Firefunction V2, is based on Llama-3, optimized for function calling, conversation, and instruction following. The visual language model FireLLaVA-13B supports mixed input of images and text. Other notable models include the Llama series and Mixtral series, providing efficient multilingual instruction following and generation support.
  - **[Groq](https://lobechat.com/discover/provider/groq)**: Groq's LPU inference engine has excelled in the latest independent large language model (LLM) benchmarks, redefining the standards for AI solutions with its remarkable speed and efficiency. Groq represents instant inference speed, demonstrating strong performance in cloud-based deployments.
@@ -283,7 +284,6 @@ We have implemented support for the following model service providers:
  - **[Search1API](https://lobechat.com/discover/provider/search1api)**: Search1API provides access to the DeepSeek series of models that can connect to the internet as needed, including standard and fast versions, supporting a variety of model sizes.
  - **[InfiniAI](https://lobechat.com/discover/provider/infiniai)**: Provides high-performance, easy-to-use, and secure large model services for application developers, covering the entire process from large model development to service deployment.
  - **[Qiniu](https://lobechat.com/discover/provider/qiniu)**: Qiniu, as a long-established cloud service provider, delivers cost-effective and reliable AI inference services for both real-time and batch processing, with a simple and user-friendly experience.
- - **[302.AI](https://lobechat.com/discover/provider/ai302)**: 302.AI is an on-demand AI application platform offering the most comprehensive AI APIs and online AI applications available on the market.

  </details>

package/README.zh-CN.md CHANGED
@@ -255,6 +255,7 @@ LobeChat 支持文件上传与知识库功能,你可以上传文件、图片
  - **[GitHub](https://lobechat.com/discover/provider/github)**: 通过 GitHub 模型,开发人员可以成为 AI 工程师,并使用行业领先的 AI 模型进行构建。
  - **[Novita](https://lobechat.com/discover/provider/novita)**: Novita AI 是一个提供多种大语言模型与 AI 图像生成的 API 服务的平台,灵活、可靠且具有成本效益。它支持 Llama3、Mistral 等最新的开源模型,并为生成式 AI 应用开发提供了全面、用户友好且自动扩展的 API 解决方案,适合 AI 初创公司的快速发展。
  - **[PPIO](https://lobechat.com/discover/provider/ppio)**: PPIO 派欧云提供稳定、高性价比的开源模型 API 服务,支持 DeepSeek 全系列、Llama、Qwen 等行业领先大模型。
+ - **[302.AI](https://lobechat.com/discover/provider/ai302)**: 302.AI 是一个按需付费的 AI 应用平台,提供市面上最全的 AI API 和 AI 在线应用
  - **[Together AI](https://lobechat.com/discover/provider/togetherai)**: Together AI 致力于通过创新的 AI 模型实现领先的性能,提供广泛的自定义能力,包括快速扩展支持和直观的部署流程,满足企业的各种需求。
  - **[Fireworks AI](https://lobechat.com/discover/provider/fireworksai)**: Fireworks AI 是一家领先的高级语言模型服务商,专注于功能调用和多模态处理。其最新模型 Firefunction V2 基于 Llama-3,优化用于函数调用、对话及指令跟随。视觉语言模型 FireLLaVA-13B 支持图像和文本混合输入。其他 notable 模型包括 Llama 系列和 Mixtral 系列,提供高效的多语言指令跟随与生成支持。
  - **[Groq](https://lobechat.com/discover/provider/groq)**: Groq 的 LPU 推理引擎在最新的独立大语言模型(LLM)基准测试中表现卓越,以其惊人的速度和效率重新定义了 AI 解决方案的标准。Groq 是一种即时推理速度的代表,在基于云的部署中展现了良好的性能。
@@ -283,7 +284,6 @@ LobeChat 支持文件上传与知识库功能,你可以上传文件、图片
  - **[Search1API](https://lobechat.com/discover/provider/search1api)**: Search1API 提供可根据需要自行联网的 DeepSeek 系列模型的访问,包括标准版和快速版本,支持多种参数规模的模型选择。
  - **[InfiniAI](https://lobechat.com/discover/provider/infiniai)**: 为应用开发者提供高性能、易上手、安全可靠的大模型服务,覆盖从大模型开发到大模型服务化部署的全流程。
  - **[Qiniu](https://lobechat.com/discover/provider/qiniu)**: 七牛作为老牌云服务厂商,提供高性价比稳定的实时、批量 AI 推理服务,简单易用。
- - **[302.AI](https://lobechat.com/discover/provider/ai302)**: 302.AI 是一个按需付费的 AI 应用平台,提供市面上最全的 AI API 和 AI 在线应用

  </details>

package/changelog/v1.json CHANGED
@@ -1,4 +1,14 @@
  [
+ {
+ "children": {},
+ "date": "2025-08-18",
+ "version": "1.113.2"
+ },
+ {
+ "children": {},
+ "date": "2025-08-17",
+ "version": "1.113.1"
+ },
  {
  "children": {},
  "date": "2025-08-17",
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lobehub/chat",
- "version": "1.113.0",
+ "version": "1.113.2",
  "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
  "keywords": [
  "framework",
@@ -121,7 +121,7 @@
  "dependencies": {
  "@ant-design/icons": "^5.6.1",
  "@ant-design/pro-components": "^2.8.10",
- "@anthropic-ai/sdk": "^0.59.0",
+ "@anthropic-ai/sdk": "^0.60.0",
  "@auth/core": "^0.40.0",
  "@aws-sdk/client-s3": "^3.862.0",
  "@aws-sdk/s3-request-presigner": "^3.862.0",
@@ -1,6 +1,7 @@
  import { NextResponse } from 'next/server';

  import { authEnv } from '@/config/auth';
+ import { serverDB } from '@/database/server';
  import { pino } from '@/libs/logger';
  import { NextAuthUserService } from '@/server/services/nextAuthUser';

@@ -18,7 +19,7 @@ export const POST = async (req: Request): Promise<NextResponse> => {

  const { action, object } = payload;

- const nextAuthUserService = new NextAuthUserService();
+ const nextAuthUserService = new NextAuthUserService(serverDB);
  switch (action) {
  case 'update-user': {
  return nextAuthUserService.safeUpdateUser(
@@ -2,6 +2,7 @@ import { NextResponse } from 'next/server';

  import { authEnv } from '@/config/auth';
  import { isServerMode } from '@/const/version';
+ import { serverDB } from '@/database/server';
  import { pino } from '@/libs/logger';
  import { UserService } from '@/server/services/user';

@@ -25,7 +26,7 @@ export const POST = async (req: Request): Promise<NextResponse> => {

  pino.trace(`clerk webhook payload: ${{ data, type }}`);

- const userService = new UserService();
+ const userService = new UserService(serverDB);
  switch (type) {
  case 'user.created': {
  pino.info('creating user due to clerk webhook');
@@ -1,6 +1,7 @@
  import { NextResponse } from 'next/server';

  import { authEnv } from '@/config/auth';
+ import { serverDB } from '@/database/server';
  import { pino } from '@/libs/logger';
  import { NextAuthUserService } from '@/server/services/nextAuthUser';

@@ -20,7 +21,7 @@ export const POST = async (req: Request): Promise<NextResponse> => {

  pino.trace(`logto webhook payload: ${{ data, event }}`);

- const nextAuthUserService = new NextAuthUserService();
+ const nextAuthUserService = new NextAuthUserService(serverDB);
  switch (event) {
  case 'User.Data.Updated': {
  return nextAuthUserService.safeUpdateUser(
@@ -1,3 +1,4 @@
+ import { serverDB } from '@/database/server';
  import { UserService } from '@/server/services/user';

  export const runtime = 'nodejs';
@@ -31,7 +32,7 @@ export const GET = async (req: Request, segmentData: { params: Params }) => {
  try {
  const params = await segmentData.params;
  const type = getContentType(params.image);
- const userService = new UserService();
+ const userService = new UserService(serverDB);

  const userAvatar = await userService.getUserAvatar(params.id, params.image);
  if (!userAvatar) {
@@ -136,6 +136,7 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
  BflProvider,
  NovitaProvider,
  PPIOProvider,
+ Ai302Provider,
  NvidiaProvider,
  TogetherAIProvider,
  FireworksAIProvider,
@@ -172,7 +173,6 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
  Search1APIProvider,
  InfiniAIProvider,
  QiniuProvider,
- Ai302Provider,
  ];

  export const filterEnabledModels = (provider: ModelProviderCard) => {
@@ -1,18 +1,20 @@
  import { and, desc, eq } from 'drizzle-orm';

  import { NewEvalDatasetsItem, evalDatasets } from '@/database/schemas';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';
  import { RAGEvalDataSetItem } from '@/types/eval';

  export class EvalDatasetModel {
  private userId: string;
+ private db: LobeChatDatabase;

- constructor(userId: string) {
+ constructor(db: LobeChatDatabase, userId: string) {
+ this.db = db;
  this.userId = userId;
  }

  create = async (params: NewEvalDatasetsItem) => {
- const [result] = await serverDB
+ const [result] = await this.db
  .insert(evalDatasets)
  .values({ ...params, userId: this.userId })
  .returning();
@@ -20,13 +22,13 @@ export class EvalDatasetModel {
  };

  delete = async (id: number) => {
- return serverDB
+ return this.db
  .delete(evalDatasets)
  .where(and(eq(evalDatasets.id, id), eq(evalDatasets.userId, this.userId)));
  };

  query = async (knowledgeBaseId: string): Promise<RAGEvalDataSetItem[]> => {
- return serverDB
+ return this.db
  .select({
  createdAt: evalDatasets.createdAt,
  description: evalDatasets.description,
@@ -45,13 +47,13 @@ export class EvalDatasetModel {
  };

  findById = async (id: number) => {
- return serverDB.query.evalDatasets.findFirst({
+ return this.db.query.evalDatasets.findFirst({
  where: and(eq(evalDatasets.id, id), eq(evalDatasets.userId, this.userId)),
  });
  };

  update = async (id: number, value: Partial<NewEvalDatasetsItem>) => {
- return serverDB
+ return this.db
  .update(evalDatasets)
  .set({ ...value, updatedAt: new Date() })
  .where(and(eq(evalDatasets.id, id), eq(evalDatasets.userId, this.userId)));
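
The RAG-eval models in this release all receive the same mechanical change: instead of importing the global `serverDB` singleton, each class now takes a `LobeChatDatabase` handle through its constructor, so the same query code can run against the server database or the desktop's local database (the vectorization issue tracked in #8830). A minimal sketch of the pattern, using a hypothetical `notes` table rather than the project's real schemas:

```ts
import { and, eq } from 'drizzle-orm';
import { pgTable, serial, text } from 'drizzle-orm/pg-core';

import { LobeChatDatabase } from '@/database/type';

// Hypothetical table, only to keep the sketch self-contained.
const notes = pgTable('notes', {
  content: text('content'),
  id: serial('id').primaryKey(),
  userId: text('user_id').notNull(),
});

class NoteModel {
  private db: LobeChatDatabase;
  private userId: string;

  // The caller decides which database backs the model (server DB, desktop
  // local DB, or a test double) instead of the class importing a singleton.
  constructor(db: LobeChatDatabase, userId: string) {
    this.db = db;
    this.userId = userId;
  }

  findById = async (id: number) =>
    this.db
      .select()
      .from(notes)
      .where(and(eq(notes.id, id), eq(notes.userId, this.userId)));
}
```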
@@ -1,18 +1,20 @@
  import { and, eq, inArray } from 'drizzle-orm';

  import { NewEvalDatasetRecordsItem, evalDatasetRecords, files } from '@/database/schemas';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';
  import { EvalDatasetRecordRefFile } from '@/types/eval';

  export class EvalDatasetRecordModel {
  private userId: string;
+ private db: LobeChatDatabase;

- constructor(userId: string) {
+ constructor(db: LobeChatDatabase, userId: string) {
+ this.db = db;
  this.userId = userId;
  }

  create = async (params: NewEvalDatasetRecordsItem) => {
- const [result] = await serverDB
+ const [result] = await this.db
  .insert(evalDatasetRecords)
  .values({ ...params, userId: this.userId })
  .returning();
@@ -20,7 +22,7 @@ export class EvalDatasetRecordModel {
  };

  batchCreate = async (params: NewEvalDatasetRecordsItem[]) => {
- const [result] = await serverDB
+ const [result] = await this.db
  .insert(evalDatasetRecords)
  .values(params.map((item) => ({ ...item, userId: this.userId })))
  .returning();
@@ -29,13 +31,13 @@ export class EvalDatasetRecordModel {
  };

  delete = async (id: number) => {
- return serverDB
+ return this.db
  .delete(evalDatasetRecords)
  .where(and(eq(evalDatasetRecords.id, id), eq(evalDatasetRecords.userId, this.userId)));
  };

  query = async (datasetId: number) => {
- const list = await serverDB.query.evalDatasetRecords.findMany({
+ const list = await this.db.query.evalDatasetRecords.findMany({
  where: and(
  eq(evalDatasetRecords.datasetId, datasetId),
  eq(evalDatasetRecords.userId, this.userId),
@@ -43,7 +45,7 @@ export class EvalDatasetRecordModel {
  });
  const fileList = list.flatMap((item) => item.referenceFiles).filter(Boolean) as string[];

- const fileItems = await serverDB
+ const fileItems = await this.db
  .select({ fileType: files.fileType, id: files.id, name: files.name })
  .from(files)
  .where(and(inArray(files.id, fileList), eq(files.userId, this.userId)));
@@ -59,7 +61,7 @@ export class EvalDatasetRecordModel {
  };

  findByDatasetId = async (datasetId: number) => {
- return serverDB.query.evalDatasetRecords.findMany({
+ return this.db.query.evalDatasetRecords.findMany({
  where: and(
  eq(evalDatasetRecords.datasetId, datasetId),
  eq(evalDatasetRecords.userId, this.userId),
@@ -68,13 +70,13 @@ export class EvalDatasetRecordModel {
  };

  findById = async (id: number) => {
- return serverDB.query.evalDatasetRecords.findFirst({
+ return this.db.query.evalDatasetRecords.findFirst({
  where: and(eq(evalDatasetRecords.id, id), eq(evalDatasetRecords.userId, this.userId)),
  });
  };

  update = async (id: number, value: Partial<NewEvalDatasetRecordsItem>) => {
- return serverDB
+ return this.db
  .update(evalDatasetRecords)
  .set(value)
  .where(and(eq(evalDatasetRecords.id, id), eq(evalDatasetRecords.userId, this.userId)));
@@ -6,18 +6,20 @@ import {
  evalEvaluation,
  evaluationRecords,
  } from '@/database/schemas';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';
  import { EvalEvaluationStatus, RAGEvalEvaluationItem } from '@/types/eval';

  export class EvalEvaluationModel {
  private userId: string;
+ private db: LobeChatDatabase;

- constructor(userId: string) {
+ constructor(db: LobeChatDatabase, userId: string) {
+ this.db = db;
  this.userId = userId;
  }

  create = async (params: NewEvalEvaluationItem) => {
- const [result] = await serverDB
+ const [result] = await this.db
  .insert(evalEvaluation)
  .values({ ...params, userId: this.userId })
  .returning();
@@ -25,13 +27,13 @@ export class EvalEvaluationModel {
  };

  delete = async (id: number) => {
- return serverDB
+ return this.db
  .delete(evalEvaluation)
  .where(and(eq(evalEvaluation.id, id), eq(evalEvaluation.userId, this.userId)));
  };

  queryByKnowledgeBaseId = async (knowledgeBaseId: string) => {
- const evaluations = await serverDB
+ const evaluations = await this.db
  .select({
  createdAt: evalEvaluation.createdAt,
  dataset: {
@@ -57,7 +59,7 @@ export class EvalEvaluationModel {
  // 然后查询每个评估的记录统计
  const evaluationIds = evaluations.map((evals) => evals.id);

- const recordStats = await serverDB
+ const recordStats = await this.db
  .select({
  evaluationId: evaluationRecords.evaluationId,
  success: count(evaluationRecords.status).if(
@@ -82,13 +84,13 @@ export class EvalEvaluationModel {
  };

  findById = async (id: number) => {
- return serverDB.query.evalEvaluation.findFirst({
+ return this.db.query.evalEvaluation.findFirst({
  where: and(eq(evalEvaluation.id, id), eq(evalEvaluation.userId, this.userId)),
  });
  };

  update = async (id: number, value: Partial<NewEvalEvaluationItem>) => {
- return serverDB
+ return this.db
  .update(evalEvaluation)
  .set(value)
  .where(and(eq(evalEvaluation.id, id), eq(evalEvaluation.userId, this.userId)));
@@ -1,17 +1,19 @@
  import { and, eq } from 'drizzle-orm';

  import { NewEvaluationRecordsItem, evaluationRecords } from '@/database/schemas';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';

  export class EvaluationRecordModel {
  private userId: string;
+ private db: LobeChatDatabase;

- constructor(userId: string) {
+ constructor(db: LobeChatDatabase, userId: string) {
+ this.db = db;
  this.userId = userId;
  }

  create = async (params: NewEvaluationRecordsItem) => {
- const [result] = await serverDB
+ const [result] = await this.db
  .insert(evaluationRecords)
  .values({ ...params, userId: this.userId })
  .returning();
@@ -19,20 +21,20 @@ export class EvaluationRecordModel {
  };

  batchCreate = async (params: NewEvaluationRecordsItem[]) => {
- return serverDB
+ return this.db
  .insert(evaluationRecords)
  .values(params.map((item) => ({ ...item, userId: this.userId })))
  .returning();
  };

  delete = async (id: number) => {
- return serverDB
+ return this.db
  .delete(evaluationRecords)
  .where(and(eq(evaluationRecords.id, id), eq(evaluationRecords.userId, this.userId)));
  };

  query = async (reportId: number) => {
- return serverDB.query.evaluationRecords.findMany({
+ return this.db.query.evaluationRecords.findMany({
  where: and(
  eq(evaluationRecords.evaluationId, reportId),
  eq(evaluationRecords.userId, this.userId),
@@ -41,13 +43,13 @@ export class EvaluationRecordModel {
  };

  findById = async (id: number) => {
- return serverDB.query.evaluationRecords.findFirst({
+ return this.db.query.evaluationRecords.findFirst({
  where: and(eq(evaluationRecords.id, id), eq(evaluationRecords.userId, this.userId)),
  });
  };

  findByEvaluationId = async (evaluationId: number) => {
- return serverDB.query.evaluationRecords.findMany({
+ return this.db.query.evaluationRecords.findMany({
  where: and(
  eq(evaluationRecords.evaluationId, evaluationId),
  eq(evaluationRecords.userId, this.userId),
@@ -56,7 +58,7 @@ export class EvaluationRecordModel {
  };

  update = async (id: number, value: Partial<NewEvaluationRecordsItem>) => {
- return serverDB
+ return this.db
  .update(evaluationRecords)
  .set(value)
  .where(and(eq(evaluationRecords.id, id), eq(evaluationRecords.userId, this.userId)));
@@ -32,7 +32,7 @@ const fileProcedure = asyncAuthedProcedure.use(async (opts) => {
  ctx: {
  asyncTaskModel: new AsyncTaskModel(ctx.serverDB, ctx.userId),
  chunkModel: new ChunkModel(ctx.serverDB, ctx.userId),
- chunkService: new ChunkService(ctx.userId),
+ chunkService: new ChunkService(ctx.serverDB, ctx.userId),
  embeddingModel: new EmbeddingModel(ctx.serverDB, ctx.userId),
  fileModel: new FileModel(ctx.serverDB, ctx.userId),
  fileService: new FileService(ctx.serverDB, ctx.userId),
@@ -25,11 +25,11 @@ const ragEvalProcedure = asyncAuthedProcedure.use(async (opts) => {
  return opts.next({
  ctx: {
  chunkModel: new ChunkModel(ctx.serverDB, ctx.userId),
- chunkService: new ChunkService(ctx.userId),
- datasetRecordModel: new EvalDatasetRecordModel(ctx.userId),
+ chunkService: new ChunkService(ctx.serverDB, ctx.userId),
+ datasetRecordModel: new EvalDatasetRecordModel(ctx.serverDB, ctx.userId),
  embeddingModel: new EmbeddingModel(ctx.serverDB, ctx.userId),
- evalRecordModel: new EvaluationRecordModel(ctx.userId),
- evaluationModel: new EvalEvaluationModel(ctx.userId),
+ evalRecordModel: new EvaluationRecordModel(ctx.serverDB, ctx.userId),
+ evaluationModel: new EvalEvaluationModel(ctx.serverDB, ctx.userId),
  fileModel: new FileModel(ctx.serverDB, ctx.userId),
  },
  });
@@ -26,7 +26,7 @@ const chunkProcedure = authedProcedure
  ctx: {
  asyncTaskModel: new AsyncTaskModel(ctx.serverDB, ctx.userId),
  chunkModel: new ChunkModel(ctx.serverDB, ctx.userId),
- chunkService: new ChunkService(ctx.userId),
+ chunkService: new ChunkService(ctx.serverDB, ctx.userId),
  embeddingModel: new EmbeddingModel(ctx.serverDB, ctx.userId),
  fileModel: new FileModel(ctx.serverDB, ctx.userId),
  messageModel: new MessageModel(ctx.serverDB, ctx.userId),
@@ -35,11 +35,11 @@ const ragEvalProcedure = authedProcedure

  return opts.next({
  ctx: {
- datasetModel: new EvalDatasetModel(ctx.userId),
+ datasetModel: new EvalDatasetModel(ctx.serverDB, ctx.userId),
  fileModel: new FileModel(ctx.serverDB, ctx.userId),
- datasetRecordModel: new EvalDatasetRecordModel(ctx.userId),
- evaluationModel: new EvalEvaluationModel(ctx.userId),
- evaluationRecordModel: new EvaluationRecordModel(ctx.userId),
+ datasetRecordModel: new EvalDatasetRecordModel(ctx.serverDB, ctx.userId),
+ evaluationModel: new EvalEvaluationModel(ctx.serverDB, ctx.userId),
+ evaluationRecordModel: new EvaluationRecordModel(ctx.serverDB, ctx.userId),
  fileService: new FileService(ctx.serverDB, ctx.userId),
  },
  });
@@ -58,7 +58,7 @@ export const userRouter = router({
  if (enableClerk) {
  const user = await ctx.clerkAuth.getCurrentUser();
  if (user) {
- const userService = new UserService();
+ const userService = new UserService(ctx.serverDB);

  await userService.createUser(user.id, {
  created_at: user.createdAt,
@@ -1,7 +1,7 @@
  import { ClientSecretPayload } from '@/const/auth';
  import { AsyncTaskModel } from '@/database/models/asyncTask';
  import { FileModel } from '@/database/models/file';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';
  import { ChunkContentParams, ContentChunk } from '@/server/modules/ContentChunk';
  import { createAsyncCaller } from '@/server/routers/async';
  import {
@@ -17,7 +17,7 @@ export class ChunkService {
  private fileModel: FileModel;
  private asyncTaskModel: AsyncTaskModel;

- constructor(userId: string) {
+ constructor(serverDB: LobeChatDatabase, userId: string) {
  this.userId = userId;

  this.chunkClient = new ContentChunk();
@@ -24,7 +24,7 @@ describe('NextAuthUserService', () => {

  beforeEach(async () => {
  vi.clearAllMocks();
- service = new NextAuthUserService();
+ service = new NextAuthUserService(serverDB);
  });

  describe('safeUpdateUser', () => {
@@ -2,15 +2,17 @@ import { NextResponse } from 'next/server';

  import { UserModel } from '@/database/models/user';
  import { UserItem } from '@/database/schemas';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';
  import { pino } from '@/libs/logger';
  import { LobeNextAuthDbAdapter } from '@/libs/next-auth/adapter';

  export class NextAuthUserService {
  adapter;
+ private db: LobeChatDatabase;

- constructor() {
- this.adapter = LobeNextAuthDbAdapter(serverDB);
+ constructor(db: LobeChatDatabase) {
+ this.db = db;
+ this.adapter = LobeNextAuthDbAdapter(db);
  }

  safeUpdateUser = async (
@@ -27,7 +29,7 @@ export class NextAuthUserService {

  // 2. If found, Update user data from provider
  if (user?.id) {
- const userModel = new UserModel(serverDB, user.id);
+ const userModel = new UserModel(this.db, user.id);

  // Perform update
  await userModel.updateUser({
@@ -3,6 +3,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';

  import { UserModel } from '@/database/models/user';
  import { UserItem } from '@/database/schemas';
+ import { LobeChatDatabase } from '@/database/type';
  import { pino } from '@/libs/logger';
  import { AgentService } from '@/server/services/agent';

@@ -46,6 +47,7 @@ vi.mock('@/server/services/agent', () => ({

  let service: UserService;
  const mockUserId = 'test-user-id';
+ const mockDB = {} as LobeChatDatabase;

  // Mock user data
  const mockUserJSON: UserJSON = {
@@ -62,7 +64,7 @@ const mockUserJSON: UserJSON = {
  } as unknown as UserJSON;

  beforeEach(() => {
- service = new UserService();
+ service = new UserService(mockDB);
  vi.clearAllMocks();
  });

@@ -1,7 +1,7 @@
  import { UserJSON } from '@clerk/backend';

  import { UserModel } from '@/database/models/user';
- import { serverDB } from '@/database/server';
+ import { LobeChatDatabase } from '@/database/type';
  import { initializeServerAnalytics } from '@/libs/analytics';
  import { pino } from '@/libs/logger';
  import { KeyVaultsGateKeeper } from '@/server/modules/KeyVaultsEncrypt';
@@ -9,9 +9,15 @@ import { S3 } from '@/server/modules/S3';
  import { AgentService } from '@/server/services/agent';

  export class UserService {
+ private db: LobeChatDatabase;
+
+ constructor(db: LobeChatDatabase) {
+ this.db = db;
+ }
+
  createUser = async (id: string, params: UserJSON) => {
  // Check if user already exists
- const res = await UserModel.findById(serverDB, id);
+ const res = await UserModel.findById(this.db, id);

  // If user already exists, skip creating a new user
  if (res)
@@ -33,7 +39,7 @@ export class UserService {
  /* ↑ cloud slot ↑ */

  // 2. create user in database
- await UserModel.createUser(serverDB, {
+ await UserModel.createUser(this.db, {
  avatar: params.image_url,
  clerkCreatedAt: new Date(params.created_at),
  email: email?.email_address,
@@ -45,7 +51,7 @@ export class UserService {
  });

  // 3. Create an inbox session for the user
- const agentService = new AgentService(serverDB, id);
+ const agentService = new AgentService(this.db, id);
  await agentService.createInbox();

  /* ↓ cloud slot ↓ */
@@ -73,14 +79,14 @@ export class UserService {
  };

  deleteUser = async (id: string) => {
- await UserModel.deleteUser(serverDB, id);
+ await UserModel.deleteUser(this.db, id);
  };

  updateUser = async (id: string, params: UserJSON) => {
- const userModel = new UserModel(serverDB, id);
+ const userModel = new UserModel(this.db, id);

  // Check if user already exists
- const res = await UserModel.findById(serverDB, id);
+ const res = await UserModel.findById(this.db, id);

  // If user not exists, skip update the user
  if (!res)
@@ -111,7 +117,7 @@ export class UserService {
  };

  getUserApiKeys = async (id: string) => {
- return UserModel.getUserApiKeys(serverDB, id, KeyVaultsGateKeeper.getUserKeyVaults);
+ return UserModel.getUserApiKeys(this.db, id, KeyVaultsGateKeeper.getUserKeyVaults);
  };

  getUserAvatar = async (id: string, image: string) => {
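
At the call sites shown earlier (the Clerk/Logto webhooks, the avatar route, and the tRPC user router), the only visible change is that a database handle must now be supplied when the service is constructed. A hedged sketch of that call-site change, reusing imports that appear in the diff; `userJSON` is a hypothetical stand-in for the webhook payload:

```ts
import { UserJSON } from '@clerk/backend';

import { serverDB } from '@/database/server';
import { UserService } from '@/server/services/user';

// Hypothetical payload stand-in; the real routes receive this from the webhook body.
declare const userJSON: UserJSON;

// Before: const userService = new UserService();
const userService = new UserService(serverDB);
await userService.createUser('user_123', userJSON);
```

Passing the handle explicitly is also what lets the updated test construct the service with `{} as LobeChatDatabase` instead of mocking the `@/database/server` module.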
@@ -45,7 +45,7 @@ class MCPService {

  const data = {
  args,
- env: plugin.settings,
+ env: plugin.settings || plugin.customParams?.mcp?.env,
  params: { ...plugin.customParams?.mcp, name: identifier } as any,
  toolName: apiName,
  };
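
The last hunk is the bug fix that 1.113.2 ships (#8814): when an MCP plugin has no `settings`, its environment variables are now taken from `customParams.mcp.env` instead of being left undefined. A minimal sketch of the resolution order, with a simplified plugin shape (only the two fields involved are modeled):

```ts
// Simplified shape for illustration; the real plugin object carries many more fields.
interface InstalledMcpPluginLike {
  customParams?: { mcp?: { env?: Record<string, string> } };
  settings?: Record<string, string>;
}

// Mirrors the fallback in the hunk above: prefer `settings`, otherwise use the
// env stored under `customParams.mcp`.
const resolveMcpEnv = (plugin: InstalledMcpPluginLike) =>
  plugin.settings || plugin.customParams?.mcp?.env;

// A plugin configured only through customParams now gets its env forwarded:
resolveMcpEnv({ customParams: { mcp: { env: { MY_TOKEN: 'example' } } } });
// -> { MY_TOKEN: 'example' }

// A plugin with explicit settings keeps using them:
resolveMcpEnv({ settings: { MY_TOKEN: 'from-settings' } });
// -> { MY_TOKEN: 'from-settings' }
```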