@lobehub/chat 1.36.15 → 1.36.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/package.json +1 -1
- package/src/database/repositories/dataImporter/index.ts +3 -2
- package/src/database/schemas/file.ts +1 -15
- package/src/database/schemas/relations.ts +17 -2
- package/src/database/server/models/__tests__/file.test.ts +26 -0
- package/src/database/server/models/file.ts +14 -1
- package/src/server/routers/lambda/file.ts +9 -16
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
 
 # Changelog
 
+### [Version 1.36.16](https://github.com/lobehub/lobe-chat/compare/v1.36.15...v1.36.16)
+
+<sup>Released on **2024-12-12**</sup>
+
+#### ♻ Code Refactoring
+
+- **misc**: Refactor the file model method.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Code refactoring
+
+- **misc**: Refactor the file model method, closes [#4998](https://github.com/lobehub/lobe-chat/issues/4998) ([00af34e](https://github.com/lobehub/lobe-chat/commit/00af34e))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.36.15](https://github.com/lobehub/lobe-chat/compare/v1.36.14...v1.36.15)
 
 <sup>Released on **2024-12-12**</sup>
package/changelog/v1.json
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.36.15",
+  "version": "1.36.16",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/src/database/repositories/dataImporter/index.ts
CHANGED
@@ -41,8 +41,8 @@ export class DataImporterRepos {
     let sessionIdMap: Record<string, string> = {};
     let topicIdMap: Record<string, string> = {};
 
-    // import sessionGroups
     await this.db.transaction(async (trx) => {
+      // import sessionGroups
       if (data.sessionGroups && data.sessionGroups.length > 0) {
         const query = await trx.query.sessionGroups.findMany({
           where: and(
@@ -164,10 +164,11 @@ export class DataImporterRepos {
      const mapArray = await trx
        .insert(topics)
        .values(
-          data.topics.map(({ id, createdAt, updatedAt, sessionId, ...res }) => ({
+          data.topics.map(({ id, createdAt, updatedAt, sessionId, favorite, ...res }) => ({
            ...res,
            clientId: id,
            createdAt: new Date(createdAt),
+            favorite: Boolean(favorite),
            sessionId: sessionId ? sessionIdMap[sessionId] : null,
            updatedAt: new Date(updatedAt),
            userId: this.userId,
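The added `favorite: Boolean(favorite)` coerces whatever the imported record carries (it may arrive as 0/1, `true`/`false`, or be missing entirely) into a real boolean before the Postgres insert. A minimal sketch of that coercion, with `ExportedTopic` as an assumed stand-in for the importer's input type:

```ts
// Hypothetical shape of an exported topic record; the real importer types
// live elsewhere in lobe-chat, this is only an illustration.
type ExportedTopic = { favorite?: boolean | number };

// Boolean() maps 1 → true, 0 → false, and undefined → false, which is
// exactly what the inserted `favorite: Boolean(favorite)` line relies on.
const normalizeFavorite = ({ favorite }: ExportedTopic): boolean => Boolean(favorite);

console.log(normalizeFavorite({ favorite: 1 })); // true
console.log(normalizeFavorite({ favorite: 0 })); // false
console.log(normalizeFavorite({}));              // false
```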
package/src/database/schemas/file.ts
CHANGED
@@ -12,9 +12,9 @@ import {
 import { createInsertSchema } from 'drizzle-zod';
 
 import { idGenerator } from '@/database/utils/idGenerator';
+
 import { accessedAt, createdAt, timestamps } from './_helpers';
 import { asyncTasks } from './asyncTask';
-import { chunks } from './rag';
 import { users } from './user';
 
 export const globalFiles = pgTable('global_files', {
@@ -59,20 +59,6 @@ export const files = pgTable('files', {
 export type NewFile = typeof files.$inferInsert;
 export type FileItem = typeof files.$inferSelect;
 
-export const fileChunks = pgTable(
-  'file_chunks',
-  {
-    fileId: varchar('file_id').references(() => files.id, { onDelete: 'cascade' }),
-    chunkId: uuid('chunk_id').references(() => chunks.id, { onDelete: 'cascade' }),
-    createdAt: createdAt(),
-  },
-  (t) => ({
-    pk: primaryKey({ columns: [t.fileId, t.chunkId] }),
-  }),
-);
-
-export type NewFileChunkItem = typeof fileChunks.$inferInsert;
-
 export const knowledgeBases = pgTable('knowledge_bases', {
   id: text('id')
     .$defaultFn(() => idGenerator('knowledgeBases'))
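Note that `fileChunks` and `NewFileChunkItem` are moved rather than deleted: they reappear essentially unchanged in `relations.ts` below. This lets `file.ts` drop its `chunks` import from `./rag`, removing a cross-dependency between the file and RAG schema modules.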
package/src/database/schemas/relations.ts
CHANGED
@@ -1,12 +1,14 @@
 /* eslint-disable sort-keys-fix/sort-keys-fix */
 import { relations } from 'drizzle-orm';
-import { pgTable, primaryKey, text } from 'drizzle-orm/pg-core';
+import { pgTable, primaryKey, text, uuid, varchar } from 'drizzle-orm/pg-core';
+
+import { createdAt } from '@/database/schemas/_helpers';
 
 import { agents, agentsFiles, agentsKnowledgeBases } from './agent';
 import { asyncTasks } from './asyncTask';
 import { files, knowledgeBases } from './file';
 import { messages, messagesFiles } from './message';
-import { unstructuredChunks } from './rag';
+import { chunks, unstructuredChunks } from './rag';
 import { sessionGroups, sessions } from './session';
 import { threads, topics } from './topic';
 
@@ -40,6 +42,19 @@ export const filesToSessions = pgTable(
   }),
 );
 
+export const fileChunks = pgTable(
+  'file_chunks',
+  {
+    fileId: varchar('file_id').references(() => files.id, { onDelete: 'cascade' }),
+    chunkId: uuid('chunk_id').references(() => chunks.id, { onDelete: 'cascade' }),
+    createdAt: createdAt(),
+  },
+  (t) => ({
+    pk: primaryKey({ columns: [t.fileId, t.chunkId] }),
+  }),
+);
+export type NewFileChunkItem = typeof fileChunks.$inferInsert;
+
 export const topicRelations = relations(topics, ({ one }) => ({
   session: one(sessions, {
     fields: [topics.sessionId],
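With `fileChunks` now living alongside the other cross-entity join tables, a file's chunks resolve through the composite-keyed table in the usual way. A minimal sketch of such a lookup, assuming a configured drizzle `db` instance and placement next to the schema files; the query itself is illustrative, not part of the diff:

```ts
import { eq } from 'drizzle-orm';
import type { NodePgDatabase } from 'drizzle-orm/node-postgres';

import { chunks } from './rag';
import { fileChunks } from './relations';

// Fetch every chunk linked to a given file through the file_chunks
// join table (composite PK on fileId + chunkId, both cascading deletes).
export const chunksForFile = (db: NodePgDatabase, fileId: string) =>
  db
    .select({ chunk: chunks })
    .from(fileChunks)
    .innerJoin(chunks, eq(fileChunks.chunkId, chunks.id))
    .where(eq(fileChunks.fileId, fileId));
```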
package/src/database/server/models/__tests__/file.test.ts
CHANGED
@@ -59,6 +59,32 @@ describe('FileModel', () => {
       });
       expect(kbFile).toMatchObject({ fileId: id, knowledgeBaseId: 'kb1' });
     });
+
+    it('should create a new file with hash', async () => {
+      const params = {
+        name: 'test-file.txt',
+        url: 'https://example.com/test-file.txt',
+        size: 100,
+        fileHash: 'abc',
+        fileType: 'text/plain',
+      };
+
+      const { id } = await fileModel.create(params, true);
+      expect(id).toBeDefined();
+
+      const file = await serverDB.query.files.findFirst({ where: eq(files.id, id) });
+      expect(file).toMatchObject({ ...params, userId });
+
+      const globalFile = await serverDB.query.globalFiles.findFirst({
+        where: eq(globalFiles.hashId, params.fileHash),
+      });
+      expect(globalFile).toMatchObject({
+        url: 'https://example.com/test-file.txt',
+        size: 100,
+        hashId: 'abc',
+        fileType: 'text/plain',
+      });
+    });
   });
 
   describe('createGlobalFile', () => {
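Read together with the model change below, the new test pins down the contract of the second argument: `create(params, true)` must produce both a user-owned `files` row and a `global_files` row keyed by the content hash.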
package/src/database/server/models/file.ts
CHANGED
@@ -26,8 +26,21 @@ export class FileModel {
     this.db = db;
   }
 
-  create = async (params: Omit<NewFile, 'id' | 'userId'> & { knowledgeBaseId?: string }) => {
+  create = async (
+    params: Omit<NewFile, 'id' | 'userId'> & { knowledgeBaseId?: string },
+    insertToGlobalFiles?: boolean,
+  ) => {
     const result = await this.db.transaction(async (trx) => {
+      if (insertToGlobalFiles) {
+        await trx.insert(globalFiles).values({
+          fileType: params.fileType,
+          hashId: params.fileHash!,
+          metadata: params.metadata,
+          size: params.size,
+          url: params.url,
+        });
+      }
+
       const result = await trx
         .insert(files)
         .values({ ...params, userId: this.userId })
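`create` now takes an optional `insertToGlobalFiles` flag and performs the `global_files` insert inside the same transaction as the `files` insert, so neither row is written if the other fails. A call-site sketch, with the model's surface reduced to what the diff shows (`FileModelLike` is a hypothetical stand-in):

```ts
// Hypothetical stand-in for FileModel's new surface; params mirror NewFile
// minus id/userId, as in the signature above.
interface FileModelLike {
  create(
    params: {
      fileHash?: string;
      fileType: string;
      knowledgeBaseId?: string;
      metadata?: unknown;
      name: string;
      size: number;
      url: string;
    },
    insertToGlobalFiles?: boolean,
  ): Promise<{ id: string }>;
}

async function saveUpload(fileModel: FileModelLike) {
  // Passing `true` asks create() to also register the content hash in
  // global_files, within the same transaction as the files insert.
  const { id } = await fileModel.create(
    {
      fileHash: 'abc',
      fileType: 'text/plain',
      name: 'test-file.txt',
      size: 100,
      url: 'https://example.com/test-file.txt',
    },
    true,
  );
  return id;
}
```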
package/src/server/routers/lambda/file.ts
CHANGED
@@ -38,26 +38,19 @@ export const fileRouter = router({
     .mutation(async ({ ctx, input }) => {
       const { isExist } = await ctx.fileModel.checkHash(input.hash!);
 
-      // if the file is not exist in global file, create a new one
-      if (!isExist) {
-        await ctx.fileModel.createGlobalFile({
+      const { id } = await ctx.fileModel.create(
+        {
+          fileHash: input.hash,
           fileType: input.fileType,
-          hashId: input.hash!,
+          knowledgeBaseId: input.knowledgeBaseId,
           metadata: input.metadata,
+          name: input.name,
           size: input.size,
           url: input.url,
-        });
-      }
-
-      const { id } = await ctx.fileModel.create({
-        fileHash: input.hash,
-        fileType: input.fileType,
-        knowledgeBaseId: input.knowledgeBaseId,
-        metadata: input.metadata,
-        name: input.name,
-        size: input.size,
-        url: input.url,
-      });
+        },
+        // if the file is not exist in global file, create a new one
+        !isExist,
+      );
 
       return { id, url: await getFullFileUrl(input.url) };
     }),
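Taken together with the model change, the router-side effect is deduplication by content hash: `checkHash` runs once, and `!isExist` tells `create` whether this content is being seen for the first time, in which case the `global_files` row is created alongside the user's `files` row. Repeat uploads of identical content add only a per-user `files` row.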