@lobehub/lobehub 2.0.0-next.135 → 2.0.0-next.137
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/apps/desktop/package.json +4 -4
- package/changelog/v1.json +18 -0
- package/docs/development/database-schema.dbml +5 -0
- package/e2e/package.json +2 -2
- package/package.json +44 -44
- package/packages/database/migrations/0052_topic_and_messages.sql +9 -0
- package/packages/database/migrations/meta/0052_snapshot.json +8850 -0
- package/packages/database/migrations/meta/_journal.json +7 -0
- package/packages/database/src/core/migrations.json +15 -0
- package/packages/database/src/models/__tests__/topic.test.ts +12 -6
- package/packages/database/src/schemas/message.ts +5 -2
- package/packages/database/src/schemas/topic.ts +5 -0
- package/packages/model-runtime/package.json +2 -2
- package/packages/model-runtime/src/core/openaiCompatibleFactory/index.ts +33 -12
- package/packages/python-interpreter/src/worker.ts +18 -18
- package/packages/web-crawler/package.json +1 -1
- package/src/app/[variants]/(main)/settings/provider/(list)/ProviderGrid/index.tsx +19 -0
- package/src/app/[variants]/(main)/settings/provider/ProviderMenu/List.tsx +200 -73
- package/src/app/[variants]/(main)/settings/provider/detail/default/ClientMode.tsx +1 -1
- package/src/locales/default/modelProvider.ts +2 -0
- package/src/services/chat/chat.test.ts +2 -1
- package/src/services/chat/index.ts +7 -4
- package/src/store/aiInfra/slices/aiProvider/__tests__/selectors.test.ts +15 -7
- package/src/store/aiInfra/slices/aiProvider/selectors.ts +6 -2

package/packages/database/migrations/meta/_journal.json

@@ -364,6 +364,13 @@
       "when": 1764335703306,
       "tag": "0051_add_market_into_user_settings",
       "breakpoints": true
+    },
+    {
+      "idx": 52,
+      "version": "7",
+      "when": 1764500630663,
+      "tag": "0052_topic_and_messages",
+      "breakpoints": true
     }
   ],
   "version": "6"

package/packages/database/src/core/migrations.json

@@ -844,5 +844,20 @@
     "bps": true,
     "folderMillis": 1764335703306,
     "hash": "28c0d738c0b1fdf5fd871363be1a1477b4accbabdc140fe8dc6e9b339aae2c89"
+  },
+  {
+    "sql": [
+      "ALTER TABLE \"messages\" DROP CONSTRAINT \"messages_agent_id_agents_id_fk\";\n",
+      "\nALTER TABLE \"messages\" ADD COLUMN IF NOT EXISTS \"editor_data\" jsonb;",
+      "\nALTER TABLE \"topics\" ADD COLUMN IF NOT EXISTS \"content\" text;",
+      "\nALTER TABLE \"topics\" ADD COLUMN IF NOT EXISTS \"editor_data\" jsonb;",
+      "\nALTER TABLE \"topics\" ADD COLUMN IF NOT EXISTS \"agent_id\" text;",
+      "\nALTER TABLE \"messages\" ADD CONSTRAINT \"messages_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;",
+      "\nALTER TABLE \"topics\" ADD CONSTRAINT \"topics_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;",
+      "\nCREATE INDEX IF NOT EXISTS \"topics_agent_id_idx\" ON \"topics\" USING btree (\"agent_id\");\n"
+    ],
+    "bps": true,
+    "folderMillis": 1764500630663,
+    "hash": "94721bc06910a456a4756c9b0c27ef5d7ff55b7ea8c772acf58052c0155c693b"
   }
 ]

package/packages/database/src/models/__tests__/topic.test.ts

@@ -366,9 +366,9 @@ describe('TopicModel', () => {
       await topicModel.batchDeleteByGroupId('group1');

       // Assert that the topics belonging to group1 were deleted
-      expect(
-
-      )
+      expect(await serverDB.select().from(topics).where(eq(topics.groupId, 'group1'))).toHaveLength(
+        0,
+      );
       expect(await serverDB.select().from(topics)).toHaveLength(2);
     });


@@ -385,9 +385,9 @@ describe('TopicModel', () => {
       await topicModel.batchDeleteByGroupId();

       // Assert that the topics belonging to group1 were deleted
-      expect(
-
-      )
+      expect(await serverDB.select().from(topics).where(eq(topics.groupId, 'group1'))).toHaveLength(
+        2,
+      );
       expect(await serverDB.select().from(topics)).toHaveLength(2);
     });
   });

@@ -506,6 +506,9 @@ describe('TopicModel', () => {
         metadata: null,
         groupId: null,
         clientId: null,
+        agentId: null,
+        content: null,
+        editorData: null,
         createdAt: expect.any(Date),
         updatedAt: expect.any(Date),
         accessedAt: expect.any(Date),

@@ -551,6 +554,9 @@ describe('TopicModel', () => {
         title: 'New Topic',
         favorite: false,
         clientId: null,
+        agentId: null,
+        content: null,
+        editorData: null,
         groupId: null,
         historySummary: null,
         metadata: null,

package/packages/database/src/schemas/message.ts

@@ -83,6 +83,7 @@ export const messages = pgTable(

     role: varchar255('role').notNull(),
     content: text('content'),
+    editorData: jsonb('editor_data'),
     reasoning: jsonb('reasoning').$type<ModelReasoning>(),
     search: jsonb('search').$type<GroundingSearch>(),
     metadata: jsonb('metadata'),

@@ -104,6 +105,9 @@ export const messages = pgTable(
     userId: text('user_id')
       .references(() => users.id, { onDelete: 'cascade' })
       .notNull(),
+    /**
+     * we might deprecate sessionId in the future
+     */
     sessionId: text('session_id').references(() => sessions.id, { onDelete: 'cascade' }),
     topicId: text('topic_id').references(() => topics.id, { onDelete: 'cascade' }),
     threadId: text('thread_id').references(() => threads.id, { onDelete: 'cascade' }),

@@ -111,8 +115,7 @@ export const messages = pgTable(
     parentId: text('parent_id').references(() => messages.id, { onDelete: 'set null' }),
     quotaId: text('quota_id').references(() => messages.id, { onDelete: 'set null' }),

-
-    agentId: text('agent_id').references(() => agents.id, { onDelete: 'set null' }),
+    agentId: text('agent_id').references(() => agents.id, { onDelete: 'cascade' }),
     groupId: text('group_id').references(() => chatGroups.id, { onDelete: 'set null' }),
     // targetId can be an agent ID, "user", or null - no FK constraint
     targetId: text('target_id'),

package/packages/database/src/schemas/topic.ts

@@ -5,6 +5,7 @@ import { createInsertSchema } from 'drizzle-zod';

 import { idGenerator } from '../utils/idGenerator';
 import { createdAt, timestamps, timestamptz } from './_helpers';
+import { agents } from './agent';
 import { chatGroups } from './chatGroup';
 import { documents } from './file';
 import { sessions } from './session';

@@ -19,6 +20,9 @@ export const topics = pgTable(
     title: text('title'),
     favorite: boolean('favorite').default(false),
     sessionId: text('session_id').references(() => sessions.id, { onDelete: 'cascade' }),
+    content: text('content'),
+    editorData: jsonb('editor_data'),
+    agentId: text('agent_id').references(() => agents.id, { onDelete: 'cascade' }),
     groupId: text('group_id').references(() => chatGroups.id, { onDelete: 'cascade' }),
     userId: text('user_id')
       .references(() => users.id, { onDelete: 'cascade' })

@@ -34,6 +38,7 @@ export const topics = pgTable(
     index('topics_id_user_id_idx').on(t.id, t.userId),
     index('topics_session_id_idx').on(t.sessionId),
     index('topics_group_id_idx').on(t.groupId),
+    index('topics_agent_id_idx').on(t.agentId),
   ],
 );

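
For reference, the new agentId column and the topics_agent_id_idx index make agent-scoped topic lookups cheap. A minimal Drizzle sketch, assuming a database handle like the serverDB used in the tests above; the import path and helper name are illustrative, not part of this diff:

import { eq } from 'drizzle-orm';

import { topics } from '../schemas/topic'; // hypothetical relative path

// List every topic owned by one agent; the where clause is served by the
// topics_agent_id_idx btree index created in migration 0052.
// db stands in for whatever Drizzle Postgres instance the caller holds.
export const getTopicsByAgent = (db: any, agentId: string) =>
  db.select().from(topics).where(eq(topics.agentId, agentId));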

package/packages/model-runtime/package.json

@@ -12,8 +12,8 @@
     "test:update": "vitest -u"
   },
   "dependencies": {
-    "@aws-sdk/client-bedrock-runtime": "^3.
-    "@huggingface/inference": "^4.13.
+    "@aws-sdk/client-bedrock-runtime": "^3.941.0",
+    "@huggingface/inference": "^4.13.4",
     "@lobechat/const": "workspace:*",
     "@lobechat/types": "workspace:*",
     "@lobechat/utils": "workspace:*",

package/packages/model-runtime/src/core/openaiCompatibleFactory/index.ts

@@ -235,18 +235,45 @@ export const createOpenAICompatibleRuntime = <T extends Record<string, any> = an

     const log = debug(`${this.logPrefix}:shouldUseResponsesAPI`);

-    // Priority
-
-
+    // Priority 0: Check built-in responsesAPIModels FIRST (highest priority)
+    // These models MUST use Responses API regardless of user settings
+    if (model && responsesAPIModels.has(model)) {
+      log('using Responses API: model %s in built-in responsesAPIModels (forced)', model);
       return true;
     }

-    // Priority
-    if (userApiMode
+    // Priority 1: userApiMode is explicitly set to 'chatCompletion' (user disabled the switch)
+    if (userApiMode === 'chatCompletion') {
       log('using Chat Completions API: userApiMode=%s', userApiMode);
       return false;
     }

+    // Priority 2: When user enables the switch (userApiMode === 'responses')
+    // Check if useResponseModels is configured - if so, only matching models use Responses API
+    // If useResponseModels is not configured, all models use Responses API
+    if (userApiMode === 'responses') {
+      if (model && flagUseResponseModels?.length) {
+        const matches = flagUseResponseModels.some((m: string | RegExp) =>
+          typeof m === 'string' ? model.includes(m) : (m as RegExp).test(model),
+        );
+        if (matches) {
+          log(
+            'using Responses API: userApiMode=responses and model %s matches useResponseModels',
+            model,
+          );
+          return true;
+        }
+        log(
+          'using Chat Completions API: userApiMode=responses but model %s does not match useResponseModels',
+          model,
+        );
+        return false;
+      }
+      // No useResponseModels configured, use Responses API for all models
+      log('using Responses API: userApiMode=responses (no useResponseModels filter)');
+      return true;
+    }
+
     // Priority 3: Explicit responseApi flag
     if (responseApi) {
       log('using Responses API: explicit responseApi flag for %s', context);

@@ -259,7 +286,7 @@ export const createOpenAICompatibleRuntime = <T extends Record<string, any> = an
       return true;
     }

-    // Priority 5: Check if model matches useResponseModels patterns
+    // Priority 5: Check if model matches useResponseModels patterns (without user switch)
     if (model && flagUseResponseModels?.length) {
       const matches = flagUseResponseModels.some((m: string | RegExp) =>
         typeof m === 'string' ? model.includes(m) : (m as RegExp).test(model),

@@ -270,12 +297,6 @@ export const createOpenAICompatibleRuntime = <T extends Record<string, any> = an
       }
     }

-    // Priority 6: Check built-in responsesAPIModels
-    if (model && responsesAPIModels.has(model)) {
-      log('using Responses API: model %s in built-in responsesAPIModels', model);
-      return true;
-    }
-
     log('using Chat Completions API for %s', context);
     return false;
   }
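
Read together, the three hunks above move the built-in responsesAPIModels check to the top and add an explicit branch for userApiMode === 'responses'. A condensed sketch of the resulting decision order, using the names that appear in the diff (the untouched Priority 4 branch falls between the hunks and is omitted; this is not the actual implementation):

type ApiMode = 'chatCompletion' | 'responses' | undefined;

const matchesPatterns = (model: string, patterns: (string | RegExp)[]) =>
  patterns.some((m) => (typeof m === 'string' ? model.includes(m) : m.test(model)));

const shouldUseResponsesAPISketch = (
  model: string | undefined,
  userApiMode: ApiMode,
  responseApi: boolean | undefined,
  responsesAPIModels: Set<string>,
  flagUseResponseModels?: (string | RegExp)[],
): boolean => {
  // Priority 0: built-in models are always forced onto the Responses API
  if (model && responsesAPIModels.has(model)) return true;
  // Priority 1: the user explicitly switched to Chat Completions
  if (userApiMode === 'chatCompletion') return false;
  // Priority 2: the user enabled the switch; respect an optional useResponseModels filter
  if (userApiMode === 'responses') {
    if (model && flagUseResponseModels?.length) return matchesPatterns(model, flagUseResponseModels);
    return true;
  }
  // Priority 3: an explicit responseApi flag
  if (responseApi) return true;
  // Priority 5: model matches useResponseModels even without the user switch
  if (model && flagUseResponseModels?.length) return matchesPatterns(model, flagUseResponseModels);
  return false;
};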

package/packages/python-interpreter/src/worker.ts

@@ -32,7 +32,7 @@ def patch_matplotlib():

 patch_matplotlib()`;

-// Pyodide
+// Pyodide object cannot be transferred between Workers, so it's defined as a global variable
 let pyodide: PyodideAPI | undefined;

 class PythonWorker {

@@ -56,7 +56,7 @@ class PythonWorker {
   }

   /**
-   *
+   * Initialize Python interpreter
    */
   async init() {
     pyodide = await globalThis.loadPyodide({

@@ -67,13 +67,13 @@ class PythonWorker {
   }

   /**
-   *
-   * @param files
+   * Upload files to the interpreter environment
+   * @param files File list
    */
   async uploadFiles(files: File[]) {
     for (const file of files) {
       const content = new Uint8Array(await file.arrayBuffer());
-      // TODO:
+      // TODO: Consider using WORKERFS here to reduce one copy operation
       if (file.name.startsWith('/')) {
         this.pyodide.FS.writeFile(file.name, content);
       } else {

@@ -84,15 +84,15 @@ class PythonWorker {
   }

   /**
-   *
-   * @param files
+   * Download modified files from the interpreter environment
+   * @param files File list
    */
   async downloadFiles() {
     const result: File[] = [];
     for (const entry of this.pyodide.FS.readdir('/mnt/data')) {
       if (entry === '.' || entry === '..') continue;
       const filePath = `/mnt/data/${entry}`;
-      // pyodide
+      // pyodide's FS type definition has issues, have to use any
       const content = (this.pyodide.FS as any).readFile(filePath, { encoding: 'binary' });
       const blob = new Blob([content]);
       const file = new File([blob], filePath);

@@ -104,8 +104,8 @@ class PythonWorker {
   }

   /**
-   *
-   * @param packages
+   * Install Python packages
+   * @param packages Package name list
    */
   async installPackages(packages: string[]) {
     await this.pyodide.loadPackage('micropip');

@@ -115,16 +115,16 @@ class PythonWorker {
   }

   /**
-   *
-   * @param code
+   * Execute Python code
+   * @param code Code
    */
   async runPython(code: string): Promise<PythonResult> {
     await this.patchFonts();
-    // NOTE: loadPackagesFromImports
+    // NOTE: loadPackagesFromImports only processes official pyodide packages
     await this.pyodide.loadPackagesFromImports(code);
     await this.patchPackages();

-    //
+    // Capture standard output after installing dependencies to avoid logging installation messages
     const output: PythonOutput[] = [];
     this.pyodide.setStdout({
       batched: (o: string) => {

@@ -137,7 +137,7 @@ class PythonWorker {
       },
     });

-    //
+    // Execute code
     let result;
     let success = false;
     try {

@@ -172,15 +172,15 @@ class PythonWorker {
     };
     for (const [filename, url] of Object.entries(fontFiles)) {
       const buffer = await fetch(url, { cache: 'force-cache' }).then((res) => res.arrayBuffer());
-      // NOTE:
+      // NOTE: In theory, createLazyFile would be better here, but it causes errors in pyodide
       this.pyodide.FS.writeFile(`/usr/share/fonts/truetype/${filename}`, new Uint8Array(buffer));
     }
   }

   private async isNewFile(file: File) {
     const isSameFile = async (a: File, b: File) => {
-      // a
-      // b
+      // a is the passed-in file, may use absolute or relative path
+      // b is the file in the interpreter environment, uses absolute path
       if (a.name.startsWith('/')) {
         if (a.name !== b.name) return false;
       } else {

package/src/app/[variants]/(main)/settings/provider/(list)/ProviderGrid/index.tsx

@@ -23,6 +23,10 @@ const List = memo((props: ListProps) => {
   const { t } = useTranslation('modelProvider');
   const enabledList = useAiInfraStore(aiProviderSelectors.enabledAiProviderList, isEqual);
   const disabledList = useAiInfraStore(aiProviderSelectors.disabledAiProviderList, isEqual);
+  const disabledCustomList = useAiInfraStore(
+    aiProviderSelectors.disabledCustomAiProviderList,
+    isEqual,
+  );
   const [initAiProviderList] = useAiInfraStore((s) => [s.initAiProviderList]);

   if (!initAiProviderList)

@@ -63,6 +67,21 @@ const List = memo((props: ListProps) => {
         ))}
       </Grid>
     </Flexbox>
+    {disabledCustomList.length > 0 && (
+      <Flexbox gap={24}>
+        <Flexbox align={'center'} gap={8} horizontal>
+          <Text strong style={{ fontSize: 18 }}>
+            {t('list.title.custom')}
+          </Text>
+          <Tag>{disabledCustomList.length}</Tag>
+        </Flexbox>
+        <Grid gap={16} rows={3}>
+          {disabledCustomList.map((item) => (
+            <Card {...item} key={item.id} onProviderSelect={onProviderSelect} />
+          ))}
+        </Grid>
+      </Flexbox>
+    )}
     <Flexbox gap={24}>
       <Flexbox align={'center'} gap={8} horizontal>
         <Text strong style={{ fontSize: 18 }}>