@lobehub/chat 1.134.2 → 1.134.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,31 @@
  
  # Changelog
  
+ ### [Version 1.134.3](https://github.com/lobehub/lobe-chat/compare/v1.134.2...v1.134.3)
+
+ <sup>Released on **2025-10-05**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Type not preserved when model is sorted.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Type not preserved when model is sorted, closes [#9561](https://github.com/lobehub/lobe-chat/issues/9561) ([5fe2518](https://github.com/lobehub/lobe-chat/commit/5fe2518))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ### [Version 1.134.2](https://github.com/lobehub/lobe-chat/compare/v1.134.1...v1.134.2)
  
  <sup>Released on **2025-10-05**</sup>
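For context on the fix above: when a model row does not yet exist for the user, `updateModelsOrder` has to insert it, and before this patch the insert payload carried no `type`, so a non-chat model (for example an image model) presumably fell back to the column default after being reordered. A minimal sketch of that failure mode and of threading the type through, using a hypothetical in-memory store rather than the real Drizzle table:

```ts
// Hypothetical sketch of the failure mode fixed above; the store, helpers, and the
// 'chat' default are illustrative, not the actual lobe-chat schema or Drizzle code.
type ModelType = 'chat' | 'embedding' | 'image';

interface ModelRow {
  id: string;
  sort: number;
  type: ModelType;
}

const store = new Map<string, ModelRow>();

// Before the fix: the insert path knows nothing about `type`, so a row created
// while persisting the new order silently becomes the default 'chat' model.
const upsertOrderWithoutType = (id: string, sort: number) => {
  const existing = store.get(id);
  if (existing) store.set(id, { ...existing, sort });
  else store.set(id, { id, sort, type: 'chat' }); // an image model loses its type here
};

// After the fix: the caller threads the type through, and the upsert only
// touches the field when a type was actually provided.
const upsertOrderWithType = (id: string, sort: number, type?: ModelType) => {
  const existing = store.get(id);
  if (existing) store.set(id, { ...existing, sort, ...(type ? { type } : {}) });
  else store.set(id, { id, sort, type: type ?? 'chat' });
};
```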
package/changelog/v1.json CHANGED
@@ -1,4 +1,13 @@
  [
+   {
+     "children": {
+       "fixes": [
+         "Type not preserved when model is sorted."
+       ]
+     },
+     "date": "2025-10-05",
+     "version": "1.134.3"
+   },
    {
      "children": {
        "improvements": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.134.2",
+   "version": "1.134.3",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
@@ -315,5 +315,14 @@ describe('AiModelModel', () => {
      expect(models[0].id).toBe('model2');
      expect(models[1].id).toBe('model1');
    });
+
+   it('should preserve model type when inserting order records', async () => {
+     const sortMap = [{ id: 'image-model', sort: 0, type: 'image' as const }];
+
+     await aiProviderModel.updateModelsOrder('openai', sortMap);
+
+     const model = await aiProviderModel.findById('image-model');
+     expect(model?.type).toBe('image');
+   });
  });
});
@@ -223,20 +223,32 @@ export class AiModelModel {
  
  updateModelsOrder = async (providerId: string, sortMap: AiModelSortMap[]) => {
    await this.db.transaction(async (tx) => {
-     const updates = sortMap.map(({ id, sort }) => {
+     const updates = sortMap.map(({ id, sort, type }) => {
+       const now = new Date();
+       const insertValues: typeof aiModels.$inferInsert = {
+         enabled: true,
+         id,
+         providerId,
+         sort,
+         // source: isBuiltin ? 'builtin' : 'custom',
+         updatedAt: now,
+         userId: this.userId,
+       };
+
+       if (type) insertValues.type = type;
+
+       const updateValues: Partial<typeof aiModels.$inferInsert> = {
+         sort,
+         updatedAt: now,
+       };
+
+       if (type) updateValues.type = type;
+
        return tx
          .insert(aiModels)
-         .values({
-           enabled: true,
-           id,
-           providerId,
-           sort,
-           // source: isBuiltin ? 'builtin' : 'custom',
-           updatedAt: new Date(),
-           userId: this.userId,
-         })
+         .values(insertValues)
          .onConflictDoUpdate({
-           set: { sort, updatedAt: new Date() },
+           set: updateValues,
            target: [aiModels.id, aiModels.userId, aiModels.providerId],
          });
      });
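A hedged usage sketch of the widened `updateModelsOrder` call: the declared instance, provider id, and model ids below are illustrative, and the abbreviated type union stands in for the real `AiModelType` from model-bank.

```ts
// Illustrative only: nothing below is taken from the diff except the call shape.
type AiModelType = 'chat' | 'embedding' | 'image'; // abbreviated stand-in for the real union

interface AiModelSortMap {
  id: string;
  sort: number;
  type?: AiModelType;
}

declare const aiModelModel: {
  updateModelsOrder: (providerId: string, sortMap: AiModelSortMap[]) => Promise<void>;
};

const reorderOpenAiModels = async () => {
  await aiModelModel.updateModelsOrder('openai', [
    { id: 'gpt-image-1', sort: 0, type: 'image' }, // new rows are inserted with the right type
    { id: 'gpt-4o-mini', sort: 1, type: 'chat' },
    { id: 'some-legacy-id', sort: 2 },             // no type: existing rows keep their stored type
  ]);
};
```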
@@ -371,6 +371,7 @@ export type UpdateAiModelParams = z.infer<typeof UpdateAiModelSchema>;
  export interface AiModelSortMap {
    id: string;
    sort: number;
+   type?: AiModelType;
  }
  
  export const ToggleAiModelEnableSchema = z.object({
@@ -75,9 +75,10 @@ const getThinkingModelCategory = (model?: string): ThinkingModelCategory => {
    const normalized = model.toLowerCase();
  
    if (normalized.includes('robotics-er-1.5-preview')) return 'robotics';
-   if (normalized.includes('-2.5-flash-lite')) return 'flashLite';
-   if (normalized.includes('-2.5-flash')) return 'flash';
-   if (normalized.includes('-2.5-pro')) return 'pro';
+   if (normalized.includes('-2.5-flash-lite') || normalized.includes('flash-lite-latest'))
+     return 'flashLite';
+   if (normalized.includes('-2.5-flash') || normalized.includes('flash-latest')) return 'flash';
+   if (normalized.includes('-2.5-pro') || normalized.includes('pro-latest')) return 'pro';
  
    return 'other';
  };
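The broadened matching above lets Gemini's rolling `-latest` aliases land in the same buckets as the pinned 2.5 ids. A hedged sketch of the expected mapping; the model ids are examples, and `getThinkingModelCategory` is only declared here, not reimplemented:

```ts
// Expected buckets under the updated substring checks; the category union is abbreviated.
declare function getThinkingModelCategory(
  model?: string,
): 'robotics' | 'flashLite' | 'flash' | 'pro' | 'other';

const expectations: Array<[string, string]> = [
  ['gemini-2.5-flash-lite', 'flashLite'],
  ['gemini-flash-lite-latest', 'flashLite'], // newly matched via 'flash-lite-latest'
  ['gemini-2.5-flash', 'flash'],
  ['gemini-flash-latest', 'flash'],          // newly matched via 'flash-latest'
  ['gemini-2.5-pro', 'pro'],
  ['gemini-pro-latest', 'pro'],              // newly matched via 'pro-latest'
];

for (const [id, category] of expectations) {
  console.assert(getThinkingModelCategory(id) === category, `${id} should be ${category}`);
}
```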
@@ -6,8 +6,8 @@ import { useTranslation } from 'react-i18next';
  import { Flexbox } from 'react-layout-kit';
  
  import { useAiInfraStore } from '@/store/aiInfra';
- import { AiProviderModelListItem } from '../../../../../../../../../packages/model-bank/src/types/aiModel';
  
+ import { AiProviderModelListItem } from '../../../../../../../../../packages/model-bank/src/types/aiModel';
  import ListItem from './ListItem';
  
  const useStyles = createStyles(({ css, token }) => ({
@@ -76,6 +76,7 @@ const SortModelModal = memo<SortModelModalProps>(({ open, onCancel, defaultItems
    const sortMap = items.map((item, index) => ({
      id: item.id,
      sort: index,
+     type: item.type,
    }));
  
    setLoading(true);
@@ -7,13 +7,15 @@ import { authedProcedure, router } from '@/libs/trpc/lambda';
  import { serverDatabase } from '@/libs/trpc/lambda/middleware';
  import { getServerGlobalConfig } from '@/server/globalConfig';
  import { KeyVaultsGateKeeper } from '@/server/modules/KeyVaultsEncrypt';
+ import { ProviderConfig } from '@/types/user/settings';
+
  import {
+   AiModelTypeSchema,
    AiProviderModelListItem,
    CreateAiModelSchema,
    ToggleAiModelEnableSchema,
    UpdateAiModelSchema,
  } from '../../../../packages/model-bank/src/types/aiModel';
- import { ProviderConfig } from '@/types/user/settings';
  
  const aiModelProcedure = authedProcedure.use(serverDatabase).use(async (opts) => {
@@ -121,6 +123,7 @@ export const aiModelRouter = router({
        z.object({
          id: z.string(),
          sort: z.number(),
+         type: AiModelTypeSchema.optional(),
        }),
      ),
    }),
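Marking `type` optional in the input schema keeps the endpoint backward compatible: callers that still send bare `{ id, sort }` pairs validate unchanged. A minimal zod sketch of that behaviour, with an illustrative enum standing in for the real `AiModelTypeSchema`:

```ts
import { z } from 'zod';

// Stand-in for AiModelTypeSchema; the real enum lives in packages/model-bank.
const AiModelTypeSchema = z.enum(['chat', 'image', 'embedding']);

const SortItemSchema = z.object({
  id: z.string(),
  sort: z.number(),
  type: AiModelTypeSchema.optional(),
});

SortItemSchema.parse({ id: 'gpt-4o-mini', sort: 0 });                // old payload still valid
SortItemSchema.parse({ id: 'gpt-image-1', sort: 1, type: 'image' }); // new payload carries type
```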