@lobehub/chat 1.65.0 → 1.65.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/CHANGELOG.md +51 -0
  2. package/changelog/v1.json +18 -0
  3. package/docker-compose/local/docker-compose.yml +14 -0
  4. package/docker-compose/local/searxng-settings.yml +2582 -0
  5. package/docker-compose/setup.sh +3 -1
  6. package/docs/self-hosting/advanced/model-list.mdx +4 -2
  7. package/docs/self-hosting/advanced/model-list.zh-CN.mdx +4 -2
  8. package/package.json +7 -7
  9. package/src/app/(backend)/middleware/auth/index.ts +6 -0
  10. package/src/config/aiModels/google.ts +3 -3
  11. package/src/config/aiModels/groq.ts +10 -0
  12. package/src/config/aiModels/qwen.ts +43 -26
  13. package/src/const/message.ts +3 -0
  14. package/src/features/Conversation/Messages/Assistant/Tool/Render/CustomRender.tsx +7 -7
  15. package/src/features/MobileSwitchLoading/index.tsx +0 -1
  16. package/src/libs/agent-runtime/google/index.test.ts +8 -0
  17. package/src/libs/agent-runtime/google/index.ts +18 -5
  18. package/src/libs/agent-runtime/types/chat.ts +9 -1
  19. package/src/libs/agent-runtime/utils/anthropicHelpers.test.ts +113 -0
  20. package/src/libs/agent-runtime/utils/anthropicHelpers.ts +7 -4
  21. package/src/libs/agent-runtime/utils/streams/anthropic.test.ts +339 -94
  22. package/src/libs/agent-runtime/utils/streams/anthropic.ts +54 -34
  23. package/src/libs/agent-runtime/utils/streams/openai.test.ts +181 -0
  24. package/src/libs/agent-runtime/utils/streams/openai.ts +40 -30
  25. package/src/libs/agent-runtime/utils/streams/protocol.ts +4 -0
  26. package/src/services/__tests__/chat.test.ts +89 -50
  27. package/src/services/chat.ts +13 -1
  28. package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +1 -1
  29. package/src/types/message/base.ts +1 -0
  30. package/src/utils/fetch/__tests__/fetchSSE.test.ts +113 -10
  31. package/src/utils/fetch/fetchSSE.ts +12 -3
  32. package/src/utils/parseModels.test.ts +14 -0
  33. package/src/utils/parseModels.ts +4 -0
@@ -9,6 +9,7 @@ import {
9
9
  MessageToolCall,
10
10
  MessageToolCallChunk,
11
11
  MessageToolCallSchema,
12
+ ModelReasoning,
12
13
  } from '@/types/message';
13
14
  import { GroundingSearch } from '@/types/search';
14
15
 
@@ -23,7 +24,7 @@ export type OnFinishHandler = (
23
24
  context: {
24
25
  grounding?: GroundingSearch;
25
26
  observationId?: string | null;
26
- reasoning?: string;
27
+ reasoning?: ModelReasoning;
27
28
  toolCalls?: MessageToolCall[];
28
29
  traceId?: string | null;
29
30
  type?: SSEFinishType;
@@ -36,7 +37,8 @@ export interface MessageTextChunk {
36
37
  }
37
38
 
38
39
  export interface MessageReasoningChunk {
39
- text: string;
40
+ signature?: string;
41
+ text?: string;
40
42
  type: 'reasoning';
41
43
  }
42
44
 
@@ -271,6 +273,8 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptio
271
273
  });
272
274
 
273
275
  let thinking = '';
276
+ let thinkingSignature: string | undefined;
277
+
274
278
  const thinkingController = createSmoothMessage({
275
279
  onTextUpdate: (delta, text) => {
276
280
  thinking = text;
@@ -365,6 +369,11 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptio
365
369
  break;
366
370
  }
367
371
 
372
+ case 'reasoning_signature': {
373
+ thinkingSignature = data;
374
+ break;
375
+ }
376
+
368
377
  case 'reasoning': {
369
378
  if (textSmoothing) {
370
379
  thinkingController.pushToQueue(data);
@@ -436,7 +445,7 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptio
436
445
  await options?.onFinish?.(output, {
437
446
  grounding,
438
447
  observationId,
439
- reasoning: !!thinking ? thinking : undefined,
448
+ reasoning: !!thinking ? { content: thinking, signature: thinkingSignature } : undefined,
440
449
  toolCalls,
441
450
  traceId,
442
451
  type: finishedType,
@@ -72,6 +72,20 @@ describe('parseModelString', () => {
72
72
  });
73
73
  });
74
74
 
75
+ it('token and search', () => {
76
+ const result = parseModelString('qwen-max-latest=Qwen Max<32768:search>');
77
+
78
+ expect(result.add[0]).toEqual({
79
+ displayName: 'Qwen Max',
80
+ abilities: {
81
+ search: true,
82
+ },
83
+ id: 'qwen-max-latest',
84
+ contextWindowTokens: 32_768,
85
+ type: 'chat',
86
+ });
87
+ });
88
+
75
89
  it('multi models', () => {
76
90
  const result = parseModelString(
77
91
  'gemini-1.5-flash-latest=Gemini 1.5 Flash<16000:vision>,gpt-4-all=ChatGPT Plus<128000:fc:vision:file>',
@@ -80,6 +80,10 @@ export const parseModelString = (modelString: string = '', withDeploymentName =
80
80
  model.abilities!.files = true;
81
81
  break;
82
82
  }
83
+ case 'search': {
84
+ model.abilities!.search = true;
85
+ break;
86
+ }
83
87
  default: {
84
88
  console.warn(`Unknown capability: ${capability}`);
85
89
  }