@lobehub/chat 1.69.1 → 1.69.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/CHANGELOG.md +50 -0
  2. package/changelog/v1.json +18 -0
  3. package/locales/ar/models.json +9 -0
  4. package/locales/bg-BG/models.json +9 -0
  5. package/locales/de-DE/models.json +9 -0
  6. package/locales/en-US/models.json +9 -0
  7. package/locales/es-ES/models.json +9 -0
  8. package/locales/fa-IR/models.json +9 -0
  9. package/locales/fr-FR/models.json +9 -0
  10. package/locales/it-IT/models.json +9 -0
  11. package/locales/ja-JP/models.json +9 -0
  12. package/locales/ko-KR/models.json +9 -0
  13. package/locales/nl-NL/models.json +9 -0
  14. package/locales/pl-PL/models.json +9 -0
  15. package/locales/pt-BR/models.json +9 -0
  16. package/locales/ru-RU/models.json +9 -0
  17. package/locales/tr-TR/models.json +9 -0
  18. package/locales/vi-VN/models.json +9 -0
  19. package/locales/zh-CN/models.json +9 -0
  20. package/locales/zh-TW/models.json +9 -0
  21. package/package.json +3 -3
  22. package/src/app/[variants]/(auth)/next-auth/signin/AuthSignInBox.tsx +161 -0
  23. package/src/app/[variants]/(auth)/next-auth/signin/page.tsx +11 -0
  24. package/src/app/[variants]/(main)/profile/(home)/features/SSOProvidersList/index.tsx +1 -1
  25. package/src/{app/[variants]/(main)/profile/(home)/features/SSOProvidersList → components/NextAuth}/AuthIcons.tsx +8 -6
  26. package/src/libs/agent-runtime/UniformRuntime/index.ts +114 -0
  27. package/src/libs/agent-runtime/anthropic/handleAnthropicError.ts +15 -0
  28. package/src/libs/agent-runtime/anthropic/index.test.ts +10 -1
  29. package/src/libs/agent-runtime/anthropic/index.ts +58 -40
  30. package/src/libs/agent-runtime/azureai/index.ts +7 -1
  31. package/src/libs/agent-runtime/github/index.ts +20 -25
  32. package/src/libs/agent-runtime/index.ts +2 -0
  33. package/src/libs/agent-runtime/openai/index.ts +2 -22
  34. package/src/libs/agent-runtime/types/type.ts +1 -1
  35. package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.ts +11 -7
  36. package/src/libs/agent-runtime/utils/openaiHelpers.ts +22 -0
  37. package/src/libs/next-auth/auth.config.ts +1 -0
  38. package/src/middleware.ts +1 -1
@@ -13,7 +13,7 @@ export interface ChatCompletionErrorPayload {
13
13
  endpoint?: string;
14
14
  error: object;
15
15
  errorType: ILobeAgentRuntimeErrorType;
16
- provider: ModelProvider;
16
+ provider: string;
17
17
  }
18
18
 
19
19
  export interface CreateChatCompletionOptions {
@@ -168,6 +168,8 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
168
168
  return class LobeOpenAICompatibleAI implements LobeRuntimeAI {
169
169
  client!: OpenAI;
170
170
 
171
+ private id: string;
172
+
171
173
  baseURL!: string;
172
174
  protected _options: ConstructorOptions<T>;
173
175
 
@@ -192,6 +194,8 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
192
194
  }
193
195
 
194
196
  this.baseURL = baseURL || this.client.baseURL;
197
+
198
+ this.id = options.id || provider;
195
199
  }
196
200
 
197
201
  async chat({ responseMode, ...payload }: ChatStreamPayload, options?: ChatCompetitionOptions) {
@@ -210,7 +214,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
210
214
  const streamOptions: OpenAIStreamOptions = {
211
215
  bizErrorTypeTransformer: chatCompletion?.handleStreamBizErrorType,
212
216
  callbacks: options?.callback,
213
- provider,
217
+ provider: this.id,
214
218
  };
215
219
 
216
220
  if (customClient?.createChatCompletionStream) {
@@ -368,7 +372,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
368
372
  if (errorResult)
369
373
  return AgentRuntimeError.chat({
370
374
  ...errorResult,
371
- provider,
375
+ provider: this.id,
372
376
  } as ChatCompletionErrorPayload);
373
377
  }
374
378
 
@@ -379,7 +383,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
379
383
  endpoint: desensitizedEndpoint,
380
384
  error: error as any,
381
385
  errorType: ErrorType.invalidAPIKey,
382
- provider: provider as ModelProvider,
386
+ provider: this.id as ModelProvider,
383
387
  });
384
388
  }
385
389
 
@@ -397,7 +401,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
397
401
  endpoint: desensitizedEndpoint,
398
402
  error: errorResult,
399
403
  errorType: AgentRuntimeErrorType.InsufficientQuota,
400
- provider: provider as ModelProvider,
404
+ provider: this.id as ModelProvider,
401
405
  });
402
406
  }
403
407
 
@@ -406,7 +410,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
406
410
  endpoint: desensitizedEndpoint,
407
411
  error: errorResult,
408
412
  errorType: AgentRuntimeErrorType.ModelNotFound,
409
- provider: provider as ModelProvider,
413
+ provider: this.id as ModelProvider,
410
414
  });
411
415
  }
412
416
 
@@ -417,7 +421,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
417
421
  endpoint: desensitizedEndpoint,
418
422
  error: errorResult,
419
423
  errorType: AgentRuntimeErrorType.ExceededContextWindow,
420
- provider: provider as ModelProvider,
424
+ provider: this.id as ModelProvider,
421
425
  });
422
426
  }
423
427
  }
@@ -426,7 +430,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
426
430
  endpoint: desensitizedEndpoint,
427
431
  error: errorResult,
428
432
  errorType: RuntimeError || ErrorType.bizError,
429
- provider: provider as ModelProvider,
433
+ provider: this.id as ModelProvider,
430
434
  });
431
435
  }
432
436
  };
@@ -1,5 +1,7 @@
1
1
  import OpenAI from 'openai';
2
2
 
3
+ import { disableStreamModels, systemToUserModels } from '@/const/models';
4
+ import { ChatStreamPayload, OpenAIChatMessage } from '@/libs/agent-runtime';
3
5
  import { imageUrlToBase64 } from '@/utils/imageToBase64';
4
6
 
5
7
  import { parseDataUri } from './uriParser';
@@ -38,3 +40,23 @@ export const convertOpenAIMessages = async (messages: OpenAI.ChatCompletionMessa
38
40
  })),
39
41
  )) as OpenAI.ChatCompletionMessageParam[];
40
42
  };
43
+
44
+ export const pruneReasoningPayload = (payload: ChatStreamPayload) => {
45
+ return {
46
+ ...payload,
47
+ frequency_penalty: 0,
48
+ messages: payload.messages.map((message: OpenAIChatMessage) => ({
49
+ ...message,
50
+ role:
51
+ message.role === 'system'
52
+ ? systemToUserModels.has(payload.model)
53
+ ? 'user'
54
+ : 'developer'
55
+ : message.role,
56
+ })),
57
+ presence_penalty: 0,
58
+ stream: !disableStreamModels.has(payload.model),
59
+ temperature: 1,
60
+ top_p: 1,
61
+ };
62
+ };
@@ -42,6 +42,7 @@ export default {
42
42
  debug: authEnv.NEXT_AUTH_DEBUG,
43
43
  pages: {
44
44
  error: '/next-auth/error',
45
+ signIn: '/next-auth/signin',
45
46
  },
46
47
  providers: initSSOProviders(),
47
48
  secret: authEnv.NEXT_AUTH_SECRET,
package/src/middleware.ts CHANGED
@@ -36,7 +36,7 @@ export const config = {
36
36
 
37
37
  '/login(.*)',
38
38
  '/signup(.*)',
39
- '/next-auth/error',
39
+ '/next-auth/(.*)',
40
40
  // ↓ cloud ↓
41
41
  ],
42
42
  };